hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---
285466c83ba2be432549428b4b7f2d63e05e68ef | 1,351 | require "test_helper"
class OtpTest < MiniTest::Unit::TestCase
def setup
@user = User.new
@user.email = 'roberto@heapsource.com'
@user.run_callbacks :create
@visitor = Visitor.new
@visitor.email = 'roberto@heapsource.com'
@visitor.run_callbacks :create
end
def test_authenticate_with_otp
code = @user.otp_code
assert @user.authenticate_otp(code)
code = @visitor.otp_code
assert @visitor.authenticate_otp(code)
end
def test_authenticate_with_otp_when_drift_is_allowed
code = @user.otp_code(Time.now - 30)
assert @user.authenticate_otp(code, drift: 60)
code = @visitor.otp_code(Time.now - 30)
assert @visitor.authenticate_otp(code, drift: 60)
end
def test_otp_code
assert_match(/\d{5,6}/, @user.otp_code.to_s)
assert_match(/\d{5,6}/, @visitor.otp_code.to_s)
end
def test_provisioning_uri_with_provided_account
assert_match %r{otpauth://totp/roberto\?secret=\w{16}}, @user.provisioning_uri("roberto")
assert_match %r{otpauth://totp/roberto\?secret=\w{16}}, @visitor.provisioning_uri("roberto")
end
def test_provisioning_uri_with_email_field
assert_match %r{otpauth://totp/roberto@heapsource\.com\?secret=\w{16}}, @user.provisioning_uri
assert_match %r{otpauth://totp/roberto@heapsource\.com\?secret=\w{16}}, @visitor.provisioning_uri
end
end
| 30.704545 | 101 | 0.724648 |
e9a7b5d533fc4a14d686c1a9eb27bae380cec0ad | 1,999 | require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
Bundler.require
require "doccex"
module Dummy
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
end
end
| 38.442308 | 99 | 0.729865 |
87e1fdc79aeecc928d199319c3753b1a51e16850 | 6,829 | require 'rails_helper'
RSpec.describe People::MembersController, vcr: true do
describe "GET index" do
before(:each) do
get :index
end
it 'should have a response with http status ok (200)' do
expect(response).to have_http_status(:ok)
end
it 'assigns @people and @letters' do
assigns(:people).each do |person|
expect(person).to be_a(Grom::Node)
expect(person.type).to eq('https://id.parliament.uk/schema/Person')
end
expect(assigns(:letters)).to be_a(Array)
end
it 'assigns @people in alphabetical order' do
expect(assigns(:people)[0].given_name).to eq('personGivenName - 1')
expect(assigns(:people)[1].given_name).to eq('personGivenName - 10')
end
it 'renders the members template' do
expect(response).to render_template('index')
end
end
describe "GET current" do
before(:each) do
get :current
end
it 'should have a response with http status ok (200)' do
expect(response).to have_http_status(:ok)
end
it 'assigns @people and @letters' do
assigns(:people).each do |person|
expect(person).to be_a(Grom::Node)
expect(person.type).to eq('https://id.parliament.uk/schema/Person')
end
expect(assigns(:letters)).to be_a(Array)
end
it 'assigns @people in alphabetical order' do
expect(assigns(:people)[0].given_name).to eq('personGivenName - 1')
expect(assigns(:people)[1].given_name).to eq('personGivenName - 10')
end
it 'renders the current_members template' do
expect(response).to render_template('current')
end
end
describe 'GET letters' do
context 'there is a response' do
before(:each) do
get :letters, params: { letter: 'y' }
end
it 'should have a response with http status ok (200)' do
expect(response).to have_http_status(:ok)
end
it 'assigns @people and @letters' do
assigns(:people).each do |person|
expect(person).to be_a(Grom::Node)
expect(person.type).to eq('https://id.parliament.uk/schema/Person')
end
expect(assigns(:letters)).to be_a(Array)
end
it 'assigns @people in alphabetical order' do
expect(assigns(:people)[0].given_name).to eq('personGivenName - 1')
expect(assigns(:people)[1].given_name).to eq('personGivenName - 10')
end
it 'renders the members_letters template' do
expect(response).to render_template('letters')
end
end
context 'there is no response' do
before(:each) do
get :letters, params: { letter: 'x' }
end
it 'http status of 200' do
expect(response).to have_http_status(200)
end
it 'has a blank @people array' do
expect(controller.instance_variable_get(:@people)).to be_empty
end
end
end
describe "GET current_letters" do
context 'there is a response' do
before(:each) do
get :current_letters, params: { letter: 'z' }
end
it 'should have a response with http status ok (200)' do
expect(response).to have_http_status(:ok)
end
it 'assigns @people and @letters' do
assigns(:people).each do |person|
expect(person).to be_a(Grom::Node)
expect(person.type).to eq('https://id.parliament.uk/schema/Person')
end
expect(assigns(:letters)).to be_a(Array)
end
it 'assigns @people in alphabetical order' do
expect(assigns(:people)[0].given_name).to eq('personGivenName - 1')
expect(assigns(:people)[1].given_name).to eq('personGivenName - 2')
end
it 'renders the current_members_letters template' do
expect(response).to render_template('current_letters')
end
end
context 'there is no response' do
before(:each) do
get :current_letters, params: { letter: 'x' }
end
it 'should have a response with a http status of 200' do
expect(response).to have_http_status(200)
end
it 'should have a blank @people array' do
expect(controller.instance_variable_get(:@people)).to be_empty
end
end
end
describe "GET a_to_z" do
before(:each) do
get :a_to_z
end
it 'should have a response with http status ok (200)' do
expect(response).to have_http_status(:ok)
end
it 'assigns @letters' do
expect(assigns(:letters)).to be_a(Array)
end
it 'renders the a_to_z_members template' do
expect(response).to render_template('a_to_z')
end
end
describe "GET a_to_z_current" do
before(:each) do
get :a_to_z_current
end
it 'should have a response with http status ok (200)' do
expect(response).to have_http_status(:ok)
end
it 'assigns @letters' do
expect(assigns(:letters)).to be_a(Array)
end
it 'renders the a_to_z_current_members template' do
expect(response).to render_template('a_to_z_current')
end
end
describe '#data_check' do
context 'an available data format is requested' do
methods = [
{
route: 'index',
data_url: "#{ENV['PARLIAMENT_BASE_URL']}/member_index"
},
{
route: 'current',
data_url: "#{ENV['PARLIAMENT_BASE_URL']}/member_current"
},
{
route: 'letters',
parameters: { letter: 'y' },
data_url: "#{ENV['PARLIAMENT_BASE_URL']}/member_by_initial?initial=y"
},
{
route: 'current_letters',
parameters: { letter: 'z' },
data_url: "#{ENV['PARLIAMENT_BASE_URL']}/member_current_by_initial?initial=z"
},
{
route: 'a_to_z',
data_url: "#{ENV['PARLIAMENT_BASE_URL']}/member_a_to_z"
},
{
route: 'a_to_z_current',
data_url: "#{ENV['PARLIAMENT_BASE_URL']}/member_current_a_to_z"
}
]
before(:each) do
headers = { 'Accept' => 'application/rdf+xml' }
request.headers.merge(headers)
end
it 'should have a response with http status redirect (302)' do
methods.each do |method|
if method.include?(:parameters)
get method[:route].to_sym, params: method[:parameters]
else
get method[:route].to_sym
end
expect(response).to have_http_status(302)
end
end
it 'redirects to the data service' do
methods.each do |method|
if method.include?(:parameters)
get method[:route].to_sym, params: method[:parameters]
else
get method[:route].to_sym
end
expect(response).to redirect_to(method[:data_url])
end
end
end
end
end
| 27.873469 | 89 | 0.607263 |
03c772296a228e78c6f30d5d6301ec9a6b0a202a | 434 | # Encoding: UTF-8
[{name: "Comments",
scope: "text.html.cfm",
settings:
{shellVariables:
[{name: "TM_COMMENT_START", value: "<!--- "},
{name: "TM_COMMENT_END", value: " --->"}]},
uuid: "904C79F1-5730-4D8D-986A-0D71587B2C1F"},
{name: "Smart Typing Pairs",
scope: "meta.scope.output.cfm, string.quoted.double.cfm",
settings: {smartTypingPairs: [["#", "#"]]},
uuid: "862EDD04-D606-4EDB-8D01-425D8024CF1B"}]
| 31 | 59 | 0.62212 |
ed331132fce884d696071e86671c7fa40280b7b8 | 1,442 | # Redmine - project management software
# Copyright (C) 2006-2016 Jean-Philippe Lang
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
module Redmine
module SubclassFactory
def self.included(base)
base.extend ClassMethods
end
module ClassMethods
def get_subclass(class_name)
klass = nil
begin
klass = class_name.to_s.classify.constantize
rescue
# invalid class name
end
unless subclasses.include? klass
klass = nil
end
klass
end
# Returns an instance of the given subclass name
def new_subclass_instance(class_name, *args)
klass = get_subclass(class_name)
if klass
klass.new(*args)
end
end
end
end
end
| 30.041667 | 81 | 0.68724 |
e8e5cef067164695a2b672ea7a5d8b2ea2e210d8 | 588 | # Load Houston
require "houston/application"
# Configure Houston
Houston.config do
# Houston should load config/database.yml from this module
# rather than from Houston Core.
root Pathname.new File.expand_path("../../..", __FILE__)
# Give dummy values to these required fields.
host "houston.test.com"
mailer_sender "[email protected]"
# Houston still hard-codes knowledge of these Roles.
# This will eventually be refactored away.
roles "Developer", "Tester"
project_roles "Maintainer"
# Mount this module on the dummy Houston application.
use :roadmap
end
| 24.5 | 60 | 0.738095 |
f88dec64caf19f295d9856052a3381edfebb4a8d | 1,328 | require 'spec_helper.rb'
Sequel.extension(:migration)
describe 'pg_trgm' do
before(:each) do
DB.create_table(:foods) do |t|
primary_key :id
String :name
end
end
after(:each) do
DB.drop_table(:foods)
DB.drop_table(:schema_info)
end
describe 'add_pg_trgm' do
before do
Sequel::Migrator.apply(DB, 'spec/files', 1)
end
it 'creates trgm index' do
expect(DB[:pg_class].where(relname: 'foods_name_trgm_index').count).to eq 1
end
it 'has trgm limit' do
expect(DB['SELECT show_limit();'].first[:show_limit]).to be_kind_of(Float)
end
end
describe 'drop_pg_trgm' do
before do
Sequel::Migrator.apply(DB, 'spec/files', 1)
end
it 'drops trgm index' do
expect(DB[:pg_class].where(relname: 'foods_name_trgm_index').count).to eq 1
Sequel::Migrator.apply(DB, 'spec/files', 0)
expect(DB[:pg_class].where(relname: 'foods_name_trgm_index').count).to eq 0
end
end
describe 'text_search' do
before do
Sequel::Migrator.apply(DB, 'spec/files', 1)
class Food < Sequel::Model(DB)
plugin :pg_trgm
end
Food.create(name: 'Banana Pancakes')
end
it 'returns proper search results' do
expect(Food.dataset.text_search(:name, 'Banan cakes').count).to eq 1
end
end
end
| 22.508475 | 81 | 0.646084 |
11169874255e2d923a4b2f8834f7bb7ad4b7c74e | 1,386 | # -*- encoding: utf-8 -*-
# stub: activejob 5.1.2 ruby lib
Gem::Specification.new do |s|
s.name = "activejob".freeze
s.version = "5.1.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["David Heinemeier Hansson".freeze]
s.date = "2017-06-26"
s.description = "Declare job classes that can be run by a variety of queueing backends.".freeze
s.email = "david@loudthinking.com".freeze
s.homepage = "http://rubyonrails.org".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.2.2".freeze)
s.rubygems_version = "2.6.13".freeze
s.summary = "Job framework with pluggable queues.".freeze
s.installed_by_version = "2.6.13" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activesupport>.freeze, ["= 5.1.2"])
s.add_runtime_dependency(%q<globalid>.freeze, [">= 0.3.6"])
else
s.add_dependency(%q<activesupport>.freeze, ["= 5.1.2"])
s.add_dependency(%q<globalid>.freeze, [">= 0.3.6"])
end
else
s.add_dependency(%q<activesupport>.freeze, ["= 5.1.2"])
s.add_dependency(%q<globalid>.freeze, [">= 0.3.6"])
end
end
| 37.459459 | 112 | 0.675325 |
284798233ed52c1c740f46644581109c5cc25f26 | 1,132 | require 'optparse'
require 'vagrant'
module VagrantPlugins
module Mount
module Command
class Unmount < Vagrant.plugin('2', :command)
def self.synopsis
'Unmount ISO from Virtual Machine'
end
def initialize(argv, env)
@env=env
super
end
def execute
options = { remove_device: false }
parser = OptionParser.new do |opts|
opts.banner = 'Usage: vagrant unmount [options] [vm-name]'
opts.separator ''
opts.separator ' Options:'
opts.on("--iso path", "The path of the ISO to unmount") { |arg| options[:path] = arg }
opts.on("--remove-device", "Remove the controller/device/port after unmounting") { |arg| options[:remove_device] = true }
end
argv = parse_options(parser)
return unless argv
argv << "default" if argv.empty?
with_target_vms(argv) do |vm|
vm.action(:unmount, mount_point: options[:path], remove_device: options[:remove_device])
end
0
end
end
end
end
end
| 28.3 | 133 | 0.567138 |
875356e21250471ff800e8f41e4210335aa1f723 | 5,315 | describe PxeController do
before do
stub_user(:features => :all)
end
describe '#tree_select ' do
it 'calls methods with x_node as param' do
controller.params = {:id => 'root', :tree => :pxe_servers_tree}
expect(controller).to receive(:get_node_info).with("root")
expect(controller).to receive(:replace_right_cell).with(:nodetype => "root")
controller.tree_select
end
end
describe '#accordion_select ' do
it 'calls methods with x_node as param' do
controller.params = {:id => 'pxe_servers_accord', :tree => :pxe_servers_tree}
allow(controller).to receive(:x_node).and_return('root')
expect(controller).to receive(:get_node_info).with("root")
expect(controller).to receive(:replace_right_cell).with(:nodetype => "root")
controller.accordion_select
end
end
describe 'x_button' do
let!(:server) { EvmSpecHelper.local_miq_server }
let(:pending_actions) { %w(pxe_image_tag windows_image_tag) }
let(:allowed_actions) { PxeController::PXE_X_BUTTON_ALLOWED_ACTIONS.keys - pending_actions }
before do
ApplicationController.handle_exceptions = true
login_as user_with_feature(%w(pxe_server_accord pxe_server_refresh) + allowed_actions)
end
describe 'corresponding methods are called for allowed actions' do
PxeController::PXE_X_BUTTON_ALLOWED_ACTIONS.each_pair do |action_name, method|
it "calls the appropriate method: '#{method}' for action '#{action_name}'" do
pending("Action hasn't been fully implemented with toolbar entries or features yet") if pending_actions.include?(action_name)
expect(controller).to receive(method)
get :x_button, :params => { :pressed => action_name }
end
end
end
it 'exception is raised for unknown action' do
get :x_button, :params => { :pressed => 'random_dude', :format => :html }
expect(response).to render_template('layouts/exception')
end
it "Pressing Refresh button should show display name in the flash message" do
pxe = FactoryBot.create(:pxe_server)
controller.params = {:id => pxe.id}
controller.instance_variable_set(:@sb,
:trees => {
:pxe_tree => {:active_node => "ps-#{pxe.id}"}
},
:active_tree => :pxe_servers_tree)
allow(controller).to receive(:get_node_info)
allow(controller).to receive(:replace_right_cell)
controller.send(:pxe_server_refresh)
expect(assigns(:flash_array).first[:message]).to include("Refresh Relationships successfully initiated")
end
end
describe "#tree_select" do
before { login_as FactoryBot.create(:user_admin) }
subject { post :tree_select, :params => {:id => 'root', :tree => 'pxe_servers_tree'} }
render_views
it do
allow(controller).to receive(:data_for_breadcrumbs).and_return({})
bypass_rescue
is_expected.to have_http_status 200
end
end
describe 'replace_right_cell' do
it "Can build all the trees" do
seed_session_trees('pxe', :pxe_tree, 'root')
session_to_sb
expect(controller).to receive(:reload_trees_by_presenter).with(
instance_of(ExplorerPresenter),
array_including(
instance_of(TreeBuilderPxeServers),
instance_of(TreeBuilderPxeImageTypes),
instance_of(TreeBuilderPxeImageTypes),
instance_of(TreeBuilderPxeCustomizationTemplates),
instance_of(TreeBuilderIsoDatastores)
)
)
expect(controller).to receive(:render)
controller.send(:replace_right_cell, :replace_trees => %i(pxe_servers pxe_image_types customization_templates iso_datastores))
end
end
context "GenericSessionMixin" do
let(:lastaction) { 'lastaction' }
let(:display) { 'display' }
let(:current_page) { 'current_page' }
describe '#get_session_data' do
it "Sets variables correctly" do
allow(controller).to receive(:session).and_return(:pxe_lastaction => lastaction,
:pxe_display => display,
:pxe_current_page => current_page)
controller.send(:get_session_data)
expect(controller.instance_variable_get(:@title)).to eq("PXE")
expect(controller.instance_variable_get(:@layout)).to eq("pxe")
expect(controller.instance_variable_get(:@lastaction)).to eq(lastaction)
expect(controller.instance_variable_get(:@display)).to eq(display)
expect(controller.instance_variable_get(:@current_page)).to eq(current_page)
end
end
describe '#set_session_data' do
it "Sets session correctly" do
controller.instance_variable_set(:@lastaction, lastaction)
controller.instance_variable_set(:@display, display)
controller.instance_variable_set(:@current_page, current_page)
controller.send(:set_session_data)
expect(controller.session[:pxe_lastaction]).to eq(lastaction)
expect(controller.session[:pxe_display]).to eq(display)
expect(controller.session[:pxe_current_page]).to eq(current_page)
end
end
end
end
| 40.572519 | 135 | 0.6619 |
79a087bbffceba86c30d41fa9eae255b2e3d9c40 | 1,670 | # ------------------------------------------------------------
# Simplecov
require 'simplecov' if ENV['COVERAGE']
# ------------------------------------------------------------
# Rspec configuration
RSpec.configure do |config|
config.raise_errors_for_deprecations!
config.mock_with :rspec
end
require_relative '../../spec_helpers/rspec_custom_matchers'
# ------------------------------------------------------------
# Rails
if (env = ENV['RAILS_ENV'])
abort("Can't run tests in environment #{env}") if env != 'test'
else
ENV['RAILS_ENV'] = 'test'
end
# ------------------------------------------------------------
# Stash
ENV['STASH_ENV'] = 'test'
require 'stash_engine'
LICENSES = YAML.load_file(File.expand_path('config/licenses.yml', __dir__)).with_indifferent_access
APP_CONFIG = OpenStruct.new(YAML.load_file(File.expand_path('config/app_config.yml', __dir__))['test'])
ENGINE_PATH = Gem::Specification.find_by_name('stash_engine').gem_dir
%W[
#{ENGINE_PATH}/app/models/stash_engine/concerns
#{ENGINE_PATH}/app/models/stash_engine
#{ENGINE_PATH}/app/mailers
#{ENGINE_PATH}/app/mailers/stash_engine
].each do |path|
$LOAD_PATH.unshift(path) if File.directory?(path)
Dir.glob("#{path}/**/*.rb").sort.each(&method(:require))
end
%w[
hash_to_ostruct
inflections
repository
].each do |initializer|
require "#{ENGINE_PATH}/config/initializers/#{initializer}.rb"
end
# Note: Even if we're not doing any database work, ActiveRecord callbacks will still raise warnings
ActiveRecord::Base.raise_in_transactional_callbacks = true
# ------------------------------------------------------------
# Mocks
require 'mocks/mock_repository'
| 27.377049 | 103 | 0.608982 |
629d7d0f04aa965f4df8d0257f8d6be7d0947d5c | 1,361 | class ProviderSuggestionsController < ApplicationController
rescue_from JsonApiClient::Errors::ClientError, with: :handle_error_request
def suggest
return render(json: { error: "Bad request" }, status: :bad_request) if params_invalid?
sanitised_query = CGI.escape(params[:query])
suggestions = ProviderSuggestion.suggest(sanitised_query)
.map { |provider| { code: provider.provider_code, name: provider.provider_name } }
render json: suggestions
end
def suggest_any
return render(json: { error: "Bad request" }, status: :bad_request) if params_invalid?
sanitised_query = CGI.escape(params[:query])
suggestions = ProviderSuggestion.suggest_any(sanitised_query)
.map { |provider| { code: provider.provider_code, name: provider.provider_name } }
render json: suggestions
end
def suggest_any_accredited_body
return render(json: { error: "Bad request" }, status: :bad_request) if params_invalid?
sanitised_query = CGI.escape(params[:query])
suggestions = ProviderSuggestion.suggest_any_accredited_body(sanitised_query)
.map { |provider| { code: provider.provider_code, name: provider.provider_name } }
render json: suggestions
end
private
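# The query parameter must be present and at least three characters long.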
def params_invalid?
params[:query].nil? || params[:query].length < 3
end
def handle_error_request
render json: []
end
end
| 33.195122 | 90 | 0.735489 |
e8f0b628c330256254652250c523767184f9489a | 231 | require 'jekyll/scholar'
require 'uri'
module MarkdownFilter
class Markdown < BibTeX::Filter
def apply(value)
value.to_s.gsub(URI.regexp(['http','https','ftp'])) { |c| "<a href=\"#{$&}\">#{$&}</a>" }
end
end
end
| 21 | 95 | 0.597403 |
26c9f0ec91b5cf41dde739c5a4dd8e77b6286756 | 1,166 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "basketball_score_scraper/version"
Gem::Specification.new do |spec|
spec.name = "basketball_score_scraper"
spec.version = BasketballScoreScraper::VERSION
spec.authors = ["anupkhadka"]
spec.email = ["[email protected]"]
spec.summary = %q{A CLI gem that scrapes last night's score from basketball-reference.com.}
spec.homepage = "https://github.com/anupkhadka/basketball_score_scraper"
spec.license = "MIT"
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "bin"
spec.executables = "basketball_score_scraper"
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 2.0"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_dependency "nokogiri", "~> 1.10"
end
| 40.206897 | 98 | 0.67753 |
edf2ba9702868de0195623dcceb6f0ac9c1fefb4 | 1,335 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v7/errors/keyword_plan_campaign_error.proto
require 'google/api/annotations_pb'
require 'google/protobuf'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v7/errors/keyword_plan_campaign_error.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v7.errors.KeywordPlanCampaignErrorEnum" do
end
add_enum "google.ads.googleads.v7.errors.KeywordPlanCampaignErrorEnum.KeywordPlanCampaignError" do
value :UNSPECIFIED, 0
value :UNKNOWN, 1
value :INVALID_NAME, 2
value :INVALID_LANGUAGES, 3
value :INVALID_GEOS, 4
value :DUPLICATE_NAME, 5
value :MAX_GEOS_EXCEEDED, 6
value :MAX_LANGUAGES_EXCEEDED, 7
end
end
end
module Google
module Ads
module GoogleAds
module V7
module Errors
KeywordPlanCampaignErrorEnum = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v7.errors.KeywordPlanCampaignErrorEnum").msgclass
KeywordPlanCampaignErrorEnum::KeywordPlanCampaignError = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v7.errors.KeywordPlanCampaignErrorEnum.KeywordPlanCampaignError").enummodule
end
end
end
end
end
| 37.083333 | 222 | 0.758801 |
262243ad430e94f7a764d85d040ce72299d42c36 | 116 | require_relative 'dto_schema/schema'
module DTOSchema
def self.define(&block)
Schema.new(&block)
end
end
| 11.6 | 36 | 0.732759 |
260ad21d08ad1d001ceec3a098145bab89432786 | 936 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html.
# Run `pod lib lint flutter_blue_elves.podspec' to validate before publishing.
#
Pod::Spec.new do |s|
s.name = 'flutter_blue_elves'
s.version = '0.0.1'
s.summary = 'A new flutter plugin project.'
s.description = <<-DESC
A new flutter plugin project.
DESC
s.homepage = 'http://example.com'
s.license = { :file => '../LICENSE' }
s.author = { 'Your Company' => '[email protected]' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'
s.platform = :ios, '8.0'
# Flutter.framework does not contain a i386 slice. Only x86_64 simulators are supported.
s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' }
end
| 39 | 104 | 0.604701 |
7939bd9373e786eb899b7e1ea1cc567414986b26 | 1,789 | # Puma can serve each request in a thread from an internal thread pool.
# The `threads` method setting takes two numbers: a minimum and maximum.
# Any libraries that use thread pools should be configured to match
# the maximum value specified for Puma. Default is set to 5 threads for minimum
# and maximum; this matches the default thread size of Active Record.
#
max_threads_count = ENV.fetch("RAILS_MAX_THREADS") { 5 }
min_threads_count = ENV.fetch("RAILS_MIN_THREADS") { max_threads_count }
threads min_threads_count, max_threads_count
# Specifies the `worker_timeout` threshold that Puma will use to wait before
# terminating a worker in development environments.
#
worker_timeout 3600 if ENV.fetch("RAILS_ENV", "development") == "development"
# Specifies the `port` that Puma will listen on to receive requests; default is 3000.
#
port ENV.fetch("PORT") { 13000 }
# Specifies the `environment` that Puma will run in.
#
environment ENV.fetch("RAILS_ENV") { "development" }
# Specifies the `pidfile` that Puma will use.
pidfile ENV.fetch("PIDFILE") { "tmp/pids/server.pid" }
# Specifies the number of `workers` to boot in clustered mode.
# Workers are forked web server processes. If using threads and workers together
# the concurrency of the application would be max `threads` * `workers`.
# Workers do not work on JRuby or Windows (both of which do not support
# processes).
#
# workers ENV.fetch("WEB_CONCURRENCY") { 2 }
# Use the `preload_app!` method when specifying a `workers` number.
# This directive tells Puma to first boot the application and load code
# before forking the application. This takes advantage of Copy On Write
# process behavior so workers use less memory.
#
# preload_app!
# Allow puma to be restarted by `rails restart` command.
plugin :tmp_restart
| 40.659091 | 85 | 0.764673 |
acaaabaa49774bff9a2e5249dc7baa44dcde2a3c | 5,615 | require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
module Configliere ; module Crypter ; CIPHER_TYPE = 'aes-128-cbc' ; end ; end
describe "Configliere::Encrypted", :if => check_openssl do
require 'configliere/crypter'
before do
@config = Configliere::Param.new :secret => 'encrypt_me', :normal_param => 'normal'
@config.use :encrypted
@config.define :secret, :encrypted => true
@config[:encrypt_pass] = 'pass'
end
if Configliere::Crypter::CIPHER_TYPE == 'aes-256-cbc'
let(:encrypted_str){ "KohCTcXr1aAulopntmZ8f5Gqa7PzsBmz+R2vFGYrAeg=\n" }
let(:encrypted_foo_val){ "cc+Bp5jMUBHFCvPNZIfleeatB4IGaaXjVINl12HOpcs=\n" }
elsif Configliere::Crypter::CIPHER_TYPE == 'aes-128-cbc'
let(:encrypted_str){ "mHse6HRTANh8JpIfIuyANQ8b2rXAf0+/3pzQnYsd8LE=\n" }
let(:encrypted_foo_val){ "cc+Bp5jMUBHFCvPNZIfleZYRoDmLK1LSxPkAMemhDTQ=\n" }
else
warn "Can't make test strings for #{Configliere::Crypter::CIPHER_TYPE} cipher"
end
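# The first 16 bytes of the Base64-decoded fixture are the initialization vector used to generate it.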
let(:foo_val_iv){ Base64.decode64(encrypted_foo_val)[0..15] }
describe "Crypter" do
it "encrypts" do
# Force the same initialization vector as used to prepare the test value
@cipher = Configliere::Crypter.send(:new_cipher, :encrypt, 'sekrit')
Configliere::Crypter.should_receive(:new_cipher).and_return(@cipher)
@cipher.should_receive(:random_iv).and_return foo_val_iv
# OK so do the test now.
Configliere::Crypter.encrypt('foo_val', 'sekrit').should == encrypted_foo_val
end
it "decrypts" do
Configliere::Crypter.decrypt(encrypted_foo_val, 'sekrit').should == 'foo_val'
end
end
describe 'defines encrypted params' do
it 'with :encrypted => true' do
@config.send(:encrypted_params).should include(:secret)
end
it 'but not if :encrypted => false' do
@config.define :another_param, :encrypted => false
@config.send(:encrypted_params).should_not include(:another_param)
@config.send(:encrypted_params).should include(:secret)
end
it 'only if :encrypted is given' do
@config.send(:encrypted_params).should_not include(:missing_param)
end
end
describe 'the encrypt_pass' do
it 'will take an environment variable if any exists' do
@config[:encrypt_pass] = nil
ENV.should_receive(:[]).with('ENCRYPT_PASS').at_least(:once).and_return('monkey')
@config.send(:export)
@config.send(:instance_variable_get, "@encrypt_pass").should == 'monkey'
end
it 'will take an internal value if given, and remove it' do
@config[:encrypt_pass] = 'hello'
@config.send(:export)
@config.send(:instance_variable_get, "@encrypt_pass").should == 'hello'
@config[:encrypt_pass].should be_nil
@config.has_key?(:encrypt_pass).should_not be_true
end
end
describe 'encrypts' do
it 'all params with :encrypted' do
Configliere::Crypter.should_receive(:encrypt).with('encrypt_me', 'pass').and_return('ok_encrypted')
@config.send(:export).should == { :normal_param => 'normal', :encrypted_secret => 'ok_encrypted'}
end
it 'fails unless encrypt_pass is set' do
# create the config but don't set an encrypt_pass
@config = Configliere::Param.new :secret => 'encrypt_me', :normal_param => 'normal'
@config.use :encrypted
lambda{ @config.send(:encrypted, @config[:secret]) }.should raise_error('Missing encryption password!')
end
end
describe 'decrypts' do
it 'all params marked encrypted' do
@config.delete :secret
@config.defaults :encrypted_secret => 'decrypt_me'
Configliere::Crypter.should_receive(:decrypt).with('decrypt_me', 'pass').and_return('ok_decrypted')
@config.send(:resolve_encrypted!)
@config.should == { :normal_param => 'normal', :secret => 'ok_decrypted' }
end
end
describe 'loading a file' do
it 'encrypts' do
Configliere::Crypter.should_receive(:encrypt).and_return(encrypted_str)
FileUtils.stub(:mkdir_p)
File.should_receive(:open).and_yield([])
YAML.should_receive(:dump).with({ :normal_param => "normal", :encrypted_secret => encrypted_str })
@config.save! '/fake/file'
end
it 'decrypts' do
# encrypted_str = Configliere::Crypter.encrypt('decrypt_me', 'pass')
@hsh = { :loaded_param => "loaded", :encrypted_secret => encrypted_str }
File.stub(:open)
YAML.should_receive(:load).and_return(@hsh)
@config.read 'file.yaml'
@config.resolve!
@config.should_not include(:encrypted_secret)
@config.should == { :loaded_param => "loaded", :secret => 'decrypt_me', :normal_param => 'normal' }
end
end
describe '#resolve!' do
it 'calls super and returns self' do
Configliere::ParamParent.class_eval do def resolve!() dummy ; end ; end
@config.should_receive(:dummy)
@config.resolve!.should equal(@config)
Configliere::ParamParent.class_eval do def resolve!() self ; end ; end
end
it 'removes the encrypt_pass from sight' do
@config[:encrypt_pass] = 'hello'
@config.resolve!
@config.send(:instance_variable_get, "@encrypt_pass").should == 'hello'
@config[:encrypt_pass].should be_nil
@config.has_key?(:encrypt_pass).should_not be_true
end
end
describe '#validate!' do
it 'calls super and returns self' do
Configliere::ParamParent.class_eval do def validate!() dummy ; end ; end
@config.should_receive(:dummy)
@config.validate!.should equal(@config)
Configliere::ParamParent.class_eval do def validate!() self ; end ; end
end
end
end
| 38.993056 | 109 | 0.682814 |
ff426c43f23b5dd9a8cd34d7ffe1f8c63ae37ac9 | 1,141 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "helper"
require "google/cloud/recommender"
require "gapic/common"
require "gapic/grpc"
describe Google::Cloud::Recommender do
it "constructs a recommender client with the default version" do
Gapic::ServiceStub.stub :new, :stub do
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
client = Google::Cloud::Recommender.recommender_service do |config|
config.credentials = grpc_channel
end
client.must_be_kind_of Google::Cloud::Recommender::V1::Recommender::Client
end
end
end
| 36.806452 | 93 | 0.751096 |
e2a71ff3c8bb632be05366e5f6dd2cc8f028bc8f | 3,021 | #==
# Copyright (C) 2008 James S Urquhart
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#++
class Tag < ActiveRecord::Base
include ActionController::UrlWriter
belongs_to :page
belongs_to :created_by, :class_name => 'User', :foreign_key => 'created_by_id'
belongs_to :rel_object, :polymorphic => true
def objects
return Tag.find_objects(self.name)
end
def self.find_objects(tag_name, page)
Tag.find(:all, :conditions => {'name' => tag_name, 'page_id' => page}).collect do |tag|
tag.rel_object
end
end
def self.clear_by_object(object)
Tag.delete_all({'rel_object_type' => object.class.to_s, 'rel_object_id' => object.id})
end
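# Replaces the object's existing tags with taglist, attributing them to the object's updated_by/created_by user unless force_user is given.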
def self.set_to_object(object, taglist, force_user=0)
self.clear_by_object(object)
page_id = (object.class == Page) ? nil : object.page_id
set_user = force_user == 0 ? (object.updated_by.nil? ? object.created_by : object.updated_by) : force_user
Tag.transaction do
taglist.each do |tag_name|
Tag.create(:name => tag_name.strip, :page_id => page_id, :rel_object => object, :created_by => set_user)
end
end
end
def self.list_by_object(object)
Tag.find(:all, :conditions => {'rel_object_type' => object.class.to_s, 'rel_object_id' => object.id}).collect do |tag|
tag.name
end
end
def self.list_in_page(page)
Tag.find(:all, :conditions => {'page_id' => page}, :group => 'name').collect do |tag|
tag.name
end
end
def self.count_by(tag_name, page)
Tag.find(:all, :conditions => {'name' => tag_name, 'page_id' => page}, :select => 'id').length
end
def self.find_object_join(model)
"INNER JOIN tags ON tags.rel_object_type = '#{model.to_s}' AND tags.rel_object_id = #{model.table_name}.id"
end
def self.find_page_join
'INNER JOIN tags ON tags.page_id = pages.id'
end
end
| 33.94382 | 120 | 0.709037 |
e8f560f6ccba3cc61ab6b33762e51b0b7fe64e9d | 2,419 | module Chosen
module Rspec
module FeatureHelpers
def chosen_select(value, *args)
options = args.extract_options!
input = chosen_input(options)
args.unshift(value).uniq.each { |item| chosen_select!(input, item) }
end
def chosen_unselect(value, *args)
options = args.extract_options!
input = chosen_input(options)
args.unshift(value).uniq.each { |item| chosen_unselect!(input, item) }
end
private
def chosen_input(options)
fail ArgumentError, 'Required argument from: not set' unless options.has_key?(:from)
from = options.delete(:from)
begin
input = chosen_find_container(from, options)
rescue Capybara::ElementNotFound
input = chosen_find_input(from, options)
end
end
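# Chosen renders its widget in a container whose DOM id is the underlying select's id with a "_chosen" suffix.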
def chosen_find_container(from, options)
from = from.to_s
id = from
id = "##{id}" unless from.start_with?('#')
id = "#{id}_chosen" unless from.end_with?('_chosen')
find(:css, id, options)
rescue Capybara::ElementNotFound
label = find('label', { text: from }.merge(options))
find(:css, "##{label[:for]}_chosen", options)
end
def chosen_find_input(from, options)
from = from.to_s
from = "##{from}" unless from.start_with?('#')
find(:css, from, options)
end
def chosen_multiselect?(input)
input.first('.chosen-container-multi').present?
end
def chosen_select!(input, item)
if input.tag_name == 'select'
input.find(:option, item).select_option
else
input.click
within "##{input[:id]} .chosen-drop .chosen-results" do
result = find('.active-result', text: item, match: :prefer_exact)
result.click if result.visible?
end
end
end
def chosen_unselect!(input, item)
if input.tag_name == 'select'
input.find(:option, item).unselect_option
else
if chosen_multiselect?(input)
input.first('.search-choice', text: item)
.first('.search-choice-close')
.click
else
input.first('.search-choice-close').click
end
end
end
end
end
end
RSpec.configure do |config|
config.include Chosen::Rspec::FeatureHelpers, type: :feature
end
| 26.010753 | 92 | 0.587019 |
110bfc5a84206bd4917692122e53c59ede97bf8a | 3,743 | module FoodCritic
# Encapsulates functions that previously were calls to the Chef gem.
module Chef
def chef_dsl_methods
load_metadata
@dsl_metadata[:dsl_methods].map(&:to_sym)
end
def chef_node_methods
load_metadata
@dsl_metadata[:node_methods].map(&:to_sym)
end
# Is the specified action valid for the type of resource?
def resource_action?(resource_type, action)
resource_check?(:actions, resource_type, action)
end
# Is the specified attribute valid for the type of resource?
def resource_attribute?(resource_type, attribute_name)
resource_check?(:attributes, resource_type, attribute_name)
end
# Is this a valid Lucene query?
def valid_query?(query)
fail ArgumentError, 'Query cannot be nil or empty' if query.to_s.empty?
# Attempt to create a search query parser
search = FoodCritic::Chef::Search.new
search.create_parser(search.chef_search_grammars)
if search.parser?
search.parser.parse(query.to_s)
else
# If we didn't manage to get a parser then we can't know if the query
# is valid or not.
true
end
end
private
# To avoid the runtime hit of loading the Chef gem and its dependencies
# we load the DSL metadata from a JSON file shipped with our gem.
#
# The DSL metadata doesn't necessarily reflect the version of Chef in the
# local user gemset.
def load_metadata
version = if self.respond_to?(:chef_version)
chef_version
else
Linter::DEFAULT_CHEF_VERSION
end
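# Try the exact Chef version first, then the version with any pre-release suffix stripped, and finally the default bundled metadata.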
metadata_path = [version, version.sub(/\.[a-z].*/, ''),
Linter::DEFAULT_CHEF_VERSION].map do |version|
metadata_path(version)
end.find { |m| File.exist?(m) }
@dsl_metadata ||= Yajl::Parser.parse(IO.read(metadata_path),
symbolize_keys: true)
end
def metadata_path(chef_version)
File.join(File.dirname(__FILE__), '..', '..',
"chef_dsl_metadata/chef_#{chef_version}.json")
end
def resource_check?(key, resource_type, field)
if resource_type.to_s.empty? || field.to_s.empty?
fail ArgumentError, 'Arguments cannot be nil or empty.'
end
load_metadata
resource_fields = @dsl_metadata[key]
# If the resource type is not recognised then it may be a user-defined
# resource. We could introspect these but at present we simply return
# true.
return true unless resource_fields.include?(resource_type.to_sym)
# Otherwise the resource field must exist in our metadata to succeed
resource_fields[resource_type.to_sym].include?(field.to_s)
end
class Search
# lucene.treetop used to be provided by chef gem
# We're keeping a local copy from chef 10.x
def chef_search_grammars
[File.expand_path('../../..', __FILE__) + "/misc/lucene.treetop"]
end
# Create the search parser from the first loadable grammar.
def create_parser(grammar_paths)
@search_parser ||=
grammar_paths.inject(nil) do |parser, lucene_grammar|
begin
break parser unless parser.nil?
# Don't instantiate custom nodes
Treetop.load_from_string(
IO.read(lucene_grammar).gsub(/<[^>]+>/, ''))
LuceneParser.new
rescue
# Silently swallow and try the next grammar
end
end
end
# Has the search parser been loaded?
def parser?
! @search_parser.nil?
end
# The search parser
def parser
@search_parser
end
end
end
end
| 31.453782 | 77 | 0.633716 |
ac6a8bd01a99e82e3374c8b1e28059c7b0237fe0 | 3,791 | class Portal::District < ApplicationRecord
self.table_name = :portal_districts
acts_as_replicatable
has_many :schools, -> { order :name },
dependent: :destroy,
class_name: 'Portal::School',
foreign_key: 'district_id'
belongs_to :nces_district, :class_name => "Portal::Nces06District", :foreign_key => "nces_district_id"
scope :real, -> { where('nces_district_id is NOT NULL').includes(:schools).order("name") }
scope :virtual, -> { where('nces_district_id is NULL').includes(:schools).order("name") }
include Changeable
self.extend SearchableModel
@@searchable_attributes = %w{uuid name description}
class <<self
def searchable_attributes
@@searchable_attributes
end
##
## Given an NCES local district id that matches the STID field in an NCES district
## find and return the first district that is associated with the NCES district or nil.
##
## example:
##
## Portal::District.find_by_state_and_nces_local_id('RI', 39).name
## => "Woonsocket"
##
def find_by_state_and_nces_local_id(state, local_id)
nces_district = Portal::Nces06District
.where(:STID => local_id, :LSTATE => state)
.select("id, LEAID, STID, NAME, LSTATE")
.first
if nces_district
where(:nces_district_id => nces_district.id).first
end
end
##
## Given a district name that matches the NAME field in an NCES district find
## and return the first district that is associated with the NCES district or nil.
##
## example:
##
## Portal::District.find_by_state_and_district_name('RI', "Woonsocket").nces_local_id
## => "39"
##
def find_by_state_and_district_name(state, district_name)
nces_district = Portal::Nces06District.where(:NAME => district_name.upcase, :LSTATE => state)
.select("id, LEAID, STID, NAME, LSTATE").first
if nces_district
where(:nces_district_id => nces_district.id).first
end
end
##
## given a NCES district, either find or create a portal_distrcit for it.
##
def find_or_create_using_nces_district(nces_district)
found_instance = where(:nces_district_id => nces_district.id).first
unless found_instance
attributes = {
:name => nces_district.NAME,
:description => "imported from nces data",
:nces_district_id => nces_district.id,
:state => nces_district.LSTATE,
:leaid => nces_district.LEAID,
:zipcode => nces_district.LZIP
}
found_instance = self.create(attributes)
found_instance.save!
end
found_instance
end
def default
Portal::District.where(name: 'default').first_or_create
end
def find_by_similar_or_new(attrs,username='automatic process')
found = Portal::District.where(attrs).first
unless found
attrs[:description] ||= "created by #{username}"
found = Portal::District.new(attrs)
end
found
end
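# Matches district names case-insensitively, ignoring non-letter characters, and builds a new district when no match is found.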
def find_by_similar_name_or_new(name,username='automatic process')
sql = "SELECT id, name FROM portal_districts"
all_names = Portal::District.find_by_sql(sql)
found = all_names.detect { |s| s.name.upcase.gsub(/[^A-Z]/,'') == name.upcase.gsub(/[^A-Z]/,'') }
unless found
found = Portal::District.new(:name => name, :description => "#{name} created by #{username}")
end
found
end
end
def virtual?
nces_district_id.nil?
end
def real?
! virtual?
end
# if the district is a 'real' district return the NCES local district id
def nces_local_id
real? ? nces_district.STID : nil
end
end
| 31.591667 | 104 | 0.636508 |
e8556462c0dca1d7fd537b16c7bbba7613dcb856 | 551 |
Pod::Spec.new do |s|
s.name = "SFBNetwork"
s.version = "0.0.1"
s.summary = "A network framework based on AFNetworking."
s.homepage = "https://github.com/SoftBoys/SFBNetwork"
s.source = { :git => "https://github.com/SoftBoys/SFBNetwork.git", :tag => s.version.to_s }
s.license = "MIT"
s.author = { "SoftBoys" => "[email protected]" }
s.platform = :ios, "8.0"
s.source_files = "SFBNetwork"
s.requires_arc = true
s.dependency "AFNetworking", "~> 3.1.0"
end
| 22.04 | 101 | 0.557169 |
4a95abab45ec6c4078962036c332291b5ebd1627 | 656 | ##
# This file is part of WhatWeb and may be subject to
# redistribution and commercial restrictions. Please see the WhatWeb
# web site for more information on licensing and terms of use.
# http://www.morningstarsecurity.com/research/whatweb
##
Plugin.define "x-hacker" do
author "Brendan Coles <[email protected]>" # 2011-04-08
version "0.1"
description "This plugin identifies the X-Hacker HTTP header and returns its value."
# ShodanHQ results as at 2011-04-08 #
# 23 for x-hacker
# Passive #
def passive
m=[]
# X-Hacker HTTP Header
m << { :string=>@headers["x-hacker"] } unless @headers["x-hacker"].nil?
# Return passive matches
m
end
end
| 21.866667 | 84 | 0.724085 |
4aef952861afcae73eaa094c259cf78b6eff9ef6 | 1,445 | # frozen_string_literal: true
require 'test_helper'
require 'active_record'
ActiveRecord::Base.establish_connection(adapter: 'sqlite3', database: ':memory:')
ActiveRecord::Migration.verbose = false
ActiveRecord::Schema.define do
create_table(:users) do |t|
t.string :first_name
t.string :last_name
end
end
ActiveRecord::Schema.define do
create_table(:widgets) do |t|
t.integer :user_id
t.boolean :runcible
t.string :color
t.decimal :size
t.text :description
end
end
class User < ActiveRecord::Base
has_many :widgets
end
class Widget < ActiveRecord::Base
belongs_to :user
end
describe Arrangement::Composer do
describe '#new' do
it 'should load default schemas' do
composer = Arrangement::Composer.new(schemas_directory_path: 'test/schemas')
assert composer.defaults.is_a?(Arrangement::Schema)
assert_equal(
{ id: 1, first_name: 'BobA', last_name: 'Jones' },
composer.defaults[:user].to_h
)
assert_equal(
{ id: 2, first_name: 'BobB', last_name: 'Jones' },
composer.defaults[:user].to_h
)
assert_equal(1, composer.defaults[:widget].to_h[:id])
assert_equal(2, composer.defaults[:widget].to_h[:id])
end
end
describe '#create' do
it 'creates a top-level object' do
composer = Arrangement::Composer.new
user = composer.create(user: {})
assert user.is_a?(User)
end
end
end
| 22.230769 | 82 | 0.675433 |
ff3c1e7088364a4c4deab0f7c6c59d9722636ea9 | 625 | ActiveAdmin.register AdminUser do
permit_params :email, :password, :password_confirmation
index do
selectable_column
id_column
column :email
column :current_sign_in_at
column :sign_in_count
column :created_at
actions
end
filter :email
filter :current_sign_in_at
filter :sign_in_count
filter :created_at
form do |f|
f.inputs do
f.input :email
f.input :password
f.input :password_confirmation
end
f.actions
end
csv do
column :email
column :created_at
end
controller do
def csv_filename
'AdminUsers.csv'
end
end
end
| 15.625 | 57 | 0.6816 |
d50025a65617c698a047dcff87a3d6092eb7fc70 | 1,288 | # Encoding: UTF-8
require 'chefspec'
require 'chefspec/berkshelf'
ChefSpec::Coverage.start! { add_filter 'openstack-identity' }
LOG_LEVEL = :fatal
SUSE_OPTS = {
platform: 'suse',
version: '11.03',
log_level: LOG_LEVEL
}
REDHAT_OPTS = {
platform: 'redhat',
version: '6.3',
log_level: LOG_LEVEL
}
UBUNTU_OPTS = {
platform: 'ubuntu',
version: '12.04',
log_level: LOG_LEVEL
}
# Helper methods
module Helpers
# Create an anchored regex to exactly match the entire line
# (name borrowed from grep --line-regexp)
#
# @param [String] str The whole line to match
# @return [Regexp] The anchored/escaped regular expression
def line_regexp(str)
/^#{Regexp.quote(str)}$/
end
end
shared_context 'identity_stubs' do
before do
::Chef::Recipe.any_instance.stub(:memcached_servers).and_return []
::Chef::Recipe.any_instance.stub(:get_password)
.with('db', anything)
.and_return('')
::Chef::Recipe.any_instance.stub(:get_password)
.with('user', anything)
.and_return('')
::Chef::Recipe.any_instance.stub(:get_password)
.with('user', 'user1')
.and_return('secret1')
::Chef::Recipe.any_instance.stub(:get_secret)
.with('openstack_identity_bootstrap_token')
.and_return('bootstrap-token')
end
end
| 24.301887 | 70 | 0.682453 |
d5ccc47a4b7e4b3a5dc993961e166fdcda2ba227 | 85 | # desc "Explaining what the task does"
# task :gamfora do
# # Task goes here
# end
| 17 | 38 | 0.670588 |
610099598445336dac9c6d592418cf34165071e6 | 513 | # frozen_string_literal: true
module Collections
# Displays a single table row representing a work in a collection.
class WorkRowComponent < ApplicationComponent
with_collection_parameter :work_version
def initialize(work_version:)
@work_version = work_version
end
attr_reader :work_version
# Returns the size of the attached files
def size
number_to_human_size(attached_files.sum(&:byte_size))
end
delegate :work, :attached_files, to: :work_version
end
end
| 23.318182 | 68 | 0.744639 |
5d457956ba36d54e0bed373a12e1062731837c3d | 1,190 | class Streamripper < Formula
desc "Separate tracks via Shoutcasts title-streaming"
homepage "http://streamripper.sourceforge.net/"
url "https://downloads.sourceforge.net/sourceforge/streamripper/streamripper-1.64.6.tar.gz"
sha256 "c1d75f2e9c7b38fd4695be66eff4533395248132f3cc61f375196403c4d8de42"
bottle do
cellar :any
sha256 "3f055510dc825aa663f35c91aa5f4e5a57baacd69c00f0c428df4a98ad9b6a7e" => :sierra
sha256 "eff1bb37cd652e9b3194e2fda3c682bda9da12f413a11c4e5e337c9bc849b2ea" => :el_capitan
sha256 "3465e96b5f17000df88a85729674f911097ab9f1b0170a0c3c89f4892dba6fbb" => :yosemite
sha256 "a92b924639b9210e83cae1e63baa8f9b45ab4ec38816e19e32ad6fbae420a510" => :mavericks
end
depends_on "pkg-config" => :build
depends_on "glib"
def install
# the Makefile ignores CPPFLAGS from the environment, which
# breaks the build when HOMEBREW_PREFIX is not /usr/local
ENV.append_to_cflags ENV.cppflags
chmod 0755, "./install-sh" # or "make install" fails
system "./configure", "--prefix=#{prefix}",
"--disable-debug",
"--disable-dependency-tracking"
system "make", "install"
end
end
| 38.387097 | 93 | 0.740336 |
ac60adc789de7f0eae7e1e4c21a340020fbc0d86 | 10,065 | # encoding: utf-8
require File.expand_path('../spec_helper', __FILE__)
require 'rubygems/mock_gem_ui'
describe ChildProcess do
here = File.dirname(__FILE__)
let(:gemspec) { eval(File.read "#{here}/../childprocess.gemspec") }
it 'validates cleanly' do
mock_ui = Gem::MockGemUi.new
Gem::DefaultUserInteraction.use_ui(mock_ui) { gemspec.validate }
expect(mock_ui.error).to_not match(/warn/i)
end
it "returns self when started" do
process = sleeping_ruby
expect(process.start).to eq process
expect(process).to be_alive
end
# We can't detect failure to execve() when using posix_spawn() on Linux
# without waiting for the child to exit with code 127.
#
# See e.g. http://repo.or.cz/w/glibc.git/blob/669704fd:/sysdeps/posix/spawni.c#l34
#
# We could work around this by doing the PATH search ourselves, but not sure
# it's worth it.
it "raises ChildProcess::LaunchError if the process can't be started", :posix_spawn_on_linux => false do
expect { invalid_process.start }.to raise_error(ChildProcess::LaunchError)
end
it 'raises ArgumentError if given a non-string argument' do
expect { ChildProcess.build(nil, "unlikelytoexist") }.to raise_error(ArgumentError)
expect { ChildProcess.build("foo", 1) }.to raise_error(ArgumentError)
end
it "knows if the process crashed" do
process = exit_with(1).start
process.wait
expect(process).to be_crashed
end
it "knows if the process didn't crash" do
process = exit_with(0).start
process.wait
expect(process).to_not be_crashed
end
it "can wait for a process to finish" do
process = exit_with(0).start
return_value = process.wait
expect(process).to_not be_alive
expect(return_value).to eq 0
end
it 'ignores #wait if process already finished' do
process = exit_with(0).start
sleep 0.01 until process.exited?
expect(process.wait).to eql 0
end
it "escalates if TERM is ignored" do
process = ignored('TERM').start
process.stop
expect(process).to be_exited
end
it "accepts a timeout argument to #stop" do
process = sleeping_ruby.start
process.stop(exit_timeout)
end
it "lets child process inherit the environment of the current process" do
Tempfile.open("env-spec") do |file|
with_env('INHERITED' => 'yes') do
process = write_env(file.path).start
process.wait
end
child_env = eval rewind_and_read(file)
expect(child_env['INHERITED']).to eql 'yes'
end
end
it "can override env vars only for the current process" do
Tempfile.open("env-spec") do |file|
process = write_env(file.path)
process.environment['CHILD_ONLY'] = '1'
process.start
expect(ENV['CHILD_ONLY']).to be_nil
process.wait
child_env = eval rewind_and_read(file)
expect(child_env['CHILD_ONLY']).to eql '1'
end
end
it "inherits the parent's env vars also when some are overridden" do
Tempfile.open("env-spec") do |file|
with_env('INHERITED' => 'yes', 'CHILD_ONLY' => 'no') do
process = write_env(file.path)
process.environment['CHILD_ONLY'] = 'yes'
process.start
process.wait
child_env = eval rewind_and_read(file)
expect(child_env['INHERITED']).to eq 'yes'
expect(child_env['CHILD_ONLY']).to eq 'yes'
end
end
end
it "can unset env vars" do
Tempfile.open("env-spec") do |file|
ENV['CHILDPROCESS_UNSET'] = '1'
process = write_env(file.path)
process.environment['CHILDPROCESS_UNSET'] = nil
process.start
process.wait
child_env = eval rewind_and_read(file)
expect(child_env).to_not have_key('CHILDPROCESS_UNSET')
end
end
it 'does not see env vars unset in parent' do
Tempfile.open('env-spec') do |file|
ENV['CHILDPROCESS_UNSET'] = nil
process = write_env(file.path)
process.start
process.wait
child_env = eval rewind_and_read(file)
expect(child_env).to_not have_key('CHILDPROCESS_UNSET')
end
end
it "passes arguments to the child" do
args = ["foo", "bar"]
Tempfile.open("argv-spec") do |file|
process = write_argv(file.path, *args).start
process.wait
expect(rewind_and_read(file)).to eql args.inspect
end
end
it "lets a detached child live on" do
p_pid = nil
c_pid = nil
Tempfile.open('grandparent_out') do |gp_file|
# Create a parent and detached child process that will spit out their PID. Make sure that the child process lasts longer than the parent.
p_process = ruby("require 'childprocess' ; c_process = ChildProcess.build('ruby', '-e', 'puts \\\"Child PID: \#{Process.pid}\\\" ; sleep 5') ; c_process.io.inherit! ; c_process.detach = true ; c_process.start ; puts \"Child PID: \#{c_process.pid}\" ; puts \"Parent PID: \#{Process.pid}\"")
p_process.io.stdout = p_process.io.stderr = gp_file
# Let the parent process die
p_process.start
p_process.wait
# Gather parent and child PIDs
pids = rewind_and_read(gp_file).split("\n")
pids.collect! { |pid| pid[/\d+/].to_i }
c_pid, p_pid = pids
end
# Check that the parent process has dies but the child process is still alive
expect(alive?(p_pid)).to_not be true
expect(alive?(c_pid)).to be true
end
it "preserves Dir.pwd in the child" do
Tempfile.open("dir-spec-out") do |file|
process = ruby("print Dir.pwd")
process.io.stdout = process.io.stderr = file
expected_dir = nil
Dir.chdir(Dir.tmpdir) do
expected_dir = Dir.pwd
process.start
end
process.wait
expect(rewind_and_read(file)).to eq expected_dir
end
end
it "can handle whitespace, special characters and quotes in arguments" do
args = ["foo bar", 'foo\bar', "'i-am-quoted'", '"i am double quoted"']
Tempfile.open("argv-spec") do |file|
process = write_argv(file.path, *args).start
process.wait
expect(rewind_and_read(file)).to eq args.inspect
end
end
it 'handles whitespace in the executable name' do
path = File.expand_path('foo bar')
with_executable_at(path) do |proc|
expect(proc.start).to eq proc
expect(proc).to be_alive
end
end
it "times out when polling for exit" do
process = sleeping_ruby.start
expect { process.poll_for_exit(0.1) }.to raise_error(ChildProcess::TimeoutError)
end
it "can change working directory" do
process = ruby "print Dir.pwd"
with_tmpdir { |dir|
process.cwd = dir
orig_pwd = Dir.pwd
Tempfile.open('cwd') do |file|
process.io.stdout = file
process.start
process.wait
expect(rewind_and_read(file)).to eq dir
end
expect(Dir.pwd).to eq orig_pwd
}
end
it 'kills the full process tree', :process_builder => false do
Tempfile.open('kill-process-tree') do |file|
process = write_pid_in_sleepy_grand_child(file.path)
process.leader = true
process.start
pid = wait_until(30) do
Integer(rewind_and_read(file)) rescue nil
end
process.stop
wait_until(3) { expect(alive?(pid)).to eql(false) }
end
end
it 'releases the GIL while waiting for the process' do
time = Time.now
threads = []
threads << Thread.new { sleeping_ruby(1).start.wait }
threads << Thread.new(time) { expect(Time.now - time).to be < 0.5 }
threads.each { |t| t.join }
end
it 'can check if a detached child is alive' do
proc = ruby_process("-e", "sleep")
proc.detach = true
proc.start
expect(proc).to be_alive
proc.stop(0)
expect(proc).to be_exited
end
it 'has a logger' do
expect(ChildProcess).to respond_to(:logger)
end
it 'can change its logger' do
expect(ChildProcess).to respond_to(:logger=)
original_logger = ChildProcess.logger
begin
ChildProcess.logger = :some_other_logger
expect(ChildProcess.logger).to eq(:some_other_logger)
ensure
ChildProcess.logger = original_logger
end
end
describe 'logger' do
before(:each) do
ChildProcess.logger = logger
end
after(:all) do
ChildProcess.logger = nil
end
context 'with the default logger' do
let(:logger) { nil }
it 'logs at INFO level by default' do
expect(ChildProcess.logger.level).to eq(Logger::INFO)
end
it 'logs at DEBUG level by default if $DEBUG is on' do
original_debug = $DEBUG
begin
$DEBUG = true
expect(ChildProcess.logger.level).to eq(Logger::DEBUG)
ensure
$DEBUG = original_debug
end
end
it "logs to stderr by default" do
cap = capture_std { generate_log_messages }
expect(cap.stdout).to be_empty
expect(cap.stderr).to_not be_empty
end
end
context 'with a custom logger' do
let(:logger) { Logger.new($stdout) }
it "logs to configured logger" do
cap = capture_std { generate_log_messages }
expect(cap.stdout).to_not be_empty
expect(cap.stderr).to be_empty
end
end
end
describe '#started?' do
subject { process.started? }
context 'when not started' do
let(:process) { sleeping_ruby(1) }
it { is_expected.to be false }
end
context 'when started' do
let(:process) { sleeping_ruby(1).start }
it { is_expected.to be true }
end
context 'when finished' do
before(:each) { process.wait }
let(:process) { sleeping_ruby(0).start }
it { is_expected.to be true }
end
end
end
| 25.67602 | 297 | 0.625236 |
d59c9d6c2b932ccbc637818c5e52344f3807cf16 | 99 | if defined?(Rack::Timeout)
Rack::Timeout::Logger.disable # these are verbose and unnecessary
end
| 24.75 | 67 | 0.767677 |
5d2114a77ca6e3653b1137a3c39a317940ed0bd5 | 147 | # Be sure to restart your server when you modify this file.
Rails.application.config.session_store :cookie_store, key: '_PaciferousPanel_session'
| 36.75 | 85 | 0.816327 |
1a69edb4260870b02baa081fccb1e0dc11f5dffc | 390 | class MapReduce<%= name.camelcase %>
include SkynetDebugger
def self.run
job = Skynet::Job.new(
:mappers => 2, #change to whatever is a good number
:reducers => 1, #same
:map_reduce_class => self,
:map_data => [] #acquire your data!
)
job.run
end
def self.map(profiles)
#map code
end
def self.reduce(pairs)
#reduce code
end
end
| 16.956522 | 57 | 0.607692 |
1d671a35f3e013e0dd8e6824a77ba45ad8abd891 | 166 | class AddTransportationRequestToAttendees < ActiveRecord::Migration
def change
add_column :attendees, :transportation_request, :string, :limit => 500
end
end
| 27.666667 | 74 | 0.789157 |
abfb224a49149ec8e7ea943260f40d8cdfbdc363 | 2,106 | module Benchmarks; end
class Benchmarks::CLI
class CLIRB # Version 1.0.0, https://github.com/redding/cli.rb
Error = Class.new(RuntimeError);
HelpExit = Class.new(RuntimeError); VersionExit = Class.new(RuntimeError)
attr_reader :argv, :args, :opts, :data
def initialize(&block)
@options = []; instance_eval(&block) if block
require "optparse"
@data, @args, @opts = [], [], {}; @parser = OptionParser.new do |p|
p.banner = ""; @options.each do |o|
@opts[o.name] = o.value; p.on(*o.parser_args){ |v| @opts[o.name] = v }
end
p.on_tail("--version", ""){ |v| raise VersionExit, v.to_s }
p.on_tail("--help", ""){ |v| raise HelpExit, v.to_s }
end
end
def option(*args); @options << Option.new(*args); end
def parse!(argv)
@args = (argv || []).dup.tap do |args_list|
begin; @parser.parse!(args_list)
rescue OptionParser::ParseError => err; raise Error, err.message; end
end; @data = @args + [@opts]
end
def to_s; @parser.to_s; end
def inspect
"#<#{self.class}:#{"0x0%x" % (object_id << 1)} @data=#{@data.inspect}>"
end
class Option
attr_reader :name, :opt_name, :desc, :abbrev, :value, :klass, :parser_args
def initialize(name, *args)
settings, @desc = args.last.kind_of?(::Hash) ? args.pop : {}, args.pop || ""
@name, @opt_name, @abbrev = parse_name_values(name, settings[:abbrev])
@value, @klass = gvalinfo(settings[:value])
@parser_args = if [TrueClass, FalseClass, NilClass].include?(@klass)
["-#{@abbrev}", "--[no-]#{@opt_name}", @desc]
else
["-#{@abbrev}", "--#{@opt_name} #{@opt_name.upcase}", @klass, @desc]
end
end
private
def parse_name_values(name, custom_abbrev)
[ (processed_name = name.to_s.strip.downcase), processed_name.gsub("_", "-"),
custom_abbrev || processed_name.gsub(/[^a-z]/, "").chars.first || "a"
]
end
def gvalinfo(v); v.kind_of?(Class) ? [nil,v] : [v,v.class]; end
end
end
end
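# A minimal usage sketch for CLIRB (the option names below are hypothetical and not
# part of the benchmark suite itself): declare options in the block, then parse ARGV.
#
#   cli = Benchmarks::CLI::CLIRB.new do
#     option :output, "file to write results to", abbrev: "o", value: "bench.txt"
#     option :verbose, "print progress while running"
#   end
#   cli.parse!(ARGV)
#   cli.opts # => e.g. { "output" => "bench.txt", "verbose" => true }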
| 35.694915 | 85 | 0.57075 |
bfbe234e83b87546cdfb53d97bb7f85df03c893e | 221 | class Brie
attr_reader :item
def initialize(item)
@item = item
end
def tick
item.sell_in -= 1
return if item.quality >= 50
item.quality += 1
item.quality += 1 if item.sell_in <= 0
end
end
| 13.8125 | 42 | 0.615385 |
f85faf4a599c3eaf108f12afae634a8b20e6d6cb | 951 | module RSwift
class TemplateManager
include Thor::Base
include Thor::Actions
attr_reader :name
def self.source_root
File.dirname(__FILE__)
end
def create_files_for_template(name, template)
@name = name
current_directory_path = File.dirname(__FILE__)
template_directory = File.join(current_directory_path, 'templates/app', template.to_s)
Dir.glob("#{template_directory}/**/*.erb", File::FNM_DOTMATCH).each do |template_path|
relative_template_path = template_path.sub(current_directory_path + '/', '')
relative_erb_file_path = template_path.sub(template_directory, '')
file_name = File.basename(relative_erb_file_path, '.erb')
relative_directory_path = File.dirname(relative_erb_file_path)
relative_file_path = File.join(name, relative_directory_path, file_name)
template relative_template_path, relative_file_path
end
end
end
end
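# Rough usage sketch (the app name and template identifier below are hypothetical;
# templates are looked up under templates/app/<template>/ relative to this file):
#
#   manager = RSwift::TemplateManager.new
#   manager.create_files_for_template('MyApp', :project)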
| 32.793103 | 92 | 0.719243 |
28223ebb8bd8562bfb74bf780e248b5cb2df5133 | 81 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'intact_case'
| 27 | 58 | 0.740741 |
1daf3302657ccfcad67942a6713a9ee100bcc328 | 416 | describe "issue_trackers/issue.txt.erb", type: 'view' do
let(:problem) do
problem = Fabricate(:problem)
Fabricate(:notice, err: Fabricate(:err, problem: problem))
problem
end
before do
allow(view).to receive(:problem).and_return(
ProblemDecorator.new(problem))
end
it "has the problem url" do
render
expect(rendered).to match(app_problem_url problem.app, problem)
end
end
| 23.111111 | 67 | 0.694712 |
1d7196badd33decc58e3dd07a136b79c7888f8d9 | 146 | class RemoveColumnStudentsCriminalExplanation < ActiveRecord::Migration
def change
remove_column :students, :criminal_explanation
end
end
| 24.333333 | 71 | 0.828767 |
1818fa76238bd01a7204c3d6dba55181d9bb92a3 | 2,444 | class Skopeo < Formula
desc "Work with remote images registries"
homepage "https://github.com/containers/skopeo"
url "https://github.com/containers/skopeo/archive/v0.1.40.tar.gz"
sha256 "ee1e33245938fcb622f5864fac860e2d8bfa2fa907af4b5ffc3704ed0db46bbf"
revision 1
bottle do
cellar :any
rebuild 1
sha256 "d382e6ed0f92e335d4b0c82bc535760dd7c32268f0b60dcb8ce7a09dbe4e1b5f" => :catalina
sha256 "2c766d15ff4b798021ddb5c2eff823cefbbb20afb92da2e86e8e747dc891abbd" => :mojave
sha256 "ae5437d6ba74269a0b82a85b365bbbdd0cf187107a44bf7119753d33e644947f" => :high_sierra
sha256 "720c24a373f5d5499b2ec30063f1e8ade7970fcfe883090f48aafc9a2f7be0d4" => :x86_64_linux
end
depends_on "go" => :build
depends_on "gpgme"
unless OS.mac?
depends_on "pkg-config" => :build
depends_on "device-mapper"
end
def install
ENV["GOPATH"] = buildpath
ENV["CGO_ENABLED"] = "1"
ENV.append "CGO_FLAGS", ENV.cppflags
ENV.append "CGO_FLAGS", Utils.popen_read("#{Formula["gpgme"].bin}/gpgme-config --cflags")
(buildpath/"src/github.com/containers/skopeo").install buildpath.children
cd buildpath/"src/github.com/containers/skopeo" do
buildtags = [
"containers_image_ostree_stub",
Utils.popen_read("hack/btrfs_tag.sh").chomp,
Utils.popen_read("hack/btrfs_installed_tag.sh").chomp,
Utils.popen_read("hack/libdm_tag.sh").chomp,
Utils.popen_read("hack/ostree_tag.sh").chomp,
].uniq.join(" ")
ldflags = [
"-X main.gitCommit=",
"-X github.com/containers/image/docker.systemRegistriesDirPath=#{etc/"containers/registries.d"}",
"-X github.com/containers/image/internal/tmpdir.unixTempDirForBigFiles=/var/tmp",
"-X github.com/containers/image/signature.systemDefaultPolicyPath=#{etc/"containers/policy.json"}",
"-X github.com/containers/image/pkg/sysregistriesv2.systemRegistriesConfPath=#{etc/"containers/registries.conf"}",
].join(" ")
system "go", "build", "-v", "-tags", buildtags, "-ldflags", ldflags, "-o", bin/"skopeo", "./cmd/skopeo"
(etc/"containers").install "default-policy.json" => "policy.json"
(etc/"containers/registries.d").install "default.yaml"
prefix.install_metafiles
end
end
test do
cmd = "#{bin}/skopeo --override-os linux inspect docker://busybox"
output = shell_output(cmd)
assert_match "docker.io/library/busybox", output
end
end
| 38.793651 | 122 | 0.709083 |
1db9f15b790f2ffd815b5202b46c7f0e06810f81 | 978 | require 'spec_helper'
describe Gitlab::Regex do
describe 'path regex' do
it { expect('gitlab-ce').to match(Gitlab::Regex.path_regex) }
it { expect('gitlab_git').to match(Gitlab::Regex.path_regex) }
it { expect('_underscore.js').to match(Gitlab::Regex.path_regex) }
it { expect('100px.com').to match(Gitlab::Regex.path_regex) }
it { expect('?gitlab').not_to match(Gitlab::Regex.path_regex) }
it { expect('git lab').not_to match(Gitlab::Regex.path_regex) }
it { expect('gitlab.git').not_to match(Gitlab::Regex.path_regex) }
end
describe 'project name regex' do
it { expect('gitlab-ce').to match(Gitlab::Regex.project_name_regex) }
it { expect('GitLab CE').to match(Gitlab::Regex.project_name_regex) }
it { expect('100 lines').to match(Gitlab::Regex.project_name_regex) }
it { expect('gitlab.git').to match(Gitlab::Regex.project_name_regex) }
it { expect('?gitlab').not_to match(Gitlab::Regex.project_name_regex) }
end
end
| 44.454545 | 75 | 0.695297 |
624fc176a54ee84e1b6b8dcf0626f0e0c21bb6b8 | 1,559 | require File.expand_path("../../Abstract/abstract-php-extension", __FILE__)
class Php53Blitz < AbstractPhp53Extension
init
homepage 'http://alexeyrybak.com/blitz/blitz_en.html'
url 'https://github.com/alexeyrybak/blitz/archive/v0.8.17.tar.gz'
sha1 '9c47e7daf585493dcf5f5bad9e202228c28ed748'
bottle do
root_url "https://homebrew.bintray.com/bottles-php"
sha1 "4d04794c8d1b20fadb8d6a029dc0e0462719277a" => :yosemite
sha1 "6a7148afc0c8c604ca582dff87b056a71765ff2c" => :mavericks
sha1 "0741534f10ce84a8396cf4e3068c5497c2b5ec44" => :mountain_lion
end
head 'https://github.com/alexeyrybak/blitz.git'
def install
safe_phpize
system "./configure", "--prefix=#{prefix}", phpconfig
system "make"
prefix.install "modules/blitz.so"
write_config_file if build.with? "config-file"
end
def config_file
super + <<-EOS.undent
blitz.auto_escape=0
blitz.check_recursion=1
blitz.comment_close="*/"
blitz.comment_open="/*"
blitz.enable_alternative_tags=1
blitz.enable_callbacks=1
blitz.enable_comments=0
blitz.enable_include=1
blitz.enable_php_callbacks=1
blitz.lower_case_method_names=1
blitz.path=""
blitz.php_callbacks_first=1
blitz.remove_spaces_around_context_tags=1
blitz.scope_lookup_limit=0
blitz.tag_close="}}"
blitz.tag_close_alt="-->"
blitz.tag_open="{{"
blitz.tag_open_alt="<!--"
blitz.throw_exceptions=0
blitz.var_prefix="$"
blitz.warn_context_duplicates=0
EOS
end
end
| 29.415094 | 75 | 0.708146 |
186b8e1ffe62fb77ba0eb7e80a396ff4a369dad4 | 97 | require 'simplecov'
SimpleCov.start do
add_filter '/spec/'
end
SimpleCov.minimum_coverage 90
| 12.125 | 29 | 0.783505 |
f8a6d9f808eeca4f90579e24f47e86eeed7b7059 | 4,011 | # frozen_string_literal: true
class Explore::ProjectsController < Explore::ApplicationController
include PageLimiter
include ParamsBackwardCompatibility
include RendersMemberAccess
include RendersProjectsList
include SortingHelper
include SortingPreference
MIN_SEARCH_LENGTH = 3
PAGE_LIMIT = 50
before_action :set_non_archived_param
before_action :set_sorting
# For background information on the limit, see:
# https://gitlab.com/gitlab-org/gitlab/-/issues/38357
# https://gitlab.com/gitlab-org/gitlab/-/issues/262682
before_action only: [:index, :trending, :starred] do
limit_pages(PAGE_LIMIT)
end
rescue_from PageOutOfBoundsError, with: :page_out_of_bounds
feature_category :projects
def index
show_alert_if_search_is_disabled
@projects = load_projects
respond_to do |format|
format.html
format.json do
render json: {
html: view_to_html_string("explore/projects/_projects", projects: @projects)
}
end
end
end
def trending
params[:trending] = true
@projects = load_projects
respond_to do |format|
format.html
format.json do
render json: {
html: view_to_html_string("explore/projects/_projects", projects: @projects)
}
end
end
end
# rubocop: disable CodeReuse/ActiveRecord
def starred
@projects = load_projects.reorder('star_count DESC')
respond_to do |format|
format.html
format.json do
render json: {
html: view_to_html_string("explore/projects/_projects", projects: @projects)
}
end
end
end
# rubocop: enable CodeReuse/ActiveRecord
def topics
load_project_counts
load_topics
end
def topic
load_topic
return render_404 unless @topic
params[:topic] = @topic.name
@projects = load_projects
end
private
def load_project_counts
@total_user_projects_count = ProjectsFinder.new(params: { non_public: true }, current_user: current_user).execute
@total_starred_projects_count = ProjectsFinder.new(params: { starred: true }, current_user: current_user).execute
end
def load_projects
load_project_counts
finder_params = {
minimum_search_length: MIN_SEARCH_LENGTH,
not_aimed_for_deletion: true
}
projects = ProjectsFinder.new(current_user: current_user, params: params.merge(finder_params)).execute
projects = preload_associations(projects)
projects = projects.page(params[:page]).without_count
prepare_projects_for_rendering(projects)
end
def load_topics
@topics = Projects::TopicsFinder.new(params: params.permit(:search)).execute.page(params[:page]).without_count
end
def load_topic
@topic = Projects::Topic.find_by_name(params[:topic_name])
end
# rubocop: disable CodeReuse/ActiveRecord
def preload_associations(projects)
projects.includes(:route, :creator, :group, :project_feature, :topics, namespace: [:route, :owner])
end
# rubocop: enable CodeReuse/ActiveRecord
def set_sorting
params[:sort] = set_sort_order
@sort = params[:sort]
end
def default_sort_order
sort_value_latest_activity
end
def sorting_field
Project::SORTING_PREFERENCE_FIELD
end
def page_out_of_bounds(error)
load_project_counts
@max_page_number = error.message
respond_to do |format|
format.html do
render "page_out_of_bounds", status: :bad_request
end
format.json do
render json: {
html: view_to_html_string("explore/projects/page_out_of_bounds")
}, status: :bad_request
end
end
end
def show_alert_if_search_is_disabled
    return if current_user || (params[:name].blank? && params[:search].blank?) || !html_request? || Feature.disabled?(:disable_anonymous_project_search, type: :ops)
flash.now[:notice] = _('You must sign in to search for specific projects.')
end
end
Explore::ProjectsController.prepend_mod_with('Explore::ProjectsController')
| 25.06875 | 162 | 0.715532 |
21f9f7206b355788262da3b4483007335887b3eb | 4,314 | class Platformio < Formula
include Language::Python::Virtualenv
desc "Professional collaborative platform for embedded development"
homepage "https://platformio.org/"
url "https://files.pythonhosted.org/packages/d8/7b/a79280396d25daa2ce01000d77c45dd4703764fcab4d0ae4a73c8f97f183/platformio-5.0.3.tar.gz"
sha256 "5035e991dc00640aa06b47f01f2e1229fd3f98710bb94bdd5566e45247624946"
license "Apache-2.0"
livecheck do
url :stable
end
bottle do
cellar :any_skip_relocation
sha256 "3e43560843f14837cbe2904e4d77e753439c504a12705a5d0b2e1e16713ae799" => :big_sur
sha256 "4f93829a20a0de19d9747978ea92df108c8d6e57cd9cf0465fc68ec2e94cc003" => :catalina
sha256 "07d3fc46621f98ac4e348a0a4838ce8be5c8d4bd194298dcbe6c6c5d901e7dba" => :mojave
sha256 "72d0792c5b9458417cd38032dc7aae817185f35f65d3d732134971e8aa847c9d" => :high_sierra
end
depends_on "[email protected]"
resource "bottle" do
url "https://files.pythonhosted.org/packages/ea/80/3d2dca1562ffa1929017c74635b4cb3645a352588de89e90d0bb53af3317/bottle-0.12.19.tar.gz"
sha256 "a9d73ffcbc6a1345ca2d7949638db46349f5b2b77dac65d6494d45c23628da2c"
end
resource "certifi" do
url "https://files.pythonhosted.org/packages/e6/de/879cf857ae6f890dfa23c3d6239814c5471936b618c8fb0c8732ad5da885/certifi-2020.11.8.tar.gz"
sha256 "f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
end
resource "click" do
url "https://files.pythonhosted.org/packages/27/6f/be940c8b1f1d69daceeb0032fee6c34d7bd70e3e649ccac0951500b4720e/click-7.1.2.tar.gz"
sha256 "d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"
end
resource "colorama" do
url "https://files.pythonhosted.org/packages/1f/bb/5d3246097ab77fa083a61bd8d3d527b7ae063c7d8e8671b1cf8c4ec10cbe/colorama-0.4.4.tar.gz"
sha256 "5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/ea/b7/e0e3c1c467636186c39925827be42f16fee389dc404ac29e930e9136be70/idna-2.10.tar.gz"
sha256 "b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"
end
resource "marshmallow" do
url "https://files.pythonhosted.org/packages/ea/ac/dc6ed04439ddfb58414a0587cfaa0a2f36b53caf8fadc41b3b4211647434/marshmallow-3.9.1.tar.gz"
sha256 "73facc37462dfc0b27f571bdaffbef7709e19f7a616beb3802ea425b07843f4e"
end
resource "pyelftools" do
url "https://files.pythonhosted.org/packages/6b/b5/f7022f2d950327ba970ec85fb8f85c79244031092c129b6f34ab17514ae0/pyelftools-0.27.tar.gz"
sha256 "cde854e662774c5457d688ca41615f6594187ba7067af101232df889a6b7a66b"
end
resource "pyserial" do
url "https://files.pythonhosted.org/packages/cc/74/11b04703ec416717b247d789103277269d567db575d2fd88f25d9767fe3d/pyserial-3.4.tar.gz"
sha256 "6e2d401fdee0eab996cf734e67773a0143b932772ca8b42451440cfed942c627"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/9f/14/4a6542a078773957aa83101336375c9597e6fe5889d20abda9c38f9f3ff2/requests-2.25.0.tar.gz"
sha256 "7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8"
end
resource "semantic-version" do
url "https://files.pythonhosted.org/packages/d4/52/3be868c7ed1f408cb822bc92ce17ffe4e97d11c42caafce0589f05844dd0/semantic_version-2.8.5.tar.gz"
sha256 "d2cb2de0558762934679b9a104e82eca7af448c9f4974d1f3eeccff651df8a54"
end
resource "tabulate" do
url "https://files.pythonhosted.org/packages/57/6f/213d075ad03c84991d44e63b6516dd7d185091df5e1d02a660874f8f7e1e/tabulate-0.8.7.tar.gz"
sha256 "db2723a20d04bcda8522165c73eea7c300eda74e0ce852d9022e0159d7895007"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/19/80/b2a19b372f16bc846fd156de8d9b3a9b1092aef1f1963d800b0f8c76a67a/urllib3-1.26.1.tar.gz"
sha256 "097116a6f16f13482d2a2e56792088b9b2920f4eb6b4f84a2c90555fb673db74"
end
def install
virtualenv_install_with_resources
end
test do
output = shell_output("#{bin}/pio boards ststm32")
assert_match "ST Nucleo F401RE", output
end
end
| 44.020408 | 146 | 0.824293 |
26e8f570fda017be685f97c44e4bb9a1c32b26ac | 687 | require "processor/environment"
require "processor/version"
require "processor/data/array_processor"
require "processor/data/batch_processor"
require "processor/data/null_processor"
require "processor/data/solr_pages_processor"
require "processor/data/csv_processor"
require "processor/data/active_record_batch_processor"
require "processor/observer/logger"
require "processor/observer/null_observer"
require "processor/process_runner/successive"
require "processor/process_runner/threads"
require "processor/subroutine/counter"
require "processor/subroutine/recursion"
require "processor/subroutine/name"
require "processor/runner"
require "processor/thread"
module Processor
end
| 26.423077 | 54 | 0.84425 |
0838de705d5c16346c2bf8092a48b45242ca7ba5 | 749 | module LeaderboardsHelper
def categories
['Achievements', 'Achievement Points', 'Mounts', 'Minions', 'Orchestrion', 'Emotes',
'Bardings', 'Hairstyles', 'Armoire'].freeze
end
def data_center(server)
Character.servers_by_data_center.each { |dc, servers| return dc if servers.include?(server) }
'Unknown'
end
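  # Builds select options for every server, tagging each <option> with its data
  # center name (lower-cased) as a CSS class (presumably so the view can group or
  # filter servers client-side), and pre-selects the given server.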
def grouped_servers(server)
servers = Character.servers_by_data_center.flat_map do |dc, servers|
servers.map do |server|
[server, server, { class: dc.downcase }]
end
end
options_for_select(servers.sort, server)
end
def limit_options(limit)
options = [10, 100, 1000].map do |option|
["Top #{option}", option]
end
options_for_select(options, limit)
end
end
| 24.966667 | 97 | 0.675567 |
21e721250fcf182bebd3014c7e983df51beeb92e | 2,118 | module Restforce
module Bulk
class Job
include Restforce::Bulk::Attributes
JOB_CONTENT_TYPE_MAPPING = {
csv: 'CSV',
xml: 'XML',
zip_csv: 'ZIP_CSV',
zip_xml: 'ZIP_XML'
}
class << self
def create(operation, object_name, content_type=:xml)
builder = Restforce::Bulk::Builder::Xml.new(operation)
data = builder.job(object_name, JOB_CONTENT_TYPE_MAPPING[content_type.to_sym])
response = Restforce::Bulk.client.perform_request(:post, 'job', data)
new(response.body.jobInfo)
end
def find(id)
response = Restforce::Bulk.client.perform_request(:get, "job/#{id}")
new(response.body.jobInfo)
end
end
attr_accessor :id, :operation, :object, :created_by_id, :created_date,
:system_modstamp, :state, :content_type
def initialize(attributes={})
assign_attributes(attributes)
@batches = []
end
def content_type=(value)
@content_type = JOB_CONTENT_TYPE_MAPPING.invert[value] || value
end
def batches
@batches
end
def reload_batches
response = Restforce::Bulk.client.perform_request(:get, "job/#{id}/batch")
parser = Restforce::Bulk::Parser::Xml.new
@batches = parser.batches(response.body).map do |batch_info|
Restforce::Bulk::Batch.new(batch_info)
end
end
def add_batch(data)
Restforce::Bulk::Batch.create(id, data, operation, content_type).tap do |batch|
batches << batch
end
end
def close
builder = Restforce::Bulk::Builder::Xml.new(operation)
response = Restforce::Bulk.client.perform_request(:post, "job/#{id}", builder.close)
assign_attributes(response.body.jobInfo)
end
def abort
builder = Restforce::Bulk::Builder::Xml.new(operation)
response = Restforce::Bulk.client.perform_request(:post, "job/#{id}", builder.abort)
assign_attributes(response.body.jobInfo)
end
end
end
end
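# Minimal usage sketch (assumes Restforce::Bulk.client has already been configured
# elsewhere and that 'Contact' is a valid object for the connected org):
#
#   job = Restforce::Bulk::Job.create(:insert, 'Contact', :csv)
#   job.add_batch("FirstName,LastName\nJane,Doe\n")
#   job.close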
| 26.475 | 92 | 0.609537 |
5d404fca932bd94cda9620d95597415b4b64f05d | 8,718 | class GccAT8 < Formula
desc "GNU compiler collection"
homepage "https://gcc.gnu.org/"
url "https://ftp.gnu.org/gnu/gcc/gcc-8.5.0/gcc-8.5.0.tar.xz"
mirror "https://ftpmirror.gnu.org/gcc/gcc-8.5.0/gcc-8.5.0.tar.xz"
sha256 "d308841a511bb830a6100397b0042db24ce11f642dab6ea6ee44842e5325ed50"
license all_of: [
"LGPL-2.1-or-later",
"GPL-3.0-or-later" => { with: "GCC-exception-3.1" },
]
livecheck do
url :stable
regex(%r{href=.*?gcc[._-]v?(8(?:\.\d+)+)(?:/?["' >]|\.t)}i)
end
bottle do
sha256 big_sur: "c23c342d120580e8fbd897712b6ddce67fb0f0235ca8745736f4c00d8b0f2bd5"
sha256 catalina: "e031d1e8b3ac06f7fb3ae54e594254dcfdfd2e84e54b15ee370f570d4353db7c"
sha256 mojave: "5ddd8753dbd6a3a3841e3ef72f67608761e0ab574ca3218b4fed54f1399cc861"
sha256 x86_64_linux: "b745f8c0cc88d7f358894c8ce5fc482191469da586ba419d685f3b55c71049d3"
end
# The bottles are built on systems with the CLT installed, and do not work
# out of the box on Xcode-only systems due to an incorrect sysroot.
pour_bottle? only_if: :clt_installed
depends_on arch: :x86_64
depends_on "gmp"
depends_on "isl"
depends_on "libmpc"
depends_on "mpfr"
uses_from_macos "zlib"
on_linux do
depends_on "binutils"
end
# GCC bootstraps itself, so it is OK to have an incompatible C++ stdlib
cxxstdlib_check :skip
def version_suffix
version.major.to_s
end
def install
# GCC will suffer build errors if forced to use a particular linker.
ENV.delete "LD"
# Even when suffixes are appended, the info pages conflict when
# install-info is run so pretend we have an outdated makeinfo
# to prevent their build.
ENV["gcc_cv_prog_makeinfo_modern"] = "no"
    # We avoid building:
# - Ada, which requires a pre-existing GCC Ada compiler to bootstrap
# - Go, currently not supported on macOS
# - BRIG
languages = %w[c c++ objc obj-c++ fortran]
pkgversion = "Homebrew GCC #{pkg_version} #{build.used_options*" "}".strip
args = %W[
--prefix=#{prefix}
--libdir=#{lib}/gcc/#{version_suffix}
--disable-nls
--enable-checking=release
--enable-languages=#{languages.join(",")}
--program-suffix=-#{version_suffix}
--with-gmp=#{Formula["gmp"].opt_prefix}
--with-mpfr=#{Formula["mpfr"].opt_prefix}
--with-mpc=#{Formula["libmpc"].opt_prefix}
--with-isl=#{Formula["isl"].opt_prefix}
--with-pkgversion=#{pkgversion}
--with-bugurl=#{tap.issues_url}
]
if OS.mac?
args << "--build=x86_64-apple-darwin#{OS.kernel_version.major}"
args << "--with-system-zlib"
# Xcode 10 dropped 32-bit support
args << "--disable-multilib" if DevelopmentTools.clang_build_version >= 1000
# System headers may not be in /usr/include
sdk = MacOS.sdk_path_if_needed
if sdk
args << "--with-native-system-header-dir=/usr/include"
args << "--with-sysroot=#{sdk}"
end
# Workaround for Xcode 12.5 bug on Intel
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=100340
args << "--without-build-config" if DevelopmentTools.clang_build_version >= 1205
# Ensure correct install names when linking against libgcc_s;
# see discussion in https://github.com/Homebrew/legacy-homebrew/pull/34303
inreplace "libgcc/config/t-slibgcc-darwin", "@shlib_slibdir@", "#{HOMEBREW_PREFIX}/lib/gcc/#{version_suffix}"
else
# Fix Linux error: gnu/stubs-32.h: No such file or directory.
args << "--disable-multilib"
# Change the default directory name for 64-bit libraries to `lib`
# http://www.linuxfromscratch.org/lfs/view/development/chapter06/gcc.html
inreplace "gcc/config/i386/t-linux64", "m64=../lib64", "m64="
end
mkdir "build" do
system "../configure", *args
if OS.mac?
# Use -headerpad_max_install_names in the build,
# otherwise updated load commands won't fit in the Mach-O header.
# This is needed because `gcc` avoids the superenv shim.
system "make", "BOOT_LDFLAGS=-Wl,-headerpad_max_install_names"
system "make", "install"
else
system "make"
system "make", "install-strip"
end
end
# Handle conflicts between GCC formulae and avoid interfering
# with system compilers.
# Rename man7.
Dir.glob(man7/"*.7") { |file| add_suffix file, version_suffix }
# Even when we disable building info pages some are still installed.
info.rmtree
end
def add_suffix(file, suffix)
dir = File.dirname(file)
ext = File.extname(file)
base = File.basename(file, ext)
File.rename file, "#{dir}/#{base}-#{suffix}#{ext}"
end
def post_install
if OS.linux?
gcc = bin/"gcc-#{version_suffix}"
libgcc = Pathname.new(Utils.safe_popen_read(gcc, "-print-libgcc-file-name")).parent
raise "command failed: #{gcc} -print-libgcc-file-name" if $CHILD_STATUS.exitstatus.nonzero?
glibc = Formula["glibc"]
glibc_installed = glibc.any_version_installed?
      # Symlink crt1.o and friends where gcc can find them.
crtdir = if glibc_installed
glibc.opt_lib
else
Pathname.new(Utils.safe_popen_read("/usr/bin/cc", "-print-file-name=crti.o")).parent
end
ln_sf Dir[crtdir/"*crt?.o"], libgcc
# Create the GCC specs file
# See https://gcc.gnu.org/onlinedocs/gcc/Spec-Files.html
# Locate the specs file
specs = libgcc/"specs"
ohai "Creating the GCC specs file: #{specs}"
specs_orig = Pathname.new("#{specs}.orig")
rm_f [specs_orig, specs]
system_header_dirs = ["#{HOMEBREW_PREFIX}/include"]
if glibc_installed
# https://github.com/Linuxbrew/brew/issues/724
system_header_dirs << glibc.opt_include
else
# Locate the native system header dirs if user uses system glibc
target = Utils.safe_popen_read(gcc, "-print-multiarch").chomp
raise "command failed: #{gcc} -print-multiarch" if $CHILD_STATUS.exitstatus.nonzero?
system_header_dirs += ["/usr/include/#{target}", "/usr/include"]
end
# Save a backup of the default specs file
specs_string = Utils.safe_popen_read(gcc, "-dumpspecs")
raise "command failed: #{gcc} -dumpspecs" if $CHILD_STATUS.exitstatus.nonzero?
specs_orig.write specs_string
# Set the library search path
# For include path:
      #   * `-isysroot #{HOMEBREW_PREFIX}/nonexistent` prevents gcc from searching built-in
# system header files.
# * `-idirafter <dir>` instructs gcc to search system header
# files after gcc internal header files.
# For libraries:
# * `-nostdlib -L#{libgcc}` instructs gcc to use brewed glibc
# if applied.
# * `-L#{libdir}` instructs gcc to find the corresponding gcc
# libraries. It is essential if there are multiple brewed gcc
# with different versions installed.
      #     Note that it should only be passed for the `gcc@*` formulae.
      #   * `-L#{HOMEBREW_PREFIX}/lib` instructs gcc to find the rest of the
      #     brewed libraries.
libdir = HOMEBREW_PREFIX/"lib/gcc/#{version_suffix}"
specs.write specs_string + <<~EOS
*cpp_unique_options:
+ -isysroot #{HOMEBREW_PREFIX}/nonexistent #{system_header_dirs.map { |p| "-idirafter #{p}" }.join(" ")}
*link_libgcc:
#{glibc_installed ? "-nostdlib -L#{libgcc}" : "+"} -L#{libdir} -L#{HOMEBREW_PREFIX}/lib
*link:
+ --dynamic-linker #{HOMEBREW_PREFIX}/lib/ld.so -rpath #{libdir} -rpath #{HOMEBREW_PREFIX}/lib
EOS
end
end
test do
(testpath/"hello-c.c").write <<~EOS
#include <stdio.h>
int main()
{
puts("Hello, world!");
return 0;
}
EOS
system "#{bin}/gcc-#{version.major}", "-o", "hello-c", "hello-c.c"
assert_equal "Hello, world!\n", `./hello-c`
(testpath/"hello-cc.cc").write <<~EOS
#include <iostream>
struct exception { };
int main()
{
std::cout << "Hello, world!" << std::endl;
try { throw exception{}; }
catch (exception) { }
catch (...) { }
return 0;
}
EOS
system "#{bin}/g++-#{version.major}", "-o", "hello-cc", "hello-cc.cc"
assert_equal "Hello, world!\n", `./hello-cc`
(testpath/"test.f90").write <<~EOS
integer,parameter::m=10000
real::a(m), b(m)
real::fact=0.5
do concurrent (i=1:m)
a(i) = a(i) + fact*b(i)
end do
write(*,"(A)") "Done"
end
EOS
system "#{bin}/gfortran-#{version.major}", "-o", "test", "test.f90"
assert_equal "Done\n", `./test`
end
end
| 34.054688 | 115 | 0.635696 |
91063762f1b02ac6cdc826afeed5e8eb85f07503 | 535 | module PkgAdapter
class Doc < BaseAdapter
def parse
@name = File.basename(@path)
@ext = File.extname(@path).gsub('.', '')
end
def plat
@ext
end
def app_uniq_key
:build
end
def app_name
@name
end
def app_version
""
end
def app_build
File.mtime(@path).strftime("%Y-%m-%d %H:%M:%S")
end
def app_icon
end
def app_size
File.size(@path)
end
def app_bundle_id
@ext
end
def ext_info
end
end
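  # Rough usage sketch (assumes BaseAdapter#initialize stores the path and then
  # invokes #parse; the path below is hypothetical):
  #
  #   adapter = PkgAdapter::Doc.new('/tmp/manual.pdf')
  #   adapter.app_name # => "manual.pdf"
  #   adapter.plat     # => "pdf"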
end | 12.159091 | 53 | 0.528972 |
ffd571a5c9902abaf4b85c12cf45c1d5e161817a | 46,637 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module AppengineV1
class ApiConfigHandler
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ApiEndpointHandler
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Application
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class AuthorizedCertificate
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class AuthorizedDomain
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class AutomaticScaling
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class BasicScaling
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class BatchUpdateIngressRulesRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class BatchUpdateIngressRulesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CertificateRawData
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CloudBuildOptions
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ContainerInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CpuUtilization
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CreateVersionMetadataV1
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CreateVersionMetadataV1Alpha
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CreateVersionMetadataV1Beta
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DebugInstanceRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Deployment
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DiskUtilization
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DomainMapping
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Empty
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class EndpointsApiService
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Entrypoint
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ErrorHandler
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class FeatureSettings
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class FileInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class FirewallRule
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class GoogleAppengineV1betaLocationMetadata
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class HealthCheck
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class IdentityAwareProxy
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Instance
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Library
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListAuthorizedCertificatesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListAuthorizedDomainsResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListDomainMappingsResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListIngressRulesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListInstancesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListLocationsResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListOperationsResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListServicesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListVersionsResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class LivenessCheck
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Location
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class LocationMetadata
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ManagedCertificate
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ManualScaling
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Network
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class NetworkSettings
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class NetworkUtilization
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Operation
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class OperationMetadataV1
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class OperationMetadataV1Alpha
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class OperationMetadataV1Beta
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ReadinessCheck
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class RepairApplicationRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class RequestUtilization
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ResourceRecord
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Resources
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ScriptHandler
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Service
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SslSettings
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StandardSchedulerSettings
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StaticFilesHandler
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Status
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class TrafficSplit
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class UrlDispatchRule
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class UrlMap
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Version
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Volume
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class VpcAccessConnector
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ZipInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ApiConfigHandler
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :auth_fail_action, as: 'authFailAction'
property :login, as: 'login'
property :script, as: 'script'
property :security_level, as: 'securityLevel'
property :url, as: 'url'
end
end
class ApiEndpointHandler
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :script_path, as: 'scriptPath'
end
end
class Application
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :auth_domain, as: 'authDomain'
property :code_bucket, as: 'codeBucket'
property :database_type, as: 'databaseType'
property :default_bucket, as: 'defaultBucket'
property :default_cookie_expiration, as: 'defaultCookieExpiration'
property :default_hostname, as: 'defaultHostname'
collection :dispatch_rules, as: 'dispatchRules', class: Google::Apis::AppengineV1::UrlDispatchRule, decorator: Google::Apis::AppengineV1::UrlDispatchRule::Representation
property :feature_settings, as: 'featureSettings', class: Google::Apis::AppengineV1::FeatureSettings, decorator: Google::Apis::AppengineV1::FeatureSettings::Representation
property :gcr_domain, as: 'gcrDomain'
property :iap, as: 'iap', class: Google::Apis::AppengineV1::IdentityAwareProxy, decorator: Google::Apis::AppengineV1::IdentityAwareProxy::Representation
property :id, as: 'id'
property :location_id, as: 'locationId'
property :name, as: 'name'
property :serving_status, as: 'servingStatus'
end
end
class AuthorizedCertificate
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :certificate_raw_data, as: 'certificateRawData', class: Google::Apis::AppengineV1::CertificateRawData, decorator: Google::Apis::AppengineV1::CertificateRawData::Representation
property :display_name, as: 'displayName'
property :domain_mappings_count, as: 'domainMappingsCount'
collection :domain_names, as: 'domainNames'
property :expire_time, as: 'expireTime'
property :id, as: 'id'
property :managed_certificate, as: 'managedCertificate', class: Google::Apis::AppengineV1::ManagedCertificate, decorator: Google::Apis::AppengineV1::ManagedCertificate::Representation
property :name, as: 'name'
collection :visible_domain_mappings, as: 'visibleDomainMappings'
end
end
class AuthorizedDomain
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :id, as: 'id'
property :name, as: 'name'
end
end
class AutomaticScaling
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cool_down_period, as: 'coolDownPeriod'
property :cpu_utilization, as: 'cpuUtilization', class: Google::Apis::AppengineV1::CpuUtilization, decorator: Google::Apis::AppengineV1::CpuUtilization::Representation
property :disk_utilization, as: 'diskUtilization', class: Google::Apis::AppengineV1::DiskUtilization, decorator: Google::Apis::AppengineV1::DiskUtilization::Representation
property :max_concurrent_requests, as: 'maxConcurrentRequests'
property :max_idle_instances, as: 'maxIdleInstances'
property :max_pending_latency, as: 'maxPendingLatency'
property :max_total_instances, as: 'maxTotalInstances'
property :min_idle_instances, as: 'minIdleInstances'
property :min_pending_latency, as: 'minPendingLatency'
property :min_total_instances, as: 'minTotalInstances'
property :network_utilization, as: 'networkUtilization', class: Google::Apis::AppengineV1::NetworkUtilization, decorator: Google::Apis::AppengineV1::NetworkUtilization::Representation
property :request_utilization, as: 'requestUtilization', class: Google::Apis::AppengineV1::RequestUtilization, decorator: Google::Apis::AppengineV1::RequestUtilization::Representation
property :standard_scheduler_settings, as: 'standardSchedulerSettings', class: Google::Apis::AppengineV1::StandardSchedulerSettings, decorator: Google::Apis::AppengineV1::StandardSchedulerSettings::Representation
end
end
class BasicScaling
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :idle_timeout, as: 'idleTimeout'
property :max_instances, as: 'maxInstances'
end
end
class BatchUpdateIngressRulesRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :ingress_rules, as: 'ingressRules', class: Google::Apis::AppengineV1::FirewallRule, decorator: Google::Apis::AppengineV1::FirewallRule::Representation
end
end
class BatchUpdateIngressRulesResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :ingress_rules, as: 'ingressRules', class: Google::Apis::AppengineV1::FirewallRule, decorator: Google::Apis::AppengineV1::FirewallRule::Representation
end
end
class CertificateRawData
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :private_key, as: 'privateKey'
property :public_certificate, as: 'publicCertificate'
end
end
class CloudBuildOptions
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :app_yaml_path, as: 'appYamlPath'
property :cloud_build_timeout, as: 'cloudBuildTimeout'
end
end
class ContainerInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :image, as: 'image'
end
end
class CpuUtilization
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :aggregation_window_length, as: 'aggregationWindowLength'
property :target_utilization, as: 'targetUtilization'
end
end
class CreateVersionMetadataV1
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cloud_build_id, as: 'cloudBuildId'
end
end
class CreateVersionMetadataV1Alpha
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cloud_build_id, as: 'cloudBuildId'
end
end
class CreateVersionMetadataV1Beta
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cloud_build_id, as: 'cloudBuildId'
end
end
class DebugInstanceRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :ssh_key, as: 'sshKey'
end
end
class Deployment
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cloud_build_options, as: 'cloudBuildOptions', class: Google::Apis::AppengineV1::CloudBuildOptions, decorator: Google::Apis::AppengineV1::CloudBuildOptions::Representation
property :container, as: 'container', class: Google::Apis::AppengineV1::ContainerInfo, decorator: Google::Apis::AppengineV1::ContainerInfo::Representation
hash :files, as: 'files', class: Google::Apis::AppengineV1::FileInfo, decorator: Google::Apis::AppengineV1::FileInfo::Representation
property :zip, as: 'zip', class: Google::Apis::AppengineV1::ZipInfo, decorator: Google::Apis::AppengineV1::ZipInfo::Representation
end
end
class DiskUtilization
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :target_read_bytes_per_second, as: 'targetReadBytesPerSecond'
property :target_read_ops_per_second, as: 'targetReadOpsPerSecond'
property :target_write_bytes_per_second, as: 'targetWriteBytesPerSecond'
property :target_write_ops_per_second, as: 'targetWriteOpsPerSecond'
end
end
class DomainMapping
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :id, as: 'id'
property :name, as: 'name'
collection :resource_records, as: 'resourceRecords', class: Google::Apis::AppengineV1::ResourceRecord, decorator: Google::Apis::AppengineV1::ResourceRecord::Representation
property :ssl_settings, as: 'sslSettings', class: Google::Apis::AppengineV1::SslSettings, decorator: Google::Apis::AppengineV1::SslSettings::Representation
end
end
class Empty
# @private
class Representation < Google::Apis::Core::JsonRepresentation
end
end
class EndpointsApiService
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :config_id, as: 'configId'
property :disable_trace_sampling, as: 'disableTraceSampling'
property :name, as: 'name'
property :rollout_strategy, as: 'rolloutStrategy'
end
end
class Entrypoint
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :shell, as: 'shell'
end
end
class ErrorHandler
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :error_code, as: 'errorCode'
property :mime_type, as: 'mimeType'
property :static_file, as: 'staticFile'
end
end
class FeatureSettings
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :split_health_checks, as: 'splitHealthChecks'
property :use_container_optimized_os, as: 'useContainerOptimizedOs'
end
end
class FileInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :mime_type, as: 'mimeType'
property :sha1_sum, as: 'sha1Sum'
property :source_url, as: 'sourceUrl'
end
end
class FirewallRule
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :action, as: 'action'
property :description, as: 'description'
property :priority, as: 'priority'
property :source_range, as: 'sourceRange'
end
end
class GoogleAppengineV1betaLocationMetadata
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :flexible_environment_available, as: 'flexibleEnvironmentAvailable'
property :search_api_available, as: 'searchApiAvailable'
property :standard_environment_available, as: 'standardEnvironmentAvailable'
end
end
class HealthCheck
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :check_interval, as: 'checkInterval'
property :disable_health_check, as: 'disableHealthCheck'
property :healthy_threshold, as: 'healthyThreshold'
property :host, as: 'host'
property :restart_threshold, as: 'restartThreshold'
property :timeout, as: 'timeout'
property :unhealthy_threshold, as: 'unhealthyThreshold'
end
end
class IdentityAwareProxy
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :enabled, as: 'enabled'
property :oauth2_client_id, as: 'oauth2ClientId'
property :oauth2_client_secret, as: 'oauth2ClientSecret'
property :oauth2_client_secret_sha256, as: 'oauth2ClientSecretSha256'
end
end
class Instance
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :app_engine_release, as: 'appEngineRelease'
property :availability, as: 'availability'
property :average_latency, as: 'averageLatency'
property :errors, as: 'errors'
property :id, as: 'id'
property :memory_usage, :numeric_string => true, as: 'memoryUsage'
property :name, as: 'name'
property :qps, as: 'qps'
property :requests, as: 'requests'
property :start_time, as: 'startTime'
property :vm_debug_enabled, as: 'vmDebugEnabled'
property :vm_id, as: 'vmId'
property :vm_ip, as: 'vmIp'
property :vm_liveness, as: 'vmLiveness'
property :vm_name, as: 'vmName'
property :vm_status, as: 'vmStatus'
property :vm_zone_name, as: 'vmZoneName'
end
end
class Library
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
property :version, as: 'version'
end
end
class ListAuthorizedCertificatesResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :certificates, as: 'certificates', class: Google::Apis::AppengineV1::AuthorizedCertificate, decorator: Google::Apis::AppengineV1::AuthorizedCertificate::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListAuthorizedDomainsResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :domains, as: 'domains', class: Google::Apis::AppengineV1::AuthorizedDomain, decorator: Google::Apis::AppengineV1::AuthorizedDomain::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListDomainMappingsResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :domain_mappings, as: 'domainMappings', class: Google::Apis::AppengineV1::DomainMapping, decorator: Google::Apis::AppengineV1::DomainMapping::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListIngressRulesResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :ingress_rules, as: 'ingressRules', class: Google::Apis::AppengineV1::FirewallRule, decorator: Google::Apis::AppengineV1::FirewallRule::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListInstancesResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :instances, as: 'instances', class: Google::Apis::AppengineV1::Instance, decorator: Google::Apis::AppengineV1::Instance::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListLocationsResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :locations, as: 'locations', class: Google::Apis::AppengineV1::Location, decorator: Google::Apis::AppengineV1::Location::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListOperationsResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :next_page_token, as: 'nextPageToken'
collection :operations, as: 'operations', class: Google::Apis::AppengineV1::Operation, decorator: Google::Apis::AppengineV1::Operation::Representation
end
end
class ListServicesResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :next_page_token, as: 'nextPageToken'
collection :services, as: 'services', class: Google::Apis::AppengineV1::Service, decorator: Google::Apis::AppengineV1::Service::Representation
end
end
class ListVersionsResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :next_page_token, as: 'nextPageToken'
collection :versions, as: 'versions', class: Google::Apis::AppengineV1::Version, decorator: Google::Apis::AppengineV1::Version::Representation
end
end
class LivenessCheck
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :check_interval, as: 'checkInterval'
property :failure_threshold, as: 'failureThreshold'
property :host, as: 'host'
property :initial_delay, as: 'initialDelay'
property :path, as: 'path'
property :success_threshold, as: 'successThreshold'
property :timeout, as: 'timeout'
end
end
class Location
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :display_name, as: 'displayName'
hash :labels, as: 'labels'
property :location_id, as: 'locationId'
hash :metadata, as: 'metadata'
property :name, as: 'name'
end
end
class LocationMetadata
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :flexible_environment_available, as: 'flexibleEnvironmentAvailable'
property :search_api_available, as: 'searchApiAvailable'
property :standard_environment_available, as: 'standardEnvironmentAvailable'
end
end
class ManagedCertificate
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :last_renewal_time, as: 'lastRenewalTime'
property :status, as: 'status'
end
end
class ManualScaling
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :instances, as: 'instances'
end
end
class Network
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :forwarded_ports, as: 'forwardedPorts'
property :instance_tag, as: 'instanceTag'
property :name, as: 'name'
property :session_affinity, as: 'sessionAffinity'
property :subnetwork_name, as: 'subnetworkName'
end
end
class NetworkSettings
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :ingress_traffic_allowed, as: 'ingressTrafficAllowed'
end
end
class NetworkUtilization
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :target_received_bytes_per_second, as: 'targetReceivedBytesPerSecond'
property :target_received_packets_per_second, as: 'targetReceivedPacketsPerSecond'
property :target_sent_bytes_per_second, as: 'targetSentBytesPerSecond'
property :target_sent_packets_per_second, as: 'targetSentPacketsPerSecond'
end
end
class Operation
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :done, as: 'done'
property :error, as: 'error', class: Google::Apis::AppengineV1::Status, decorator: Google::Apis::AppengineV1::Status::Representation
hash :metadata, as: 'metadata'
property :name, as: 'name'
hash :response, as: 'response'
end
end
class OperationMetadataV1
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :create_version_metadata, as: 'createVersionMetadata', class: Google::Apis::AppengineV1::CreateVersionMetadataV1, decorator: Google::Apis::AppengineV1::CreateVersionMetadataV1::Representation
property :end_time, as: 'endTime'
property :ephemeral_message, as: 'ephemeralMessage'
property :insert_time, as: 'insertTime'
property :method_prop, as: 'method'
property :target, as: 'target'
property :user, as: 'user'
collection :warning, as: 'warning'
end
end
class OperationMetadataV1Alpha
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :create_version_metadata, as: 'createVersionMetadata', class: Google::Apis::AppengineV1::CreateVersionMetadataV1Alpha, decorator: Google::Apis::AppengineV1::CreateVersionMetadataV1Alpha::Representation
property :end_time, as: 'endTime'
property :ephemeral_message, as: 'ephemeralMessage'
property :insert_time, as: 'insertTime'
property :method_prop, as: 'method'
property :target, as: 'target'
property :user, as: 'user'
collection :warning, as: 'warning'
end
end
class OperationMetadataV1Beta
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :create_version_metadata, as: 'createVersionMetadata', class: Google::Apis::AppengineV1::CreateVersionMetadataV1Beta, decorator: Google::Apis::AppengineV1::CreateVersionMetadataV1Beta::Representation
property :end_time, as: 'endTime'
property :ephemeral_message, as: 'ephemeralMessage'
property :insert_time, as: 'insertTime'
property :method_prop, as: 'method'
property :target, as: 'target'
property :user, as: 'user'
collection :warning, as: 'warning'
end
end
class ReadinessCheck
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :app_start_timeout, as: 'appStartTimeout'
property :check_interval, as: 'checkInterval'
property :failure_threshold, as: 'failureThreshold'
property :host, as: 'host'
property :path, as: 'path'
property :success_threshold, as: 'successThreshold'
property :timeout, as: 'timeout'
end
end
class RepairApplicationRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
end
end
class RequestUtilization
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :target_concurrent_requests, as: 'targetConcurrentRequests'
property :target_request_count_per_second, as: 'targetRequestCountPerSecond'
end
end
class ResourceRecord
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
property :rrdata, as: 'rrdata'
property :type, as: 'type'
end
end
class Resources
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cpu, as: 'cpu'
property :disk_gb, as: 'diskGb'
property :kms_key_reference, as: 'kmsKeyReference'
property :memory_gb, as: 'memoryGb'
collection :volumes, as: 'volumes', class: Google::Apis::AppengineV1::Volume, decorator: Google::Apis::AppengineV1::Volume::Representation
end
end
class ScriptHandler
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :script_path, as: 'scriptPath'
end
end
class Service
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :id, as: 'id'
property :name, as: 'name'
property :network_settings, as: 'networkSettings', class: Google::Apis::AppengineV1::NetworkSettings, decorator: Google::Apis::AppengineV1::NetworkSettings::Representation
property :split, as: 'split', class: Google::Apis::AppengineV1::TrafficSplit, decorator: Google::Apis::AppengineV1::TrafficSplit::Representation
end
end
class SslSettings
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :certificate_id, as: 'certificateId'
property :pending_managed_certificate_id, as: 'pendingManagedCertificateId'
property :ssl_management_type, as: 'sslManagementType'
end
end
class StandardSchedulerSettings
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :max_instances, as: 'maxInstances'
property :min_instances, as: 'minInstances'
property :target_cpu_utilization, as: 'targetCpuUtilization'
property :target_throughput_utilization, as: 'targetThroughputUtilization'
end
end
class StaticFilesHandler
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :application_readable, as: 'applicationReadable'
property :expiration, as: 'expiration'
hash :http_headers, as: 'httpHeaders'
property :mime_type, as: 'mimeType'
property :path, as: 'path'
property :require_matching_file, as: 'requireMatchingFile'
property :upload_path_regex, as: 'uploadPathRegex'
end
end
class Status
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :code, as: 'code'
collection :details, as: 'details'
property :message, as: 'message'
end
end
class TrafficSplit
# @private
class Representation < Google::Apis::Core::JsonRepresentation
hash :allocations, as: 'allocations'
property :shard_by, as: 'shardBy'
end
end
class UrlDispatchRule
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :domain, as: 'domain'
property :path, as: 'path'
property :service, as: 'service'
end
end
class UrlMap
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :api_endpoint, as: 'apiEndpoint', class: Google::Apis::AppengineV1::ApiEndpointHandler, decorator: Google::Apis::AppengineV1::ApiEndpointHandler::Representation
property :auth_fail_action, as: 'authFailAction'
property :login, as: 'login'
property :redirect_http_response_code, as: 'redirectHttpResponseCode'
property :script, as: 'script', class: Google::Apis::AppengineV1::ScriptHandler, decorator: Google::Apis::AppengineV1::ScriptHandler::Representation
property :security_level, as: 'securityLevel'
property :static_files, as: 'staticFiles', class: Google::Apis::AppengineV1::StaticFilesHandler, decorator: Google::Apis::AppengineV1::StaticFilesHandler::Representation
property :url_regex, as: 'urlRegex'
end
end
class Version
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :api_config, as: 'apiConfig', class: Google::Apis::AppengineV1::ApiConfigHandler, decorator: Google::Apis::AppengineV1::ApiConfigHandler::Representation
property :automatic_scaling, as: 'automaticScaling', class: Google::Apis::AppengineV1::AutomaticScaling, decorator: Google::Apis::AppengineV1::AutomaticScaling::Representation
property :basic_scaling, as: 'basicScaling', class: Google::Apis::AppengineV1::BasicScaling, decorator: Google::Apis::AppengineV1::BasicScaling::Representation
hash :beta_settings, as: 'betaSettings'
hash :build_env_variables, as: 'buildEnvVariables'
property :create_time, as: 'createTime'
property :created_by, as: 'createdBy'
property :default_expiration, as: 'defaultExpiration'
property :deployment, as: 'deployment', class: Google::Apis::AppengineV1::Deployment, decorator: Google::Apis::AppengineV1::Deployment::Representation
property :disk_usage_bytes, :numeric_string => true, as: 'diskUsageBytes'
property :endpoints_api_service, as: 'endpointsApiService', class: Google::Apis::AppengineV1::EndpointsApiService, decorator: Google::Apis::AppengineV1::EndpointsApiService::Representation
property :entrypoint, as: 'entrypoint', class: Google::Apis::AppengineV1::Entrypoint, decorator: Google::Apis::AppengineV1::Entrypoint::Representation
property :env, as: 'env'
hash :env_variables, as: 'envVariables'
collection :error_handlers, as: 'errorHandlers', class: Google::Apis::AppengineV1::ErrorHandler, decorator: Google::Apis::AppengineV1::ErrorHandler::Representation
collection :handlers, as: 'handlers', class: Google::Apis::AppengineV1::UrlMap, decorator: Google::Apis::AppengineV1::UrlMap::Representation
property :health_check, as: 'healthCheck', class: Google::Apis::AppengineV1::HealthCheck, decorator: Google::Apis::AppengineV1::HealthCheck::Representation
property :id, as: 'id'
collection :inbound_services, as: 'inboundServices'
property :instance_class, as: 'instanceClass'
collection :libraries, as: 'libraries', class: Google::Apis::AppengineV1::Library, decorator: Google::Apis::AppengineV1::Library::Representation
property :liveness_check, as: 'livenessCheck', class: Google::Apis::AppengineV1::LivenessCheck, decorator: Google::Apis::AppengineV1::LivenessCheck::Representation
property :manual_scaling, as: 'manualScaling', class: Google::Apis::AppengineV1::ManualScaling, decorator: Google::Apis::AppengineV1::ManualScaling::Representation
property :name, as: 'name'
property :network, as: 'network', class: Google::Apis::AppengineV1::Network, decorator: Google::Apis::AppengineV1::Network::Representation
property :nobuild_files_regex, as: 'nobuildFilesRegex'
property :readiness_check, as: 'readinessCheck', class: Google::Apis::AppengineV1::ReadinessCheck, decorator: Google::Apis::AppengineV1::ReadinessCheck::Representation
property :resources, as: 'resources', class: Google::Apis::AppengineV1::Resources, decorator: Google::Apis::AppengineV1::Resources::Representation
property :runtime, as: 'runtime'
property :runtime_api_version, as: 'runtimeApiVersion'
property :runtime_channel, as: 'runtimeChannel'
property :runtime_main_executable_path, as: 'runtimeMainExecutablePath'
property :service_account, as: 'serviceAccount'
property :serving_status, as: 'servingStatus'
property :threadsafe, as: 'threadsafe'
property :version_url, as: 'versionUrl'
property :vm, as: 'vm'
property :vpc_access_connector, as: 'vpcAccessConnector', class: Google::Apis::AppengineV1::VpcAccessConnector, decorator: Google::Apis::AppengineV1::VpcAccessConnector::Representation
collection :zones, as: 'zones'
end
end
class Volume
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
property :size_gb, as: 'sizeGb'
property :volume_type, as: 'volumeType'
end
end
class VpcAccessConnector
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :egress_setting, as: 'egressSetting'
property :name, as: 'name'
end
end
class ZipInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :files_count, as: 'filesCount'
property :source_url, as: 'sourceUrl'
end
end
end
end
end
| 38.039967 | 222 | 0.648005 |
e2f4528c1638c8bf0c9bac0797caf41b7def5589 | 8,751 | # Stole this from Rails; rather copy the source than introduce
# ActiveSupport as a dependency
unless Hash.instance_methods.include? :deep_merge
class Hash
# Returns a new hash with +self+ and +other_hash+ merged recursively.
#
# h1 = { a: true, b: { c: [1, 2, 3] } }
# h2 = { a: false, b: { x: [3, 4, 5] } }
#
# h1.deep_merge(h2) # => { a: false, b: { c: [1, 2, 3], x: [3, 4, 5] } }
#
# Like with Hash#merge in the standard library, a block can be provided
# to merge values:
#
# h1 = { a: 100, b: 200, c: { c1: 100 } }
# h2 = { b: 250, c: { c1: 200 } }
# h1.deep_merge(h2) { |key, this_val, other_val| this_val + other_val }
# # => { a: 100, b: 450, c: { c1: 300 } }
def deep_merge(other_hash, &block)
dup.deep_merge!(other_hash, &block)
end
# Same as +deep_merge+, but modifies +self+.
def deep_merge!(other_hash, &block)
other_hash.each_pair do |current_key, other_value|
this_value = self[current_key]
self[current_key] = if this_value.is_a?(Hash) && other_value.is_a?(Hash)
this_value.deep_merge(other_value, &block)
else
if block_given? && key?(current_key)
block.call(current_key, this_value, other_value)
else
other_value
end
end
end
self
end
end
end
unless Hash.instance_methods.include? :deep_symbolize_keys
# If hash doesn't respond to :deep_symbolize_keys, we can assume
# it needs all the key transformation methods
class Hash
# Returns a new hash with all keys converted using the +block+ operation.
#
# hash = { name: 'Rob', age: '28' }
#
# hash.transform_keys { |key| key.to_s.upcase } # => {"NAME"=>"Rob", "AGE"=>"28"}
#
# If you do not provide a +block+, it will return an Enumerator
# for chaining with other methods:
#
# hash.transform_keys.with_index { |k, i| [k, i].join } # => {"name0"=>"Rob", "age1"=>"28"}
def transform_keys
return enum_for(:transform_keys) { size } unless block_given?
result = {}
each_key do |key|
result[yield(key)] = self[key]
end
result
end
# Destructively converts all keys using the +block+ operations.
# Same as +transform_keys+ but modifies +self+.
def transform_keys!
return enum_for(:transform_keys!) { size } unless block_given?
keys.each do |key|
self[yield(key)] = delete(key)
end
self
end
# Returns a new hash with all keys converted to strings.
#
# hash = { name: 'Rob', age: '28' }
#
# hash.stringify_keys
# # => {"name"=>"Rob", "age"=>"28"}
def stringify_keys
transform_keys(&:to_s)
end
# Destructively converts all keys to strings. Same as
# +stringify_keys+, but modifies +self+.
def stringify_keys!
transform_keys!(&:to_s)
end
# Returns a new hash with all keys converted to symbols, as long as
# they respond to +to_sym+.
#
# hash = { 'name' => 'Rob', 'age' => '28' }
#
# hash.symbolize_keys
# # => {:name=>"Rob", :age=>"28"}
def symbolize_keys
transform_keys{ |key| key.to_sym rescue key }
end
alias_method :to_options, :symbolize_keys
# Destructively converts all keys to symbols, as long as they respond
# to +to_sym+. Same as +symbolize_keys+, but modifies +self+.
def symbolize_keys!
transform_keys!{ |key| key.to_sym rescue key }
end
alias_method :to_options!, :symbolize_keys!
# Validates all keys in a hash match <tt>*valid_keys</tt>, raising
# +ArgumentError+ on a mismatch.
#
# Note that keys are treated differently than HashWithIndifferentAccess,
# meaning that string and symbol keys will not match.
#
# { name: 'Rob', years: '28' }.assert_valid_keys(:name, :age) # => raises "ArgumentError: Unknown key: :years. Valid keys are: :name, :age"
# { name: 'Rob', age: '28' }.assert_valid_keys('name', 'age') # => raises "ArgumentError: Unknown key: :name. Valid keys are: 'name', 'age'"
# { name: 'Rob', age: '28' }.assert_valid_keys(:name, :age) # => passes, raises nothing
def assert_valid_keys(*valid_keys)
valid_keys.flatten!
each_key do |k|
unless valid_keys.include?(k)
raise ArgumentError.new("Unknown key: #{k.inspect}. Valid keys are: #{valid_keys.map(&:inspect).join(', ')}")
end
end
end
# Returns a new hash with all keys converted by the block operation.
# This includes the keys from the root hash and from all
# nested hashes and arrays.
#
# hash = { person: { name: 'Rob', age: '28' } }
#
# hash.deep_transform_keys{ |key| key.to_s.upcase }
# # => {"PERSON"=>{"NAME"=>"Rob", "AGE"=>"28"}}
def deep_transform_keys(&block)
_deep_transform_keys_in_object(self, &block)
end
# Destructively converts all keys by using the block operation.
# This includes the keys from the root hash and from all
# nested hashes and arrays.
def deep_transform_keys!(&block)
_deep_transform_keys_in_object!(self, &block)
end
# Returns a new hash with all keys converted to strings.
# This includes the keys from the root hash and from all
# nested hashes and arrays.
#
# hash = { person: { name: 'Rob', age: '28' } }
#
# hash.deep_stringify_keys
# # => {"person"=>{"name"=>"Rob", "age"=>"28"}}
def deep_stringify_keys
deep_transform_keys(&:to_s)
end
# Destructively converts all keys to strings.
# This includes the keys from the root hash and from all
# nested hashes and arrays.
def deep_stringify_keys!
deep_transform_keys!(&:to_s)
end
# Returns a new hash with all keys converted to symbols, as long as
# they respond to +to_sym+. This includes the keys from the root hash
# and from all nested hashes and arrays.
#
# hash = { 'person' => { 'name' => 'Rob', 'age' => '28' } }
#
# hash.deep_symbolize_keys
# # => {:person=>{:name=>"Rob", :age=>"28"}}
def deep_symbolize_keys
deep_transform_keys{ |key| key.to_sym rescue key }
end
# Destructively converts all keys to symbols, as long as they respond
# to +to_sym+. This includes the keys from the root hash and from all
# nested hashes and arrays.
def deep_symbolize_keys!
deep_transform_keys!{ |key| key.to_sym rescue key }
end
private
# support methods for deep transforming nested hashes and arrays
def _deep_transform_keys_in_object(object, &block)
case object
when Hash
object.each_with_object({}) do |(key, value), result|
result[yield(key)] = _deep_transform_keys_in_object(value, &block)
end
when Array
object.map {|e| _deep_transform_keys_in_object(e, &block) }
else
object
end
end
def _deep_transform_keys_in_object!(object, &block)
case object
when Hash
object.keys.each do |key|
value = object.delete(key)
object[yield(key)] = _deep_transform_keys_in_object!(value, &block)
end
object
when Array
object.map! {|e| _deep_transform_keys_in_object!(e, &block)}
else
object
end
end
end
end
# Useful hash method necessary for determining if a dataset
# is valid; we remove all empty elements until just key/vals
# with non-empty error message arrays are left
class Hash
# Returns a hash, removing all elements that
# respond to and return true from :empty?
# Iterates recursively over nested hashes
# Will continue to call itself until the second run
# does not differ from the first (kind of gross)
# TODO: Try to make this less gross
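  #
  # A hypothetical illustration (not part of the original source):
  #
  #   { a: {}, b: { c: [], d: ['boom'] }, e: nil }.deep_compact
  #   # => { b: { d: ["boom"] } }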
def deep_compact
result = dup.deep_compact!
result2 = result.dup.deep_compact!
    # Keep compacting until a pass produces no further changes
    result = result2.deep_compact if result != result2
result
end
def deep_compact!
each_pair do |current_key, value|
this_value = self[current_key]
if this_value.respond_to?(:empty?)
if this_value.empty?
self.delete current_key
elsif this_value.is_a?(Hash)
self[current_key] = this_value.deep_compact
elsif this_value.is_a?(Array)
if this_value.all? { |v| v.respond_to?(:empty?) && v.empty? }
self.delete current_key
elsif this_value.all? { |v| v.respond_to?(:deep_compact) }
self[current_key] = this_value.map(&:deep_compact)
end
end
else
self.delete current_key if this_value.nil?
end
end
end
end | 33.022642 | 146 | 0.616501 |
e9a0841ac75c1dea9b080d2570dc3c24e74d2e99 | 16,696 | require "concurrent/atomic/atomic_reference"
require "logstash/plugin_mixins/elasticsearch/noop_license_checker"
module LogStash; module Outputs; class ElasticSearch; class HttpClient;
class Pool
class NoConnectionAvailableError < Error; end
class BadResponseCodeError < Error
attr_reader :url, :response_code, :request_body, :response_body
def initialize(response_code, url, request_body, response_body)
@response_code = response_code
@url = url
@request_body = request_body
@response_body = response_body
end
def message
"Got response code '#{response_code}' contacting Elasticsearch at URL '#{@url}'"
end
end
    class HostUnreachableError < Error
attr_reader :original_error, :url
def initialize(original_error, url)
@original_error = original_error
@url = url
end
def message
"Elasticsearch Unreachable: [#{@url}][#{original_error.class}] #{original_error.message}"
end
end
attr_reader :logger, :adapter, :sniffing, :sniffer_delay, :resurrect_delay, :healthcheck_path, :sniffing_path, :bulk_path
attr_reader :license_checker # license_checker is used by the pool specs
ROOT_URI_PATH = '/'.freeze
LICENSE_PATH = '/_license'.freeze
VERSION_6_TO_7 = ::Gem::Requirement.new([">= 6.0.0", "< 7.0.0"])
VERSION_7_TO_7_14 = ::Gem::Requirement.new([">= 7.0.0", "< 7.14.0"])
DEFAULT_OPTIONS = {
:healthcheck_path => ROOT_URI_PATH,
:sniffing_path => "/_nodes/http",
:bulk_path => "/_bulk",
:scheme => 'http',
:resurrect_delay => 5,
:sniffing => false,
:sniffer_delay => 10,
}.freeze
def initialize(logger, adapter, initial_urls=[], options={})
@logger = logger
@adapter = adapter
@metric = options[:metric]
@initial_urls = initial_urls
raise ArgumentError, "No URL Normalizer specified!" unless options[:url_normalizer]
@url_normalizer = options[:url_normalizer]
DEFAULT_OPTIONS.merge(options).tap do |merged|
@bulk_path = merged[:bulk_path]
@sniffing_path = merged[:sniffing_path]
@healthcheck_path = merged[:healthcheck_path]
@resurrect_delay = merged[:resurrect_delay]
@sniffing = merged[:sniffing]
@sniffer_delay = merged[:sniffer_delay]
end
# Used for all concurrent operations in this class
@state_mutex = Mutex.new
# Holds metadata about all URLs
@url_info = {}
@stopping = false
@license_checker = options[:license_checker] || LogStash::PluginMixins::ElasticSearch::NoopLicenseChecker::INSTANCE
@last_es_version = Concurrent::AtomicReference.new
end
def start
update_initial_urls
start_resurrectionist
start_sniffer if @sniffing
end
def update_initial_urls
update_urls(@initial_urls)
end
def close
@state_mutex.synchronize { @stopping = true }
logger.debug "Stopping sniffer"
stop_sniffer
logger.debug "Stopping resurrectionist"
stop_resurrectionist
logger.debug "Waiting for in use manticore connections"
wait_for_in_use_connections
logger.debug("Closing adapter #{@adapter}")
@adapter.close
end
def wait_for_in_use_connections
until in_use_connections.empty?
logger.info "Blocked on shutdown to in use connections #{@state_mutex.synchronize {@url_info}}"
sleep 1
end
end
def in_use_connections
@state_mutex.synchronize { @url_info.values.select {|v| v[:in_use] > 0 } }
end
def alive_urls_count
@state_mutex.synchronize { @url_info.values.select {|v| v[:state] == :alive }.count }
end
def url_info
@state_mutex.synchronize { @url_info }
end
def urls
url_info.keys
end
def until_stopped(task_name, delay)
last_done = Time.now
until @state_mutex.synchronize { @stopping }
begin
now = Time.now
if (now - last_done) >= delay
last_done = now
yield
end
sleep 1
rescue => e
logger.warn(
"Error while performing #{task_name}",
:error_message => e.message,
:class => e.class.name,
:backtrace => e.backtrace
)
end
end
end
def start_sniffer
@sniffer = Thread.new do
until_stopped("sniffing", sniffer_delay) do
begin
sniff!
rescue NoConnectionAvailableError => e
@state_mutex.synchronize { # Synchronize around @url_info
logger.warn("Elasticsearch output attempted to sniff for new connections but cannot. No living connections are detected. Pool contains the following current URLs", :url_info => @url_info) }
end
end
end
end
# Sniffs the cluster then updates the internal URLs
def sniff!
update_urls(check_sniff)
end
ES1_SNIFF_RE_URL = /\[([^\/]*)?\/?([^:]*):([0-9]+)\]/
ES2_AND_ABOVE_SNIFF_RE_URL = /([^\/]*)?\/?([^:]*):([0-9]+)/
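    # Illustrative only -- assumed publish_address formats (not part of the original source):
    #   ES 1.x:  "inet[/172.17.0.5:9200]"  -> host "172.17.0.5", port "9200"
    #   ES 2.x+: "172.17.0.5:9200"         -> host "172.17.0.5", port "9200"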
# Sniffs and returns the results. Does not update internal URLs!
def check_sniff
_, url_meta, resp = perform_request(:get, @sniffing_path)
@metric.increment(:sniff_requests)
parsed = LogStash::Json.load(resp.body)
nodes = parsed['nodes']
if !nodes || nodes.empty?
@logger.warn("Sniff returned no nodes! Will not update hosts.")
return nil
else
sniff(nodes)
end
end
def major_version(version_string)
version_string.split('.').first.to_i
end
def sniff(nodes)
nodes.map do |id,info|
# Skip master-only nodes
next if info["roles"] && info["roles"] == ["master"]
address_str_to_uri(info["http"]["publish_address"]) if info["http"]
end.compact
end
def address_str_to_uri(addr_str)
matches = addr_str.match(ES1_SNIFF_RE_URL) || addr_str.match(ES2_AND_ABOVE_SNIFF_RE_URL)
if matches
host = matches[1].empty? ? matches[2] : matches[1]
::LogStash::Util::SafeURI.new("#{host}:#{matches[3]}")
end
end
def stop_sniffer
@sniffer.join if @sniffer
end
def sniffer_alive?
@sniffer ? @sniffer.alive? : nil
end
def start_resurrectionist
@resurrectionist = Thread.new do
until_stopped("resurrection", @resurrect_delay) do
healthcheck!(false)
end
end
end
# Retrieve ES node license information
# @param url [LogStash::Util::SafeURI] ES node URL
# @return [Hash] deserialized license document or empty Hash upon any error
def get_license(url)
response = perform_request_to_url(url, :get, LICENSE_PATH)
LogStash::Json.load(response.body)
rescue => e
logger.error("Unable to get license information", url: url.sanitized.to_s, exception: e.class, message: e.message)
{}
end
def health_check_request(url)
logger.debug("Running health check to see if an ES connection is working", url: url.sanitized.to_s, path: @healthcheck_path)
perform_request_to_url(url, :head, @healthcheck_path)
end
def healthcheck!(register_phase = true)
# Try to keep locking granularity low such that we don't affect IO...
@state_mutex.synchronize { @url_info.select {|url,meta| meta[:state] != :alive } }.each do |url,meta|
begin
health_check_request(url)
# when called from resurrectionist skip the product check done during register phase
if register_phase
if !elasticsearch?(url)
raise LogStash::ConfigurationError, "Could not connect to a compatible version of Elasticsearch"
end
end
# If no exception was raised it must have succeeded!
logger.warn("Restored connection to ES instance", url: url.sanitized.to_s)
# We reconnected to this node, check its ES version
es_version = get_es_version(url)
@state_mutex.synchronize do
meta[:version] = es_version
set_last_es_version(es_version, url)
alive = @license_checker.appropriate_license?(self, url)
meta[:state] = alive ? :alive : :dead
end
rescue HostUnreachableError, BadResponseCodeError => e
logger.warn("Attempted to resurrect connection to dead ES instance, but got an error", url: url.sanitized.to_s, exception: e.class, message: e.message)
end
end
end
def elasticsearch?(url)
begin
response = perform_request_to_url(url, :get, ROOT_URI_PATH)
rescue ::LogStash::Outputs::ElasticSearch::HttpClient::Pool::BadResponseCodeError => e
        return false if e.response_code == 401 || e.response_code == 403
raise e
end
version_info = LogStash::Json.load(response.body)
return false if version_info['version'].nil?
version = ::Gem::Version.new(version_info["version"]['number'])
return false if version < ::Gem::Version.new('6.0.0')
if VERSION_6_TO_7.satisfied_by?(version)
return valid_tagline?(version_info)
elsif VERSION_7_TO_7_14.satisfied_by?(version)
build_flavor = version_info["version"]['build_flavor']
return false if build_flavor.nil? || build_flavor != 'default' || !valid_tagline?(version_info)
else
# case >= 7.14
lower_headers = response.headers.transform_keys {|key| key.to_s.downcase }
product_header = lower_headers['x-elastic-product']
return false if product_header != 'Elasticsearch'
end
return true
rescue => e
logger.error("Unable to retrieve Elasticsearch version", url: url.sanitized.to_s, exception: e.class, message: e.message)
false
end
def valid_tagline?(version_info)
tagline = version_info['tagline']
tagline == "You Know, for Search"
end
def stop_resurrectionist
@resurrectionist.join if @resurrectionist
end
def resurrectionist_alive?
@resurrectionist ? @resurrectionist.alive? : nil
end
def perform_request(method, path, params={}, body=nil)
with_connection do |url, url_meta|
resp = perform_request_to_url(url, method, path, params, body)
[url, url_meta, resp]
end
end
[:get, :put, :post, :delete, :patch, :head].each do |method|
define_method(method) do |path, params={}, body=nil|
_, _, response = perform_request(method, path, params, body)
response
end
end
def perform_request_to_url(url, method, path, params={}, body=nil)
res = @adapter.perform_request(url, method, path, params, body)
rescue *@adapter.host_unreachable_exceptions => e
raise HostUnreachableError.new(e, url), "Could not reach host #{e.class}: #{e.message}"
end
def normalize_url(uri)
u = @url_normalizer.call(uri)
if !u.is_a?(::LogStash::Util::SafeURI)
raise "URL Normalizer returned a '#{u.class}' rather than a SafeURI! This shouldn't happen!"
end
u
end
def update_urls(new_urls)
return if new_urls.nil?
# Normalize URLs
new_urls = new_urls.map(&method(:normalize_url))
# Used for logging nicely
state_changes = {:removed => [], :added => []}
@state_mutex.synchronize do
# Add new connections
new_urls.each do |url|
# URI objects don't have real hash equality! So, since this isn't perf sensitive we do a linear scan
unless @url_info.keys.include?(url)
state_changes[:added] << url
add_url(url)
end
end
# Delete connections not in the new list
@url_info.each do |url,_|
unless new_urls.include?(url)
state_changes[:removed] << url
remove_url(url)
end
end
end
if state_changes[:removed].size > 0 || state_changes[:added].size > 0
logger.info? && logger.info("Elasticsearch pool URLs updated", :changes => state_changes)
end
# Run an inline healthcheck anytime URLs are updated
# This guarantees that during startup / post-startup
# sniffing we don't have idle periods waiting for the
# periodic sniffer to allow new hosts to come online
healthcheck!
end
def size
@state_mutex.synchronize { @url_info.size }
end
def add_url(url)
@url_info[url] ||= empty_url_meta
end
def remove_url(url)
@url_info.delete(url)
end
def empty_url_meta
{
:in_use => 0,
:state => :unknown
}
end
def with_connection
url, url_meta = get_connection
# Custom error class used here so that users may retry attempts if they receive this error
# should they choose to
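      # Illustrative only (hypothetical caller, not part of the original source):
      #
      #   begin
      #     pool.with_connection { |url, _meta| do_something_with(url) }
      #   rescue NoConnectionAvailableError
      #     sleep 1
      #     retry
      #   end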
raise NoConnectionAvailableError, "No Available connections" unless url
yield url, url_meta
rescue HostUnreachableError => e
# Mark the connection as dead here since this is likely not transient
mark_dead(url, e)
raise e
rescue BadResponseCodeError => e
# These aren't discarded from the pool because these are often very transient
# errors
raise e
ensure
return_connection(url)
end
def mark_dead(url, error)
@state_mutex.synchronize do
meta = @url_info[url]
        # If a sniff removed this URL's metadata just before we got here, there's nothing to mark
# This is an extreme edge case, but it can happen!
return unless meta
logger.warn("Marking url as dead. Last error: [#{error.class}] #{error.message}",
:url => url, :error_message => error.message, :error_class => error.class.name)
meta[:state] = :dead
meta[:last_error] = error
meta[:last_errored_at] = Time.now
end
end
def url_meta(url)
@state_mutex.synchronize do
@url_info[url]
end
end
def get_connection
@state_mutex.synchronize do
# The goal here is to pick a random connection from the least-in-use connections
# We want some randomness so that we don't hit the same node over and over, but
# we also want more 'fair' behavior in the event of high concurrency
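        # Illustrative only: with in-use counts of { a => 0, b => 2, c => 0 } and all
        # three URLs alive, the eligible set holds a and c, and one is sampled at random.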
eligible_set = nil
lowest_value_seen = nil
@url_info.each do |url,meta|
meta_in_use = meta[:in_use]
next if meta[:state] == :dead
if lowest_value_seen.nil? || meta_in_use < lowest_value_seen
lowest_value_seen = meta_in_use
eligible_set = [[url, meta]]
elsif lowest_value_seen == meta_in_use
eligible_set << [url, meta]
end
end
return nil if eligible_set.nil?
pick, pick_meta = eligible_set.sample
pick_meta[:in_use] += 1
[pick, pick_meta]
end
end
def return_connection(url)
@state_mutex.synchronize do
info = @url_info[url]
info[:in_use] -= 1 if info # Guard against the condition where the connection has already been deleted
end
end
def get_es_version(url)
request = perform_request_to_url(url, :get, ROOT_URI_PATH)
LogStash::Json.load(request.body)["version"]["number"] # e.g. "7.10.0"
end
def last_es_version
@last_es_version.get
end
def maximum_seen_major_version
@state_mutex.synchronize { @maximum_seen_major_version }
end
private
# @private executing within @state_mutex
def set_last_es_version(version, url)
@last_es_version.set(version)
major = major_version(version)
if @maximum_seen_major_version.nil?
@logger.info("Elasticsearch version determined (#{version})", es_version: major)
set_maximum_seen_major_version(major)
elsif major > @maximum_seen_major_version
warn_on_higher_major_version(major, url)
@maximum_seen_major_version = major
end
end
def set_maximum_seen_major_version(major)
if major >= 6
@logger.warn("Detected a 6.x and above cluster: the `type` event field won't be used to determine the document _type", es_version: major)
end
@maximum_seen_major_version = major
end
def warn_on_higher_major_version(major, url)
@logger.warn("Detected a node with a higher major version than previously observed, " +
"this could be the result of an Elasticsearch cluster upgrade",
previous_major: @maximum_seen_major_version, new_major: major, node_url: url.sanitized.to_s)
end
end
end; end; end; end;
| 32.48249 | 203 | 0.639974 |
18249a754e9f73f2357906032ad5e9237947263c | 2,432 | module ActiveWindow
# The ListItem is created on the fly
# * to oo-abstract methods to its TreeIter
# * for iteration in TreeController
# * as object in ActiveTreeStore
class ListedItem
def initialize(opts = {})
@traversed = false
self
end
def sort
"item-#{iter}"
end
def icon_name
nil
end
def icon
VimMate::Icons.by_name icon_name
end
# New by Gtk::TreeRowReference
def self.new_by_reference(reference)
new(:iter => reference.iter)
end
# New by Gtk::TreeIter
def self.new_by_iter(iter)
new(:iter => iter)
end
def message?
referenced_type == 'VimMate::Message'
end
def separator?
referenced_type == 'VimMate::Separator'
end
def file?
false
end
def directory?
false
end
def file_or_directory?
false
end
def show!
self.visible = true
      # TODO hide separator
#if directory? and Config[:file_directory_separator]
# if iter.next!
# tree.row_for_iter(iter).show!
# end
#end
end
def hide!
self.visible = false
      # TODO hide separator
#if directory? and Config[:file_directory_separator]
# if iter.next!
# tree.row_for_iter(iter).hide!
# end
#end
end
def visible?
visible
end
def matches?(str)
name.index(str)
end
def refresh
end
def reference
@reference ||= Gtk::TreeRowReference.new(tree.store, iter.path)
end
def reference=(new_ref)
if new_ref
@reference = new_ref
@iter = tree.store.get_iter(new_ref.path)
end
end
def self.build(attrs)
if full_path = attrs[:full_path]
if ::File.directory? full_path
'ListedDirectory'
elsif ::File.file? full_path
'ListedFile'
else
self
end
elsif iter = attrs[:iter] and !iter[REFERENCED_TYPE].nil?
iter[REFERENCED_TYPE]
else
self
end.constantize.new attrs
end
def to_s
"#{self.class} [#{iter.path}]"
end
def self.modify_icon(scope, &block)
with = "icon_name_with_#{scope}"
without = "icon_name_without_#{scope}"
define_method with do
block.call(self)
end
alias_method without, :icon_name
alias_method :icon_name, with
end
end
end
| 19 | 69 | 0.585526 |
ab87355a0075cb370cd7e61e9c0de7918aaceab4 | 446 | # frozen_string_literal: true
module FakeActivity
class DateValidator
attr_reader :date
def initialize(date)
@date = date
end
def valid?
date_exist? && date_valid?
end
def message
"#{date} is an invalid argument" unless valid?
end
private
def date_exist?
date
end
def date_valid?
Date.parse(date)
true
rescue ArgumentError
false
end
end
end
| 13.515152 | 52 | 0.609865 |
28f36c4796245103f9d8d45082eb3402a5583ebf | 12,132 | require 'rails_helper'
require 'locale'
RSpec.describe Tokens::Type::AuthorizationCode, type: :oauth2 do
include Locale
before(:all) do
Tokens::Type::AuthorizationCode.send(
:public, *Tokens::Type::AuthorizationCode.protected_instance_methods)
end
subject(:auth_code_token) { Tokens::Type::AuthorizationCode.new }
let(:redirect_url) { 'http://test.com' }
let(:client) { create :client, pkce: false }
let(:grant_type) { 'authorization_code' }
describe '.type_name' do
subject { auth_code_token.type_name }
it { is_expected.to eq(grant_type) }
end
describe '.access_token' do
let(:expired_token) do
create :access_token, token: SecureRandom.uuid,
expires: (Time.now - 10.minutes),
refresh: false, grant_type: grant_type
end
let(:authorization) do
create(:authorization_code, client: client,
redirect_url: redirect_url,
access_tokens: [expired_token],
code: SecureRandom.uuid, expires: Time.now + 10.minutes)
end
subject { auth_code_token.access_token(authorization.code) }
it { is_expected.to_not be_empty }
it { is_expected.to have_key(:access_token) }
it { is_expected.to have_key(:expires_in) }
it { expect(subject[:expires_in]).to_not be_nil }
it { expect(subject[:access_token]).to_not be_nil }
it { expect(subject[:expires_in]).to_not eq(Time.now) }
it { expect(subject[:access_token]).to_not eq(expired_token[:access_token]) }
end
describe '.refresh_token' do
let(:expired_token) do
create :access_token, token: SecureRandom.uuid,
expires: (Time.now - 10.minutes),
refresh: true, grant_type: grant_type
end
let(:authorization) do
create(:authorization_code, client: client,
redirect_url: redirect_url,
access_tokens: [expired_token],
code: SecureRandom.uuid, expires: Time.now + 10.minutes)
end
subject { auth_code_token.refresh_token(authorization.code) }
it { is_expected.to_not be_empty }
it { is_expected.to have_key(:access_token) }
it { is_expected.to have_key(:expires_in) }
it { expect(subject[:expires_in]).to_not be_nil }
it { expect(subject[:access_token]).to_not be_nil }
it { expect(subject[:expires_in]).to_not eq(Time.now) }
it { expect(subject[:access_token]).to_not eq(expired_token[:access_token]) }
end
describe '.refresh_validate' do
let(:auth_params) { AuthParams.new({ refresh_token: 'token' }, {}) }
let(:errors) { [user_err(:refresh_invalid_token)] }
subject { auth_code_token.refresh_validate(auth_params) }
it { is_expected.to match_array(errors) }
end
describe '.token_validate' do
context 'with invalid authorization code' do
let(:params) { { code: 'code' } }
let(:auth_params) { AuthParams.new(params, {}) }
let(:errors) { [user_err(:auth_code_invalid)] }
subject { auth_code_token.token_validate(auth_params) }
it { is_expected.to match_array(errors) }
end
context 'with a valid authorization code but invalid headers' do
let(:authorization) do
create(:authorization_code, client: client,
redirect_url: redirect_url,
code: SecureRandom.uuid, expires: Time.now + 10.minutes)
end
let(:auth_params) { AuthParams.new({ code: authorization.code }, {}) }
let(:errors) { [user_err(:auth_code_invalid_client_or_secret)] }
subject { auth_code_token.token_validate(auth_params) }
it { is_expected.to match_array(errors) }
end
context 'with an expired authorization code and invalid headers' do
let(:authorization) do
create(:authorization_code, client: client,
redirect_url: redirect_url,
code: SecureRandom.uuid, expires: Time.now - 10.minutes)
end
let(:auth_params) { AuthParams.new({ code: authorization.code }, {}) }
let(:errors) { [user_err(:auth_code_invalid_client_or_secret), user_err(:auth_code_expired)] }
subject { auth_code_token.token_validate(auth_params) }
it { is_expected.to match_array(errors) }
end
context 'with an invalid client id and secret' do
let(:authorization) do
create(:authorization_code, client: client,
redirect_url: redirect_url,
code: SecureRandom.uuid, expires: Time.now + 10.minutes)
end
let(:auth_params) do
AuthParams.new(
{ code: authorization.code },
'Authorization' => 'err:err'
)
end
let(:errors) { [user_err(:auth_code_invalid_client_or_secret)] }
subject { auth_code_token.token_validate(auth_params) }
it { is_expected.to match_array(errors) }
end
context 'with an invalid code verifier' do
let(:client) { create :client, pkce: true, secret: 'secret' }
let(:code_challenge) { SecureRandom.uuid }
let(:authorization) do
create(:authorization_code, client: client,
redirect_url: redirect_url,
code_challenge_method: 'SHA256',
code_challenge: code_challenge,
code: SecureRandom.uuid, expires: Time.now + 10.minutes)
end
let(:auth_params) do
        AuthParams.new(
          { code: authorization.code, code_verifier: SecureRandom.uuid },
          'Authorization' => "#{client.uid}:#{Base64.encode64(client.secret)}"
        )
end
let(:errors) { [user_err(:auth_code_invalid_grant_error)] }
subject { auth_code_token.token_validate(auth_params) }
it { is_expected.to match_array(errors) }
end
end
describe '.is_valid' do
context 'with the action :index it validates an access token request' do
let(:params) { { code: 'code', action: :index.to_s } }
let(:auth_params) { AuthParams.new(params, {}) }
let(:errors) { [user_err(:auth_code_invalid)] }
subject { auth_code_token.token_validate(auth_params) }
it { is_expected.to match_array(errors) }
end
context 'with the action :create it validates a refresh request' do
let(:auth_params) do
AuthParams.new({ refresh_token: 'token', action: :create.to_s }, {})
end
let(:errors) { [user_err(:refresh_invalid_token)] }
subject { auth_code_token.refresh_validate(auth_params) }
it { is_expected.to match_array(errors) }
end
context 'with the action :destroy it validates a revoke request' do
pending
end
context 'with an invalid action it raises an exception' do
let(:auth_params) do
AuthParams.new({ refresh_token: 'token' }, {})
end
subject { auth_code_token.refresh_validate(auth_params) }
end
end
describe '.token' do
let(:authorization) do
create(:authorization_code, client: client,
redirect_url: redirect_url,
code: SecureRandom.uuid, expires: Time.now + 10.minutes)
end
context 'with a required refresh token' do
let(:params) { { code: authorization.code, refresh: true } }
let(:auth_params) { AuthParams.new(params, {}) }
subject { auth_code_token.token(auth_params) }
it { is_expected.to_not be_empty }
it { is_expected.to have_key(:access_token) }
it { is_expected.to have_key(:expires_in) }
it { is_expected.to have_key(:refresh_token) }
it { expect(subject[:expires_in]).to_not be_nil }
it { expect(subject[:access_token]).to_not be_nil }
it { expect(subject[:expires_in]).to_not eq(Time.now) }
end
context 'with a correlated refresh token' do
let(:params) { { code: authorization.code, refresh: true } }
let(:auth_params) { AuthParams.new(params, {}) }
let(:token) { auth_code_token.token(auth_params) }
let(:correlation_uid) do
::AccessToken.find_by_token(token[:access_token]).correlation_uid
end
subject do
::AccessToken.find_by_token(token[:refresh_token]).correlation_uid
end
it { is_expected.to eq(correlation_uid) }
end
context 'without a refresh token' do
let(:params) { { code: authorization.code, refresh: false } }
let(:auth_params) { AuthParams.new(params, {}) }
subject { auth_code_token.token(auth_params) }
it { is_expected.to_not be_empty }
it { is_expected.to have_key(:access_token) }
it { is_expected.to have_key(:expires_in) }
it { is_expected.to_not have_key(:refresh_token) }
it { expect(subject[:expires_in]).to_not be_nil }
it { expect(subject[:access_token]).to_not be_nil }
it { expect(subject[:expires_in]).to_not eq(Time.now) }
end
end
describe '.refresh' do
let(:refresh_token) do
create :access_token, token: SecureRandom.uuid,
expires: (Time.now + 10.minutes),
refresh: true, grant_type: grant_type,
correlation_uid: SecureRandom.uuid
end
let(:authorization) do
create(:authorization_code, client: client,
redirect_url: redirect_url,
access_tokens: [refresh_token],
code: SecureRandom.uuid, expires: Time.now + 10.minutes)
end
describe 'generate a valid access token' do
let(:params) do
{ refresh_token: authorization.access_tokens.first.token,
refresh: true }
end
let(:auth_params) { AuthParams.new(params, {}) }
subject { auth_code_token.refresh(auth_params) }
it { is_expected.to_not be_empty }
it { is_expected.to have_key(:access_token) }
it { is_expected.to have_key(:expires_in) }
it { is_expected.to have_key(:refresh_token) }
it { expect(subject[:expires_in]).to_not be_nil }
it { expect(subject[:access_token]).to_not be_nil }
it { expect(subject[:expires_in]).to_not eq(Time.now) }
end
describe 'generates a correlated access token' do
let(:params) do
{ refresh_token: authorization.access_tokens.first.token }
end
let(:auth_params) { AuthParams.new(params, {}) }
let(:token) { auth_code_token.refresh(auth_params) }
subject do
::AccessToken.find_by_token(token[:access_token]).correlation_uid
end
it { is_expected.to_not be_nil }
it { is_expected.to eq(refresh_token.correlation_uid) }
end
end
describe '.code_validate' do
pending "add some examples to (or delete) #{__FILE__}"
end
describe '.client_validate' do
pending "add some examples to (or delete) #{__FILE__}"
end
describe '.pkce_validate' do
pending "add some examples to (or delete) #{__FILE__}"
end
describe '.revoke_validate' do
context 'with an invalid authentication header' do
let(:params) do
{ token: '' }
end
let(:auth_params) { AuthParams.new(params, {}) }
subject { auth_code_token.revoke_validate(auth_params) }
let(:errors) { [user_err(:bad_auth_header)] }
it { is_expected.to match_array(errors) }
end
context 'with an invalid token' do
let(:access_token) do
create :access_token, token: SecureRandom.uuid,
expires: (Time.now + 10.minutes),
grant_type: grant_type,
correlation_uid: SecureRandom.uuid
end
let(:params) do
{ token: '' }
end
let(:auth_params) do
AuthParams.new(
params,
'Authorization' => "Bearer #{access_token.token}"
)
end
subject { auth_code_token.revoke_validate(auth_params) }
let(:errors) { [user_err(:token_invalid)] }
it { is_expected.to match_array(errors) }
end
end
describe '.generate_code_challenge' do
context 'with PLAIN code generation method' do
let(:code_verifier) { 'verified' }
subject{ auth_code_token.generate_code_challenge('PLAIN', code_verifier) }
it { is_expected.to eq(code_verifier) }
end
context 'with SHA256 code generation method' do
let(:code_verifier) { 'verified' }
subject { auth_code_token.generate_code_challenge('SHA256', code_verifier) }
it { is_expected.to_not be_nil }
end
end
end
| 39.647059 | 100 | 0.653396 |
ab5b9cabbaaa8961ce7bca1b7102f39fb64ca520 | 1,078 | module Petstore
#
class Category < BaseObject
attr_accessor :id, :name
# attribute mapping from ruby-style variable name to JSON key
def self.attribute_map
{
#
:'id' => :'id',
#
:'name' => :'name'
}
end
# attribute type
def self.swagger_types
{
:'id' => :'Integer',
:'name' => :'String'
}
end
def initialize(attributes = {})
return if !attributes.is_a?(Hash) || attributes.empty?
# convert string to symbol for hash key
attributes = attributes.inject({}){|memo,(k,v)| memo[k.to_sym] = v; memo}
if attributes[:'id']
self.id = attributes[:'id']
end
if attributes[:'name']
self.name = attributes[:'name']
end
end
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
id == o.id &&
name == o.name
end
def eql?(o)
self == o
end
def hash
[id, name].hash
end
end
end
| 17.966667 | 79 | 0.479592 |
bf86bc1b906ed478277e33c3f6c1519b4c2b5ebf | 1,754 | # frozen_string_literal: true
require "rails_helper"
describe AntivirusValidator do
let(:clean_file) { fixture_file_upload("files/clean_file.pdf") }
let(:infected_file) { fixture_file_upload("files/infected_file.pdf") }
before do
Ratonvirus.configure do |config|
config.scanner = :eicar
config.storage = :active_storage
end
end
context "with single clean file" do
it "is valid" do
a = Article.new
a.activestorage_file.attach(clean_file)
expect(a).to be_valid
end
end
context "with single infected file" do
it "is not valid" do
a = Article.new
a.activestorage_file.attach(infected_file)
expect(a).not_to be_valid
end
end
context "with multiple clean files" do
it "is valid" do
a = Article.new
10.times do
a.activestorage_files.attach(clean_file)
end
expect(a).to be_valid
end
end
context "with multiple infected files" do
it "is not valid" do
a = Article.new
10.times do
a.activestorage_files.attach(infected_file)
end
expect(a).not_to be_valid
end
end
context "with multiple files containing single infected file" do
it "is not valid" do
a = Article.new
10.times do
a.activestorage_files.attach(clean_file)
end
a.activestorage_files.attach(infected_file)
expect(a).not_to be_valid
end
end
context "with multiple files containing multiple infected files" do
it "is not valid" do
a = Article.new
10.times do
a.activestorage_files.attach(clean_file)
end
10.times do
a.activestorage_files.attach(infected_file)
end
expect(a).not_to be_valid
end
end
end
| 21.390244 | 72 | 0.660205 |
0894b93c461fe4643d3d1c88bac80db49bf95efa | 2,428 | # This now builds a version of JACKv1 which matches the current API
# for JACKv2. JACKv2 is not buildable on a number of macOS
# distributions, and the JACK team instead suggests installation of
# JACKOSX, a pre-built binary form for which the source is not available.
# If you require JACKv2, you should use that. Otherwise, this formula should
# operate fine.
# Please see https://github.com/Homebrew/homebrew/pull/22043 for more info
class Jack < Formula
desc "Audio Connection Kit"
homepage "http://jackaudio.org"
url "http://jackaudio.org/downloads/jack-audio-connection-kit-0.125.0.tar.gz"
sha256 "3517b5bff82139a76b2b66fe2fd9a3b34b6e594c184f95a988524c575b11d444"
revision 1
bottle do
sha256 "0a993d32dd74ce014e0c0aa5a04e632a7e4bca7bc6ced4afa9a7d717cc893f06" => :sierra
sha256 "abb9fc993cda86b4daf45f0d2a8c775716fec08fc016facd8151787ac06e60e4" => :el_capitan
sha256 "de96b9c43cb77f57d42ba02c1373b31a421ec485eafe401c11cc27c8c8c1838f" => :yosemite
sha256 "5ab5409b416b61fd92c1f5186b568a449ea86f3471c008d44995938dba3d4c87" => :x86_64_linux # glibc 2.19
end
depends_on "pkg-config" => :build
depends_on "berkeley-db"
depends_on "libsndfile"
depends_on "libsamplerate"
depends_on "util-linux" if OS.linux? # for libuuid
def install
# Makefile hardcodes Carbon header location
inreplace Dir["drivers/coreaudio/Makefile.{am,in}"],
"/System/Library/Frameworks/Carbon.framework/Headers/Carbon.h",
"#{MacOS.sdk_path}/System/Library/Frameworks/Carbon.framework/Headers/Carbon.h"
ENV["LINKFLAGS"] = ENV.ldflags
system "./configure", "--prefix=#{prefix}"
system "make", "install"
end
plist_options :manual => "jackd -d coreaudio"
def plist; <<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>WorkingDirectory</key>
<string>#{prefix}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/jackd</string>
<string>-d</string>
<string>coreaudio</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
</dict>
</plist>
EOS
end
test do
assert_match version.to_s, shell_output("#{bin}/jackd --version")
end
end
| 35.188406 | 107 | 0.707578 |
61e527384dab2079d5dbe941f8089655a8dc49a4 | 995 | module DataAggregation::Index
module EventData
module Transformer
def self.json
JSON
end
def self.instance(raw_data)
instance = EventSource::EventData::Write.build
instance.id = raw_data[:event_id]
instance.type = raw_data[:event_type]
instance.data = raw_data[:data]
instance.metadata = raw_data[:metadata]
instance
end
def self.raw_data(instance)
{
:event_id => instance.id,
:event_type => instance.type,
:data => instance.data,
:metadata => instance.metadata
}
end
module JSON
def self.read(text)
formatted_data = ::JSON.parse text, symbolize_names: true
Casing::Underscore.(formatted_data)
end
def self.write(raw_data)
formatted_data = Casing::Camel.(raw_data, symbol_to_string: true)
::JSON.pretty_generate formatted_data
end
end
end
end
end
| 24.875 | 75 | 0.59397 |
b9f43483efc587b948a96280eb69649c786e4453 | 2,091 | require 'spec_helper'
require 'pact_broker/pacts/pact_params'
module PactBroker
module Pacts
describe PactParams do
let(:body) { load_fixture('a_consumer-a_provider.json') }
let(:consumer_version_number) { '1.2.3' }
let(:headers) { { 'X-Pact-Consumer-Version' => consumer_version_number, 'Host' => 'example.org' } }
describe "from_request" do
context "from a PUT request" do
let(:request) { Webmachine::Request.new("PUT", "/", headers, body)}
let(:path_info) do
{
consumer_name: 'Consumer',
provider_name: 'Provider',
consumer_version_number: '1.2.3'
}
end
subject { PactParams.from_request(request, path_info) }
it "extracts the consumer name from the path" do
expect(subject.consumer_name).to eq "Consumer"
end
it "extracts the provider name from the path" do
expect(subject.provider_name).to eq "Provider"
end
it "extracts the consumer_version_number from the path" do
expect(subject.consumer_version_number).to eq "1.2.3"
end
it "extracts the json_content" do
expect(subject.json_content).to eq body
end
it "extracts the consumer name from the pact" do
expect(subject.consumer_name_in_pact).to eq "A Consumer"
end
it "extracts the provider name from the pact" do
expect(subject.provider_name_in_pact).to eq "A Provider"
end
context "with missing data" do
let(:body){ '' }
it "the consumer name from the pact is nil" do
expect(subject.consumer_name_in_pact).to be nil
end
it "the provider name from the pact is nil" do
expect(subject.provider_name_in_pact).to be nil
end
it "extracts the json_content" do
expect(subject.json_content).to eq ''
end
end
end
end
end
end
end
| 29.871429 | 105 | 0.58154 |
ffca618ff2074a0ef3ccc05050472c4528adde86 | 438 | require "hamster/deque"
require "hamster/read_copy_update"
module Hamster
# @api private
class MutableQueue
include ReadCopyUpdate
def self.[](*items)
MutableQueue.new(Deque[*items])
end
def enqueue(item)
transform { |queue| queue.enqueue(item) }
end
def dequeue
head = nil
transform do |queue|
head = queue.head
queue.dequeue
end
head
end
end
end
| 16.222222 | 47 | 0.618721 |
38df9f2db7fbb2d3a72afc005a7001cb6674a023 | 2,338 | #
# vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
#
# Copyright (c) 2016-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
use_inline_resources
def whyrun_supported?
true
end
def get_hdparm_value(param, device)
cmd = Mixlib::ShellOut.new("hdparm #{param} #{device}").run_command
cmd.error!
output = cmd.stdout
# Strip whitespace to make regex much cleaner
output.gsub!(/\s+/, '')
# Match anything besides whitespace between '=' and paren
re = /=([^\s])+\(/m
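  # Illustrative only -- for assumed `hdparm -W /dev/sda` output such as
  #   "/dev/sda: write-caching = 1 (on)"
  # the stripped string becomes "/dev/sda:write-caching=1(on)" and the capture
  # below yields "1".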
match_obj = re.match(output)
unless match_obj
fail 'Could not parse the output of hdparm'
end
match_data = match_obj[1]
if match_data =~ /^\s*$/
fail "Could not get hdparm value for: #{param}"
end
match_data.to_s.strip
end
def set_hdparm_values(values, device)
values.each do |key, val|
command = "hdparm #{key} #{val} #{device}"
s = Mixlib::ShellOut.new(command).run_command
s.error!
Chef::Log.info("Successfully set hdparm #{key} to #{val}.")
end
end
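# Illustrative only -- hypothetical attribute shape consumed by the :set action
# (not part of the original source):
#
#   node.default['fb_hdparm']['settings']['-W'] = 0   # disable write caching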
action :set do
values_to_set = {}
supported_opts = ['-W']
root_device = node.device_of_mount('/')
if root_device.start_with?('/dev/fio', '/dev/vd', '/dev/nvme')
Chef::Log.warn("Device #{root_device} is not supported by fb_hdparm.")
return
end
settings = node['fb_hdparm']['settings'].to_hash
settings.each do |option, desired_value|
unless supported_opts.include?(option)
Chef::Log.warn("Option #{option} is not yet supported by fb_hdparm." +
'Skipping.')
next
end
desired_value = desired_value.to_s
current_value = get_hdparm_value(option, root_device)
# Don't bother setting it if it's already correct
if current_value == desired_value
Chef::Log.debug("hdparm #{option} value already set to " +
"#{desired_value}.")
else
values_to_set[option] = desired_value
end
end
if values_to_set.empty?
Chef::Log.debug('All hdparm params are already set correctly')
else
converge_by "Set hdparm values for #{root_device}" do
set_hdparm_values(values_to_set, root_device)
end
end
end
| 29.225 | 77 | 0.682207 |
ac8b3c76f298449c3558f0d8840e073b90312561 | 960 | #
# Cookbook Name:: rabbitmq_test
# Recipe:: default
#
# Copyright 2012-2013, Opscode, Inc. <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
chef_gem "bunny"
include_recipe "rabbitmq::default"
# hack to give rabbit time to spin up before the tests, it seems
# to be responding that it has started before it really has
execute "sleep 10" do
action :nothing
subscribes :run, "service[#{node['rabbitmq']['service_name']}]", :delayed
end
| 32 | 75 | 0.746875 |
7a3e9d14ae48e347e95420f19f4277603134b329 | 2,452 | HeadsUp::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# Code is not reloaded between requests
config.cache_classes = true
# Full error reports are disabled and caching is turned on
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable Rails's static asset server (Apache or nginx will already do this)
config.serve_static_assets = false
# Compress JavaScripts and CSS
config.assets.compress = true
# Don't fallback to assets pipeline if a precompiled asset is missed
config.assets.compile = false
# Generate digests for assets URLs
config.assets.digest = true
# Defaults to Rails.root.join("public/assets")
# config.assets.manifest = YOUR_PATH
# Specifies the header that your server uses for sending files
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# See everything in the log (default is :info)
# config.log_level = :debug
# Prepend all log lines with the following tags
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets (application.js, application.css, and all non-JS/CSS are already added)
# config.assets.precompile += %w( search.js )
# Disable delivery errors, bad email addresses will be ignored
# config.action_mailer.raise_delivery_errors = false
# Enable threaded mode
# config.threadsafe!
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found)
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners
config.active_support.deprecation = :notify
# Log the query plan for queries taking more than this (works
# with SQLite, MySQL, and PostgreSQL)
# config.active_record.auto_explain_threshold_in_seconds = 0.5
end
| 36.058824 | 104 | 0.758564 |
f72562e3800036fc9842324199ea037e43c3b380 | 147 | require 'logger'
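# Illustrative usage (the including class name is hypothetical):
#   class Harvester
#     include Logging
#     def run
#       logger.info 'starting harvest'
#     end
#   end
# All includers share the single memoized Logger writing to goldenpeasant.log.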
module Logging
def logger
Logging.logger
end
def self.logger
@logger ||= Logger.new 'goldenpeasant.log'
end
end
| 12.25 | 46 | 0.687075 |
38916f7376c0417d3ef278f9f44c67a5171e9e61 | 6,132 | require "active_record"
require "rails"
require "active_model/railtie"
# For now, action_controller must always be present with
# rails, so let's make sure that it gets required before
# here. This is needed for correctly setting up the middleware.
# In the future, this might become an optional require.
require "action_controller/railtie"
module ActiveRecord
# = Active Record Railtie
class Railtie < Rails::Railtie # :nodoc:
config.active_record = ActiveSupport::OrderedOptions.new
config.app_generators.orm :active_record, :migration => true,
:timestamps => true
config.app_middleware.insert_after ::ActionDispatch::Callbacks,
ActiveRecord::QueryCache
config.app_middleware.insert_after ::ActionDispatch::Callbacks,
ActiveRecord::ConnectionAdapters::ConnectionManagement
config.action_dispatch.rescue_responses.merge!(
'ActiveRecord::RecordNotFound' => :not_found,
'ActiveRecord::StaleObjectError' => :conflict,
'ActiveRecord::RecordInvalid' => :unprocessable_entity,
'ActiveRecord::RecordNotSaved' => :unprocessable_entity
)
config.active_record.use_schema_cache_dump = true
config.active_record.maintain_test_schema = true
config.eager_load_namespaces << ActiveRecord
rake_tasks do
namespace :db do
task :load_config do
ActiveRecord::Tasks::DatabaseTasks.database_configuration = Rails.application.config.database_configuration
if defined?(ENGINE_PATH) && engine = Rails::Engine.find(ENGINE_PATH)
if engine.paths['db/migrate'].existent
ActiveRecord::Tasks::DatabaseTasks.migrations_paths += engine.paths['db/migrate'].to_a
end
end
end
end
load "active_record/railties/databases.rake"
end
# When loading console, force ActiveRecord::Base to be loaded
# to avoid cross references when loading a constant for the
# first time. Also, make it output to STDERR.
console do |app|
require "active_record/railties/console_sandbox" if app.sandbox?
require "active_record/base"
unless ActiveSupport::Logger.logger_outputs_to?(Rails.logger, STDERR, STDOUT)
console = ActiveSupport::Logger.new(STDERR)
Rails.logger.extend ActiveSupport::Logger.broadcast console
end
end
runner do
require "active_record/base"
end
initializer "active_record.initialize_timezone" do
ActiveSupport.on_load(:active_record) do
self.time_zone_aware_attributes = true
self.default_timezone = :utc
self.time_zone_aware_types = ActiveRecord::Base.time_zone_aware_types
end
end
initializer "active_record.logger" do
ActiveSupport.on_load(:active_record) { self.logger ||= ::Rails.logger }
end
initializer "active_record.migration_error" do
if config.active_record.delete(:migration_error) == :page_load
config.app_middleware.insert_after ::ActionDispatch::Callbacks,
ActiveRecord::Migration::CheckPending
end
end
initializer "active_record.check_schema_cache_dump" do
if config.active_record.delete(:use_schema_cache_dump)
config.after_initialize do |app|
ActiveSupport.on_load(:active_record) do
filename = File.join(app.config.paths["db"].first, "schema_cache.dump")
if File.file?(filename)
cache = Marshal.load File.binread filename
if cache.version == ActiveRecord::Migrator.current_version
self.connection.schema_cache = cache
self.connection_pool.schema_cache = cache.dup
else
warn "Ignoring db/schema_cache.dump because it has expired. The current schema version is #{ActiveRecord::Migrator.current_version}, but the one in the cache is #{cache.version}."
end
end
end
end
end
end
initializer "active_record.warn_on_records_fetched_greater_than" do
if config.active_record.warn_on_records_fetched_greater_than
ActiveSupport.on_load(:active_record) do
require 'active_record/relation/record_fetch_warning'
end
end
end
initializer "active_record.set_configs" do |app|
ActiveSupport.on_load(:active_record) do
app.config.active_record.each do |k,v|
send "#{k}=", v
end
end
end
# This sets the database configuration from Configuration#database_configuration
# and then establishes the connection.
initializer "active_record.initialize_database" do
ActiveSupport.on_load(:active_record) do
self.configurations = Rails.application.config.database_configuration
begin
establish_connection
rescue ActiveRecord::NoDatabaseError
warn <<-end_warning
Oops - You have a database configured, but it doesn't exist yet!
Here's how to get started:
1. Configure your database in config/database.yml.
2. Run `bin/rails db:create` to create the database.
3. Run `bin/rails db:setup` to load your database schema.
end_warning
raise
end
end
end
# Expose database runtime to controller for logging.
initializer "active_record.log_runtime" do
require "active_record/railties/controller_runtime"
ActiveSupport.on_load(:action_controller) do
include ActiveRecord::Railties::ControllerRuntime
end
end
initializer "active_record.set_reloader_hooks" do |app|
hook = app.config.reload_classes_only_on_change ? :to_prepare : :to_cleanup
ActiveSupport.on_load(:active_record) do
ActionDispatch::Reloader.send(hook) do
if ActiveRecord::Base.connected?
ActiveRecord::Base.clear_cache!
ActiveRecord::Base.clear_reloadable_connections!
end
end
end
end
initializer "active_record.add_watchable_files" do |app|
path = app.paths["db"].first
config.watchable_files.concat ["#{path}/schema.rb", "#{path}/structure.sql"]
end
end
end
| 35.04 | 195 | 0.691944 |
213661829f3ea5c258c1472c14a8db751ccfcecc | 632 | require 'test_helper'
module Pickwick
module API
module Models
class ConsumerTest < Test::Unit::TestCase
context "Consumer class" do
should "be able to find consumer by token" do
result = stub(records: [ FactoryGirl.build(:consumer, token: '123') ])
Consumer.expects(:search).with do |search|
search = MultiJson.load(search)
assert_equal '123', search['query']['match']['token']['query']
end.returns(result)
assert_equal '123', Consumer.find_by_token('123').token
end
end
end
end
end
end
| 22.571429 | 82 | 0.583861 |
f87ee6c7c2c61d2c488884077e113d3cd0bf9381 | 2,191 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
require File.expand_path('../shared/new', __FILE__)
describe "IO.open" do
it_behaves_like :io_new, :open
end
describe "IO.open" do
it_behaves_like :io_new_errors, :open
end
# These specs use a special mock helper to avoid mock
# methods from preventing IO#close from running and
# which would prevent the file referenced by @fd from
# being deleted on Windows.
describe "IO.open" do
before :each do
@name = tmp("io_open.txt")
@fd = new_fd @name
ScratchPad.clear
end
after :each do
rm_r @name
end
it "calls #close after yielding to the block" do
IO.open(@fd, "w") do |io|
IOSpecs.io_mock(io, :close) do
super()
ScratchPad.record :called
end
io.closed?.should be_false
end
ScratchPad.recorded.should == :called
end
it "propagates an exception raised by #close that is not a StandardError" do
lambda do
IO.open(@fd, "w") do |io|
IOSpecs.io_mock(io, :close) do
super()
ScratchPad.record :called
raise Exception
end
end
end.should raise_error(Exception)
ScratchPad.recorded.should == :called
end
it "propagates an exception raised by #close that is a StandardError" do
lambda do
IO.open(@fd, "w") do |io|
IOSpecs.io_mock(io, :close) do
super()
ScratchPad.record :called
raise StandardError
end
end
end.should raise_error(StandardError)
ScratchPad.recorded.should == :called
end
it "does not propagate a IOError with 'closed stream' message raised by #close" do
IO.open(@fd, "w") do |io|
IOSpecs.io_mock(io, :close) do
super()
ScratchPad.record :called
raise IOError, 'closed stream'
end
end
ScratchPad.recorded.should == :called
end
it "does not set last error when a IOError with 'closed stream' raised by #close" do
IO.open(@fd, "w") do |io|
IOSpecs.io_mock(io, :close) do
super()
raise IOError, 'closed stream'
end
end
$!.should == nil
end
end
| 25.183908 | 86 | 0.638065 |
39fdb8b8a7ff53779778457be495e2d0a9fb13c8 | 1,579 | #!/usr/bin/env ruby
#
# Filesize metrics
# ===
#
# Simple wrapper around `stat` for getting file size stats,
# in both, bytes and blocks.
#
# Based on: Curl HTTP Timings metric (Sensu Community Plugins) by Joe Miller
#
# Copyright 2014 Pablo Figue
#
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'socket'
require 'sensu-plugin/metric/cli'
class FilesizeMetrics < Sensu::Plugin::Metric::CLI::Graphite
option :filepath,
:short => '-f PATH',
:long => '--file PATH',
:description => 'Absolute path to file to measure',
:required => true
option :omitblocks,
:short => '-o',
:long => '--blocksno',
:description => 'Don\'t report size in blocks',
         :required => false,
:default => false
option :omitbytes,
:short => '-b',
:long => '--bytesno',
:description => 'Don\'t report size in bytes',
         :required => false,
:default => false
option :scheme,
:description => "Metric naming scheme, text to prepend to metric",
:short => "-s SCHEME",
:long => "--scheme SCHEME",
:required => true,
:default => "#{Socket.gethostname}.filesize"
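  # Illustrative output (hypothetical host, file and timestamp): running with
  # `-f /var/log/syslog -s myhost.filesize` emits Graphite lines such as
  #   myhost.filesize./var/log/syslog.bytes 4096 1614000000
  #   myhost.filesize./var/log/syslog.blocks 8 1614000000
  # unless the corresponding omit flag is given.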
def run
cmd = "/usr/bin/stat --format=\"%s,%b,\" #{config[:filepath]}"
output = `#{cmd}`
(bytes, blocks, _) = output.split(',')
unless config[:omitbytes]
output "#{config[:scheme]}.#{config[:filepath]}.bytes", bytes
end
unless config[:omitblocks]
output "#{config[:scheme]}.#{config[:filepath]}.blocks", blocks
end
ok
end
end
| 23.924242 | 76 | 0.618746 |
8749bf3ca333b237aa605256583670b37302c960 | 736 | # frozen_string_literal: true
require 'erb'
module GenronSF
module EBook
module TemplateUtil
CSS_FILE_PATH = File.expand_path('../../../assets/main.css', __dir__)
class << self
def title_xhtml(title)
ERB.new(title_template).result_with_hash(title: title)
end
def work_xhtml(work)
ERB.new(work_template).result_with_hash(work: work)
end
private
def title_template
@title_template ||= File.read(File.expand_path('../../../assets/title.xhtml.erb', __dir__))
end
def work_template
@work_template ||= File.read(File.expand_path('../../../assets/work.xhtml.erb', __dir__))
end
end
end
end
end
| 23 | 101 | 0.608696 |
38d2a2d5c4a046b4d782675bb4a822578975a65c | 3,206 | class AccountController < ApplicationController
before_action :authenticate_user!, except: [:consent, :privacy_policy, :terms_and_conditions]
before_action :fetch_social_profile, only: [:account, :update, :change_password]
# def view_consent
# @pc = YAML.load_file(Rails.root.join('lib', 'data', 'content', "consent.#{I18n.locale}.yml"))
# end
def privacy_policy
if params[:privacy_policy_read]
current_user.update_attribute(:accepted_privacy_policy_at, Time.zone.now)
if current_user.ready_for_research?
redirect_to (session[:return_to].present? ? session.delete(:return_to) : health_data_path), notice: "You have now signed the consent and are ready to participate in research. You can opt out any time by visiting your user account settings."
else
redirect_to consent_path, notice: "Please read over and accept the research consent before participating in research."
end
elsif params[:declined_to_participate]
current_user.revoke_consent
redirect_to health_data_path, notice: "You are not enrolled in research. If you ever change your mind, just visit your account settings to view the research consent and privacy policy again."
else
end
end
def consent
if params[:consent_read]
current_user.update_attribute(:accepted_consent_at, Time.zone.now)
if current_user.ready_for_research?
redirect_to (session[:return_to].present? ? session.delete(:return_to) : health_data_path), notice: "You have now signed the consent and are ready to participate in research."
else
redirect_to privacy_path, notice: "Please read over and accept the privacy policy before participating in research. You can opt out any time by visiting your user account settings."
end
elsif params[:declined_to_participate]
current_user.revoke_consent
redirect_to health_data_path, notice: "You are not enrolled in research. If you ever change your mind, just visit your account settings to view the research consent and privacy policy again."
else
end
end
def dashboard
end
def account
@user = current_user
@active_top_nav_link = :my_account
render layout: "account"
end
def fetch_social_profile
@social_profile = current_user.social_profile
end
def terms_and_conditions
end
def update
@user = User.find(current_user.id)
if @user.update(user_params)
redirect_to account_path, notice: "Your account settings have been successfully changed."
else
@update_for = :user_info
render "account"
end
end
def change_password
@user = User.find(current_user.id)
if @user.update_with_password(user_params)
      # Sign the user in again, bypassing validation, in case their password changed
sign_in @user, :bypass => true
redirect_to account_path, alert: "Your password has been changed."
else
@update_for = :password
render "account"
end
end
private
def user_params
# NOTE: Using `strong_parameters` gem
params.required(:user).permit(:email, :first_name, :last_name, :zip_code, :year_of_birth, :password, :password_confirmation, :current_password)
end
end
| 35.230769 | 248 | 0.733001 |
6a672712c51e939bf2bdef3f043ec1d7abc1cb9a | 1,911 | class Fish < Formula
desc "User-friendly command-line shell for UNIX-like operating systems"
homepage "https://fishshell.com"
url "https://github.com/fish-shell/fish-shell/releases/download/3.2.0/fish-3.2.0.tar.xz"
sha256 "4f0293ed9f6a6b77e47d41efabe62f3319e86efc8bf83cc58733044fbc6f9211"
license "GPL-2.0-only"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any, arm64_big_sur: "7071739f327b442f6d1bec65332c33d29cb0833fd601a215d4d3492dbf188614"
sha256 cellar: :any, big_sur: "9891254ae3507ac79a050fc5ef5a837820ab78f06ad7ab5495a61a3e83bfb970"
sha256 cellar: :any, catalina: "dffc718a031961c893db21b189b2ae81a4ed65b7f1d1ae77ac7a83fd6a62038a"
sha256 cellar: :any, mojave: "274a7590ffb5f2252ed2fc1ca97164c8ec77808312e48a2a0d794987e692767b"
sha256 cellar: :any_skip_relocation, x86_64_linux: "4b697491896f4bbe1c094dc6feeae03743ec849d976beabdf0a4d9b7dddd6419"
end
head do
url "https://github.com/fish-shell/fish-shell.git", shallow: false
depends_on "sphinx-doc" => :build
end
depends_on "cmake" => :build
depends_on "pcre2"
uses_from_macos "ncurses"
def install
# In Homebrew's 'superenv' sed's path will be incompatible, so
# the correct path is passed into configure here.
args = %W[
-Dextra_functionsdir=#{HOMEBREW_PREFIX}/share/fish/vendor_functions.d
-Dextra_completionsdir=#{HOMEBREW_PREFIX}/share/fish/vendor_completions.d
-Dextra_confdir=#{HOMEBREW_PREFIX}/share/fish/vendor_conf.d
]
system "cmake", ".", *std_cmake_args, *args
system "make", "install"
end
def post_install
(pkgshare/"vendor_functions.d").mkpath
(pkgshare/"vendor_completions.d").mkpath
(pkgshare/"vendor_conf.d").mkpath
end
test do
system "#{bin}/fish", "-c", "echo"
end
end
| 35.388889 | 122 | 0.70853 |
e908d7242528e0c2cc7b6cb843e029d0da44978c | 19,184 | # frozen_string_literal: true
require "spec_helper"
describe GraphQL::Schema::Resolver do
module ResolverTest
class LazyBlock
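      # Wraps a block whose evaluation is deferred until #value is called, e.g.
      # (illustrative): LazyBlock.new { 1 + 1 }.value #=> 2
      # The schema under test registers it via `lazy_resolve LazyBlock, :value`.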
      def initialize(&get_value)
        @get_value = get_value
      end
def value
@get_value.call
end
end
class BaseResolver < GraphQL::Schema::Resolver
end
class Resolver1 < BaseResolver
argument :value, Integer, required: false
type [Integer, null: true], null: false
def initialize(object:, context:)
super
if defined?(@value)
raise "The instance should start fresh"
end
@value = [100]
end
def resolve(value: nil)
@value << value
@value
end
end
class Resolver2 < Resolver1
argument :extra_value, Integer, required: true
def resolve(extra_value:, **_rest)
        value = super(**_rest)
value << extra_value
value
end
end
class Resolver3 < Resolver1
end
class Resolver4 < BaseResolver
type Integer, null: false
extras [:ast_node]
def resolve(ast_node:)
object.value + ast_node.name.size
end
end
class Resolver5 < Resolver4
end
class Resolver6 < Resolver1
type Integer, null: false
def resolve
self.class.complexity
end
end
class Resolver7 < Resolver6
complexity 2
end
class Resolver8 < Resolver7
end
class PrepResolver1 < BaseResolver
argument :int, Integer, required: true
undef_method :load_int
def load_int(i)
i * 10
end
type Integer, null: false
def resolve(int:)
int
end
private
def check_for_magic_number(int)
if int == 13
raise GraphQL::ExecutionError, "13 is unlucky!"
elsif int > 99
raise GraphQL::UnauthorizedError, "Top secret big number: #{int}"
else
int
end
end
end
class PrepResolver2 < PrepResolver1
def load_int(i)
LazyBlock.new {
super - 35
}
end
end
class PrepResolver3 < PrepResolver1
type Integer, null: true
def load_int(i)
check_for_magic_number(i)
end
end
class PrepResolver4 < PrepResolver3
def load_int(i)
LazyBlock.new {
super
}
end
end
class PrepResolver5 < PrepResolver1
type Integer, null: true
def ready?(int:)
check_for_magic_number(int)
end
end
class PrepResolver6 < PrepResolver5
def ready?(**args)
LazyBlock.new {
super
}
end
end
class PrepResolver7 < GraphQL::Schema::Mutation
argument :int, Integer, required: true
field :errors, [String], null: true
field :int, Integer, null: true
def ready?(int:)
if int == 13
return false, { errors: ["Bad number!"] }
else
true
end
end
def resolve(int:)
{ int: int }
end
end
module HasValue
include GraphQL::Schema::Interface
field :value, Integer, null: false
def self.resolve_type(obj, ctx)
if obj.is_a?(Integer)
IntegerWrapper
else
raise "Unexpected: #{obj.inspect}"
end
end
end
class IntegerWrapper < GraphQL::Schema::Object
implements HasValue
field :value, Integer, null: false, method: :object
end
class PrepResolver9 < BaseResolver
argument :int_id, ID, required: true, loads: HasValue
# Make sure the lazy object is resolved properly:
type HasValue, null: false
def object_from_id(type, id, ctx)
# Make sure a lazy object is handled appropriately
LazyBlock.new {
# Make sure that the right type ends up here
id.to_i + type.graphql_name.length
}
end
def resolve(int:)
int * 3
end
end
class PrepResolver9Array < BaseResolver
argument :int_ids, [ID], required: true, loads: HasValue, as: :ints
# Make sure the lazy object is resolved properly:
type [HasValue], null: false
def object_from_id(type, id, ctx)
# Make sure a lazy object is handled appropriately
LazyBlock.new {
# Make sure that the right type ends up here
id.to_i + type.graphql_name.length
}
end
def resolve(ints:)
ints.map { |int| int * 3}
end
end
class PrepResolver10 < BaseResolver
argument :int1, Integer, required: true
argument :int2, Integer, required: true, as: :integer_2
type Integer, null: true
def authorized?(int1:, integer_2:)
if int1 + integer_2 > context[:max_int]
raise GraphQL::ExecutionError, "Inputs too big"
elsif context[:min_int] && (int1 + integer_2 < context[:min_int])
false
else
true
end
end
def resolve(int1:, integer_2:)
int1 + integer_2
end
end
class PrepResolver11 < PrepResolver10
def authorized?(int1:, integer_2:)
LazyBlock.new { super(int1: int1 * 2, integer_2: integer_2) }
end
end
class PrepResolver12 < GraphQL::Schema::Mutation
argument :int1, Integer, required: true
argument :int2, Integer, required: true
field :error_messages, [String], null: true
field :value, Integer, null: true
def authorized?(int1:, int2:)
if int1 + int2 > context[:max_int]
return false, { error_messages: ["Inputs must be less than #{context[:max_int]} (but you provided #{int1 + int2})"] }
else
true
end
end
def resolve(int1:, int2:)
{ value: int1 + int2 }
end
end
class PrepResolver13 < PrepResolver12
def authorized?(int1:, int2:)
# Increment the numbers so we can be sure they're passing through here
LazyBlock.new { super(int1: int1 + 1, int2: int2 + 1) }
end
end
class PrepResolver14 < GraphQL::Schema::RelayClassicMutation
field :number, Integer, null: false
def authorized?
true
end
def resolve
{ number: 1 }
end
end
class MutationWithNullableLoadsArgument < GraphQL::Schema::Mutation
argument :label_id, ID, required: false, loads: HasValue
argument :label_ids, [ID], required: false, loads: HasValue
field :inputs, String, null: false
def resolve(**inputs)
{
inputs: JSON.dump(inputs)
}
end
end
class MutationWithRequiredLoadsArgument < GraphQL::Schema::Mutation
argument :label_id, ID, required: true, loads: HasValue
field :inputs, String, null: false
def resolve(**inputs)
{
inputs: JSON.dump(inputs)
}
end
end
class Mutation < GraphQL::Schema::Object
field :mutation_with_nullable_loads_argument, mutation: MutationWithNullableLoadsArgument
field :mutation_with_required_loads_argument, mutation: MutationWithRequiredLoadsArgument
end
class Query < GraphQL::Schema::Object
class CustomField < GraphQL::Schema::Field
def resolve_field(*args)
value = super
if @name == "resolver3"
value << -1
end
value
end
end
field_class(CustomField)
field :resolver_1, resolver: Resolver1
field :resolver_2, resolver: Resolver2
field :resolver_3, resolver: Resolver3
field :resolver_3_again, resolver: Resolver3, description: "field desc"
field :resolver_4, "Positional description", resolver: Resolver4
field :resolver_5, resolver: Resolver5
field :resolver_6, resolver: Resolver6
field :resolver_7, resolver: Resolver7
field :resolver_8, resolver: Resolver8
field :prep_resolver_1, resolver: PrepResolver1
field :prep_resolver_2, resolver: PrepResolver2
field :prep_resolver_3, resolver: PrepResolver3
field :prep_resolver_4, resolver: PrepResolver4
field :prep_resolver_5, resolver: PrepResolver5
field :prep_resolver_6, resolver: PrepResolver6
field :prep_resolver_7, resolver: PrepResolver7
field :prep_resolver_9, resolver: PrepResolver9
field :prep_resolver_9_array, resolver: PrepResolver9Array
field :prep_resolver_10, resolver: PrepResolver10
field :prep_resolver_11, resolver: PrepResolver11
field :prep_resolver_12, resolver: PrepResolver12
field :prep_resolver_13, resolver: PrepResolver13
field :prep_resolver_14, resolver: PrepResolver14
end
class Schema < GraphQL::Schema
query(Query)
mutation(Mutation)
lazy_resolve LazyBlock, :value
orphan_types IntegerWrapper
def object_from_id(id, ctx)
if id == "invalid"
nil
else
1
end
end
end
end
def exec_query(*args)
ResolverTest::Schema.execute(*args)
end
describe ".path" do
it "is the name" do
assert_equal "Resolver1", ResolverTest::Resolver1.path
end
it "is used for arguments and fields" do
assert_equal "Resolver1.value", ResolverTest::Resolver1.arguments["value"].path
assert_equal "PrepResolver7.int", ResolverTest::PrepResolver7.fields["int"].path
end
it "works on instances" do
r = ResolverTest::Resolver1.new(object: nil, context: nil)
assert_equal "Resolver1", r.path
end
end
it "gets initialized for each resolution" do
# State isn't shared between calls:
res = exec_query " { r1: resolver1(value: 1) r2: resolver1 }"
assert_equal [100, 1], res["data"]["r1"]
assert_equal [100, nil], res["data"]["r2"]
end
it "inherits type and arguments" do
res = exec_query " { r1: resolver2(value: 1, extraValue: 2) r2: resolver2(extraValue: 3) }"
assert_equal [100, 1, 2], res["data"]["r1"]
assert_equal [100, nil, 3], res["data"]["r2"]
end
it "uses the object's field_class" do
res = exec_query " { r1: resolver3(value: 1) r2: resolver3 }"
assert_equal [100, 1, -1], res["data"]["r1"]
assert_equal [100, nil, -1], res["data"]["r2"]
end
describe "resolve method" do
it "has access to the application object" do
res = exec_query " { resolver4 } ", root_value: OpenStruct.new(value: 4)
assert_equal 13, res["data"]["resolver4"]
end
it "gets extras" do
res = exec_query " { resolver4 } ", root_value: OpenStruct.new(value: 0)
assert_equal 9, res["data"]["resolver4"]
end
end
describe "extras" do
it "is inherited" do
res = exec_query " { resolver4 resolver5 } ", root_value: OpenStruct.new(value: 0)
assert_equal 9, res["data"]["resolver4"]
assert_equal 9, res["data"]["resolver5"]
end
end
describe "complexity" do
it "has default values" do
res = exec_query " { resolver6 } ", root_value: OpenStruct.new(value: 0)
assert_equal 1, res["data"]["resolver6"]
end
it "is inherited" do
res = exec_query " { resolver7 resolver8 } ", root_value: OpenStruct.new(value: 0)
assert_equal 2, res["data"]["resolver7"]
assert_equal 2, res["data"]["resolver8"]
end
end
describe "when applied to a field" do
it "gets the field's description" do
assert_nil ResolverTest::Schema.find("Query.resolver3").description
assert_equal "field desc", ResolverTest::Schema.find("Query.resolver3Again").description
assert_equal "Positional description", ResolverTest::Schema.find("Query.resolver4").description
end
it "gets the field's name" do
# Matching name:
assert ResolverTest::Schema.find("Query.resolver3")
# Mismatched name:
assert ResolverTest::Schema.find("Query.resolver3Again")
end
end
describe "preparing inputs" do
# Add assertions for a given field, assuming the behavior of `check_for_magic_number`
def add_error_assertions(field_name, description)
res = exec_query("{ int: #{field_name}(int: 13) }")
assert_nil res["data"].fetch("int"), "#{description}: no result for execution error"
assert_equal ["13 is unlucky!"], res["errors"].map { |e| e["message"] }, "#{description}: top-level error is added"
res = exec_query("{ int: #{field_name}(int: 200) }")
assert_nil res["data"].fetch("int"), "#{description}: No result for authorization error"
refute res.key?("errors"), "#{description}: silent auth failure (no top-level error)"
end
describe "ready?" do
it "can raise errors" do
res = exec_query("{ int: prepResolver5(int: 5) }")
assert_equal 50, res["data"]["int"]
add_error_assertions("prepResolver5", "ready?")
end
it "can raise errors in lazy sync" do
res = exec_query("{ int: prepResolver6(int: 5) }")
assert_equal 50, res["data"]["int"]
add_error_assertions("prepResolver6", "lazy ready?")
end
it "can return false and data" do
res = exec_query("{ int: prepResolver7(int: 13) { errors int } }")
assert_equal ["Bad number!"], res["data"]["int"]["errors"]
res = exec_query("{ int: prepResolver7(int: 213) { errors int } }")
assert_equal 213, res["data"]["int"]["int"]
end
end
describe "loading arguments" do
it "calls load methods and injects the return value" do
res = exec_query("{ prepResolver1(int: 5) }")
assert_equal 50, res["data"]["prepResolver1"], "The load multiplier was called"
end
it "supports lazy values" do
res = exec_query("{ prepResolver2(int: 5) }")
assert_equal 15, res["data"]["prepResolver2"], "The load multiplier was called"
end
it "supports raising GraphQL::UnauthorizedError and GraphQL::ExecutionError" do
res = exec_query("{ prepResolver3(int: 5) }")
assert_equal 5, res["data"]["prepResolver3"]
add_error_assertions("prepResolver3", "load_ hook")
end
it "supports raising errors from promises" do
res = exec_query("{ prepResolver4(int: 5) }")
assert_equal 5, res["data"]["prepResolver4"]
add_error_assertions("prepResolver4", "lazy load_ hook")
end
end
describe "validating arguments" do
describe ".authorized?" do
it "can raise an error to halt" do
res = exec_query("{ prepResolver10(int1: 5, int2: 6) }", context: { max_int: 9 })
assert_equal ["Inputs too big"], res["errors"].map { |e| e["message"] }
res = exec_query("{ prepResolver10(int1: 5, int2: 6) }", context: { max_int: 90 })
assert_equal 11, res["data"]["prepResolver10"]
end
it "uses the argument name provided in `as:`" do
res = exec_query("{ prepResolver10(int1: 5, int2: 6) }", context: { max_int: 90 })
assert_equal 11, res["data"]["prepResolver10"]
end
it "can return a lazy object" do
# This is too big because it's modified in the overridden authorized? hook:
res = exec_query("{ prepResolver11(int1: 3, int2: 5) }", context: { max_int: 9 })
assert_equal ["Inputs too big"], res["errors"].map { |e| e["message"] }
res = exec_query("{ prepResolver11(int1: 3, int2: 5) }", context: { max_int: 90 })
assert_equal 8, res["data"]["prepResolver11"]
end
it "can return data early" do
res = exec_query("{ prepResolver12(int1: 9, int2: 5) { errorMessages } }", context: { max_int: 9 })
assert_equal ["Inputs must be less than 9 (but you provided 14)"], res["data"]["prepResolver12"]["errorMessages"]
# This works
res = exec_query("{ prepResolver12(int1: 2, int2: 5) { value } }", context: { max_int: 9 })
assert_equal 7, res["data"]["prepResolver12"]["value"]
end
it "can return data early in a promise" do
# This is too big because it's modified in the overridden authorized? hook:
res = exec_query("{ prepResolver13(int1: 4, int2: 4) { errorMessages } }", context: { max_int: 9 })
assert_equal ["Inputs must be less than 9 (but you provided 10)"], res["data"]["prepResolver13"]["errorMessages"]
# This works
res = exec_query("{ prepResolver13(int1: 2, int2: 5) { value } }", context: { max_int: 9 })
assert_equal 7, res["data"]["prepResolver13"]["value"]
end
it "can return false to halt" do
str = <<-GRAPHQL
{
prepResolver10(int1: 5, int2: 10)
prepResolver11(int1: 3, int2: 5)
}
GRAPHQL
res = exec_query(str, context: { max_int: 100, min_int: 20 })
assert_equal({ "prepResolver10" => nil, "prepResolver11" => nil }, res["data"])
end
it "works with no arguments for RelayClassicMutation" do
res = exec_query("{ prepResolver14(input: {}) { number } }")
assert_equal 1, res["data"]["prepResolver14"]["number"]
end
end
end
describe "Loading inputs" do
it "calls object_from_id" do
res = exec_query('{ prepResolver9(intId: "5") { value } }')
# (5 + 8) * 3
assert_equal 39, res["data"]["prepResolver9"]["value"]
end
it "supports loading array of ids" do
res = exec_query('{ prepResolver9Array(intIds: ["1", "10", "100"]) { value } }')
# (1 + 8) * 3
# (10 + 8) * 3
# (100 + 8) * 3
assert_equal [27, 54, 324], res["data"]["prepResolver9Array"].map { |v| v["value"] }
end
it "preserves `nil` when nullable argument is provided `null`" do
res = exec_query("mutation { mutationWithNullableLoadsArgument(labelId: null) { inputs } }")
        assert_nil res["errors"]
assert_equal '{"label":null}', res["data"]["mutationWithNullableLoadsArgument"]["inputs"]
end
it "preserves `nil` when nullable list argument is provided `null`" do
res = exec_query("mutation { mutationWithNullableLoadsArgument(labelIds: null) { inputs } }")
        assert_nil res["errors"]
assert_equal '{"labels":null}', res["data"]["mutationWithNullableLoadsArgument"]["inputs"]
end
it "omits omitted nullable argument" do
res = exec_query("mutation { mutationWithNullableLoadsArgument { inputs } }")
        assert_nil res["errors"]
assert_equal "{}", res["data"]["mutationWithNullableLoadsArgument"]["inputs"]
end
it "returns an error when nullable argument is provided an invalid value" do
res = exec_query('mutation { mutationWithNullableLoadsArgument(labelId: "invalid") { inputs } }')
assert res["errors"]
assert_equal 'No object found for `labelId: "invalid"`', res["errors"][0]["message"]
end
it "returns an error when a non-nullable argument is provided an invalid value" do
res = exec_query('mutation { mutationWithRequiredLoadsArgument(labelId: "invalid") { inputs } }')
assert res["errors"]
assert_equal 'No object found for `labelId: "invalid"`', res["errors"][0]["message"]
end
end
end
end
| 31.193496 | 127 | 0.615826 |
4a101ac103f5264d89ba3d799f1dca05cf5d7f4b | 577 | module WoodShop
ActiveAdmin.register AdminUser, as: 'AdminUser' do
permit_params :email, :password, :password_confirmation
index do
selectable_column
id_column
column :email
column :current_sign_in_at
column :sign_in_count
column :created_at
actions
end
filter :email
filter :current_sign_in_at
filter :sign_in_count
filter :created_at
form do |f|
f.inputs do
f.input :email
f.input :password
f.input :password_confirmation
end
f.actions
end
end
end | 19.233333 | 59 | 0.646447 |
f8f4d6c66563e00d34feb64906b878264710ba04 | 3,191 | require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "sprockets/railtie"
require "rails/test_unit/railtie"
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Captar
class Application < Rails::Application
# Heroku
config.assets.initialize_on_precompile = false
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
config.autoload_paths += %W(#{config.root}/app/exhibits)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.i18n.default_locale = :'pt-BR'
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
# config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
end
end
| 43.121622 | 100 | 0.738013 |
623e26f5efdc05055c44fbe4bee2aecf5e6efc89 | 499 | # Get twilio-ruby from twilio.com/docs/ruby/install
require 'rubygems' # This line not needed for ruby > 1.8
require 'twilio-ruby'
# Get your Account Sid and Auth Token from twilio.com/user/account
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
@client = Twilio::REST::Client.new account_sid, auth_token
caller_id = @client.account.outgoing_caller_ids.create(
friendly_name: 'My Home Phone Number',
phone_number: '+14158675309'
)
puts caller_id.phone_number
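# Illustrative expected output, assuming Twilio accepts the request: the
# validated number is printed, e.g. "+14158675309".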
| 33.266667 | 66 | 0.787575 |
01f977b73d187d4d437fc5d9bd31649e1e786122 | 324 | #!/usr/bin/env ruby
require 'qiniu'
Qiniu.establish_connection! access_key: 'Access_Key',
secret_key: 'Secret_Key'
# The bucket to test against; this key must already exist in that bucket
bucket = 'Bucket_Name'
key = 'ruby-logo.png'
# Delete the resource
success = Qiniu.delete(
  bucket, # bucket (storage space)
  key     # resource key (name)
)
puts success # boolean indicating whether the deletion succeeded
| 17.052632 | 53 | 0.623457 |
bfca4475456c67ee6820bd6456c368c857ed50e1 | 790 | require "spec_helper"
require "controllers/rpt/shared_examples_for_reports"
describe Rpt::OutstandingBalanceReportsController do
render_views
it_behaves_like "a report", %w[html]
describe "#show" do
it "shows users with non-zero balances" do
sign_in create :admin
# One user paid too little
a1 = create :attendee
a1.plans << create(:plan)
paid_too_little = a1.user
# One user paid too much
a2 = create :attendee
paid_too_much = a2.user
paid_too_much.transactions << create(:tr_sale)
# The other paid juuuuuust right
a3 = create :attendee
paid_exactly = a3.user
get :show, :year => Time.now.year
assigns(:users).map(&:id).should =~ [paid_too_little.id, paid_too_much.id]
end
end
end
| 23.939394 | 80 | 0.66962 |
ab0f255e70ac28ea311ffa215515984161531cc1 | 861 | require File.dirname(__FILE__) + '/../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'
describe "Breakpoint#disable" do
before :each do
@cm = BreakpointSpecs::Debuggee.instance_method(:simple_method).compiled_method
@cm.bytecodes = BreakpointSpecs::Debuggee.orig_bytecodes.dup
@orig = @cm.bytecodes.decode[4]
@bp = GlobalBreakpoint.new(@cm,8) {} # The fifth instruction has an IP of 8
@bp.install
end
it "removes the yield_debugger instruction at the location specified by @ip" do
dc = @cm.bytecodes.decode
dc[4].first.should == :yield_debugger
@bp.remove
dc = @cm.bytecodes.decode
dc[4].first.should_not == :yield_debugger
dc[4].should == @orig
end
it "sets the enabled flag to false" do
@bp.installed?.should == true
@bp.remove
@bp.installed?.should == false
end
end | 30.75 | 83 | 0.692218 |
03f0c7a1289832b75ca606de1ebcaa79d1cfc306 | 402 | require "rails_helper"
RSpec.describe 'login_info/new', :type => :view do
it 'renders the new template for sign up' do
assign(:login_info, LoginInfo.create!({
:email => "[email protected]",
:password => "password",
:password_confirmation => "password"
}))
render
# rendered.should match '[email protected]'
# rendered.should match 'password'
end
end | 25.125 | 51 | 0.661692 |
b9ca402722169eb864fc88790de472131371a6d8 | 3,226 | class User < ApplicationRecord
has_many :microposts, dependent: :destroy
has_many :active_relationships, class_name: "Relationship",
foreign_key: "follower_id",
dependent: :destroy
has_many :passive_relationships, class_name: "Relationship",
foreign_key: "followed_id",
dependent: :destroy
has_many :following, through: :active_relationships, source: :followed
has_many :followers, through: :passive_relationships, source: :follower
attr_accessor :remember_token, :activation_token, :reset_token
before_save :downcase_email
before_create :create_activation_digest
  validates :name, presence: true, length: { maximum: 50 }
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-]+(\.[a-z\d\-]+)*\.[a-z]+\z/i
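  # e.g. accepts "[email protected]"; rejects "user@example,com" and "user at foo.org".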
  validates :email, presence: true, length: { maximum: 255 }, format: { with: VALID_EMAIL_REGEX }, uniqueness: true
has_secure_password
validates :password, presence: true, length: { minimum: 6 }, allow_nil: true
class << self
    # Returns the hash digest of the given string.
def digest(string)
      cost = ActiveModel::SecurePassword.min_cost ? BCrypt::Engine::MIN_COST :
BCrypt::Engine.cost
BCrypt::Password.create(string, cost: cost)
end
    # Returns a random token.
def new_token
SecureRandom.urlsafe_base64
end
end
  # Remembers a user in the database for use in persistent sessions.
def remember
self.remember_token = User.new_token
update_attribute(:remember_digest, User.digest(remember_token))
end
  # Returns true if the given token matches the digest.
def authenticated?(attribute, token)
digest = send("#{attribute}_digest")
return false if digest.nil?
BCrypt::Password.new(digest).is_password?(token)
end
  # Forgets a user (discards the persistent session digest).
def forget
    update_attribute(:remember_digest, nil)
end
  # Activates an account.
def activate
update_columns(activated: true, activated_at: Time.zone.now)
end
  # Sends the account activation email.
def send_activation_email
UserMailer.account_activation(self).deliver_now
end
  # Sets the password reset attributes.
def create_reset_digest
self.reset_token = User.new_token
    update_columns(reset_digest: User.digest(reset_token), reset_sent_at: Time.zone.now)
end
  # Sends the password reset email.
def send_password_reset_email
UserMailer.password_reset(self).deliver_now
end
  # Returns true if a password reset has expired.
def password_reset_expired?
reset_sent_at < 2.hours.ago
end
  # Returns a user's status feed.
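  # The SQL subselect below gathers the ids of every user this user follows,
  # so the feed contains their microposts as well as the user's own.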
def feed
following_ids = "SELECT followed_id FROM relationships
WHERE follower_id = :user_id"
Micropost.where("user_id IN (#{following_ids})
OR user_id = :user_id", user_id: id)
end
  # Follows a user.
def follow(other_user)
following << other_user
end
  # Unfollows a user.
def unfollow(other_user)
active_relationships.find_by(followed_id: other_user.id).destroy
end
  # Returns true if the current user is following the other user.
def following?(other_user)
following.include?(other_user)
end
private
  # Converts email to all lowercase.
def downcase_email
email.downcase!
end
  # Creates and assigns the activation token and digest.
def create_activation_digest
self.activation_token = User.new_token
self.activation_digest = User.digest(activation_token)
end
end
| 27.338983 | 112 | 0.703658 |
387b482731dde65ecad5f9bf4ba356ac49c67c81 | 5,173 | # This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require 'simplecov'
SimpleCov.start 'rails'
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# Load all 'factories' and clear action mailer deliveries
config.before(:all) do
FactoryBot.definition_file_paths << File.join(File.dirname(__FILE__), 'factories')
FactoryBot.reload
end
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# # This allows you to limit a spec run to individual examples or groups
# # you care about by tagging them with `:focus` metadata. When nothing
# # is tagged with `:focus`, all examples get run. RSpec also provides
# # aliases for `it`, `describe`, and `context` that include `:focus`
# # metadata: `fit`, `fdescribe` and `fcontext`, respectively.
# config.filter_run_when_matching :focus
#
# # Allows RSpec to persist some state between runs in order to support
# # the `--only-failures` and `--next-failure` CLI options. We recommend
# # you configure your source control system to ignore this file.
# config.example_status_persistence_file_path = "spec/examples.txt"
#
# # Limits the available syntax to the non-monkey patched syntax that is
# # recommended. For more details, see:
# # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
# config.disable_monkey_patching!
#
# # Many RSpec users commonly either run the entire suite or an individual
# # file, and it's useful to allow more verbose output when running an
# # individual spec file.
# if config.files_to_run.one?
# # Use the documentation formatter for detailed output,
# # unless a formatter has already been configured
# # (e.g. via a command-line flag).
# config.default_formatter = "doc"
# end
#
# # Print the 10 slowest examples and example groups at the
# # end of the spec run, to help surface which specs are running
# # particularly slow.
# config.profile_examples = 10
#
# # Run specs in random order to surface order dependencies. If you find an
# # order dependency and want to debug it, you can fix the order by providing
# # the seed, which is printed after each run.
# # --seed 1234
# config.order = :random
#
# # Seed global randomization in this process using the `--seed` CLI option.
# # Setting this allows you to use `--seed` to deterministically reproduce
# # test failures related to randomization by passing the same `--seed` value
# # as the one that triggered the failure.
# Kernel.srand config.seed
end
| 49.740385 | 96 | 0.718539 |
03112d360e43301f090c8fc7efaac34b9ea238bc | 556 | # frozen_string_literal: true
class StringDictionary
attr_reader :sentence, :dictionary
def initialize(sentence, dictionary)
@sentence = sentence
@dictionary = dictionary
end
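  # Illustrative usage (hypothetical values):
  #   StringDictionary.new("below the downstairs", %w[below down]).substrings
  #   #=> { "below" => 1, "down" => 1 }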
def substrings
sentence.downcase!
dictionary.each_with_object(Hash.new(0)) do |word, hash|
hash[word] = count_word_in_sentence(word) if word_include_in_sentence?(word)
end
end
private
def word_include_in_sentence?(word)
sentence.include?(word)
end
def count_word_in_sentence(word)
sentence.scan(/#{word}/).count
end
end
| 19.172414 | 82 | 0.726619 |
e965e768c968ba9078cbce1175407f4aba09a284 | 3,116 | class DnscryptProxy < Formula
desc "Secure communications between a client and a DNS resolver"
homepage "https://github.com/jedisct1/dnscrypt-proxy"
url "https://github.com/jedisct1/dnscrypt-proxy/archive/2.0.22.tar.gz"
sha256 "ac8ad326b6da47bb1e515d29a354511a8dc9a5ebfcf4be816b6791532d02d564"
head "https://github.com/jedisct1/dnscrypt-proxy.git"
bottle do
cellar :any_skip_relocation
sha256 "f82d05fd2ff6ce1d80b4a2511a832d3745dcde7b1287b2c11a6297df37d50c3a" => :mojave
sha256 "9921e47dd8ad7e69f9f8426a99e2af65c35e1822415a25af88ece16809a7effa" => :high_sierra
sha256 "9f5f908aa7638d5f614c834ace6180a7a1cdca138d00053acc801e5b88e1cf46" => :sierra
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
prefix.install_metafiles
dir = buildpath/"src/github.com/jedisct1/dnscrypt-proxy"
dir.install buildpath.children
cd dir/"dnscrypt-proxy" do
system "go", "build", "-ldflags", "-X main.version=#{version}", "-o",
sbin/"dnscrypt-proxy"
pkgshare.install Dir["example*"]
etc.install pkgshare/"example-dnscrypt-proxy.toml" => "dnscrypt-proxy.toml"
end
end
def caveats; <<~EOS
After starting dnscrypt-proxy, you will need to point your
local DNS server to 127.0.0.1. You can do this by going to
System Preferences > "Network" and clicking the "Advanced..."
button for your interface. You will see a "DNS" tab where you
can click "+" and enter 127.0.0.1 in the "DNS Servers" section.
By default, dnscrypt-proxy runs on localhost (127.0.0.1), port 53,
balancing traffic across a set of resolvers. If you would like to
change these settings, you will have to edit the configuration file:
#{etc}/dnscrypt-proxy.toml
To check that dnscrypt-proxy is working correctly, open Terminal and enter the
following command. Replace en1 with whatever network interface you're using:
sudo tcpdump -i en1 -vvv 'port 443'
You should see a line in the result that looks like this:
resolver.dnscrypt.info
EOS
end
plist_options :startup => true
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>KeepAlive</key>
<true/>
<key>RunAtLoad</key>
<true/>
<key>ProgramArguments</key>
<array>
<string>#{opt_sbin}/dnscrypt-proxy</string>
<string>-config</string>
<string>#{etc}/dnscrypt-proxy.toml</string>
</array>
<key>UserName</key>
<string>root</string>
<key>StandardErrorPath</key>
<string>/dev/null</string>
<key>StandardOutPath</key>
<string>/dev/null</string>
</dict>
</plist>
EOS
end
test do
config = "-config #{etc}/dnscrypt-proxy.toml"
output = shell_output("#{sbin}/dnscrypt-proxy #{config} -list 2>&1")
assert_match "public-resolvers.md] loaded", output
end
end
| 34.241758 | 106 | 0.673941 |
1c5848ec63f83738dbaf7dfed8a85c34ad52a439 | 515 | require 'bio-ucsc'
describe "Bio::Ucsc::Hg19::WgEncodeBroadHistoneNhekH3k4me3StdPk" do
describe "#find_by_interval" do
context "given range chr1:1-800,000" do
it 'returns a record (r.chrom == "chr1")' do
Bio::Ucsc::Hg19::DBConnection.default
Bio::Ucsc::Hg19::DBConnection.connect
i = Bio::GenomicInterval.parse("chr1:1-800,000")
r = Bio::Ucsc::Hg19::WgEncodeBroadHistoneNhekH3k4me3StdPk.find_by_interval(i)
r.chrom.should == "chr1"
end
end
end
end
| 30.294118 | 85 | 0.666019 |
1892203d4ac41a6ca14f47e0f16bc00979bc204d | 5,477 | require 'spec_helper'
require 'rack'
RSpec.describe Warden::Cognito::TokenAuthenticatableStrategy do
include_context 'fixtures'
include_context 'configuration'
let(:jwt_token) { 'FakeJwtToken' }
let(:authorization_header) { { 'HTTP_AUTHORIZATION' => "Bearer #{jwt_token}" } }
let(:headers) { authorization_header }
let(:path) { '/v1/resource' }
let(:env) { Rack::MockRequest.env_for(path, method: 'GET').merge(headers) }
let(:issuer) { "https://cognito-idp.#{region}.amazonaws.com/#{pool_id}" }
let(:decoded_token) do
[
{
'sub' => 'CognitoUserId',
'iss' => issuer
}
]
end
let(:client) { double 'Client' }
subject(:strategy) { described_class.new(env) }
before do
allow(Aws::CognitoIdentityProvider::Client).to receive(:new).and_return client
allow(JWT).to receive(:decode).with(jwt_token, any_args).and_return(decoded_token)
allow(strategy).to receive(:jwks).and_return []
end
describe '.valid?' do
it 'grab the token from the Authorization header' do
expect(JWT).to receive(:decode).with(jwt_token, nil, true, any_args)
strategy.valid?
end
context 'with a token issued by another entity' do
before { allow(JWT).to receive(:decode).with(jwt_token, nil, true, any_args).and_raise(JWT::InvalidIssuerError) }
it 'returns false' do
expect(strategy.valid?).to be_falsey
end
end
context 'with a token issued by Cognito' do
it 'returns true' do
expect(strategy.valid?).to be_truthy
end
context 'expired' do
before { allow(JWT).to receive(:decode).with(jwt_token, nil, true, any_args).and_raise(JWT::ExpiredSignature) }
it 'returns true' do
expect(strategy.valid?).to be_truthy
end
end
end
context 'with multiple pools configured' do
let(:client_id_pool_a) { 'AWS Cognito Client ID Specific Pool' }
let(:user_pool_configurations) do
{
pool_a: { region: region, pool_id: pool_id, client_id: client_id_pool_a },
"#{pool_identifier}": { region: region, pool_id: pool_id, client_id: client_id }
}
end
let(:headers) { authorization_header.merge({ 'HTTP_X_AUTHORIZATION_POOL_IDENTIFIER' => specified_pool }) }
context 'when specified a configured pool' do
let(:specified_pool) { pool_identifier }
it 'returns true' do
expect(strategy.valid?).to be_truthy
end
context 'when the pool is not configured' do
let(:specified_pool) { 'Non existing/configured pool' }
it 'return false' do
expect(strategy.valid?).to be_falsey
end
end
end
context 'when no pool is specified' do
let(:specified_pool) { nil }
context 'when one issuer matches' do
it 'returns true' do
expect(strategy.valid?).to be_truthy
end
end
context 'when no issuer matches' do
let(:issuer) { 'http://google_issued_token.url' }
it 'returns false' do
expect(strategy.valid?).to be_falsey
end
end
end
end
end
describe '.authenticate' do
it 'grab the token from the Authorization header' do
expect(JWT).to receive(:decode).with(jwt_token, nil, true, any_args)
strategy.valid?
end
context 'with an expired token' do
before { allow(JWT).to receive(:decode).with(jwt_token, nil, true, any_args).and_raise(JWT::ExpiredSignature) }
it 'fails and halts all authentication strategies' do
expect(strategy).to receive(:fail!).with(:token_expired)
strategy.authenticate!
end
end
context 'with a valid token' do
before { allow(client).to receive(:get_user).and_return cognito_user }
context 'referencing an existing (local) user' do
it 'succeeds with the user instance' do
expect(config.user_repository).to receive(:find_by_cognito_attribute).with(local_identifier,
pool_identifier).and_call_original
expect(strategy).to receive(:success!).with(user)
strategy.authenticate!
end
end
context 'referencing a new user' do
before do
config.user_repository = nil_user_repository
end
it 'calls the `after_local_user_not_found` callback' do
expect(config.after_local_user_not_found).to receive(:call).with(cognito_user,
pool_identifier).and_call_original
strategy.authenticate!
end
context 'with `after_local_user_not_found` returning nil' do
before do
config.after_local_user_not_found = Fixtures::Callback.after_user_local_not_found_nil
end
it 'fails! with :unknown_user' do
expect(strategy).to receive(:fail!).with(:unknown_user)
strategy.authenticate!
end
end
context 'with `after_local_user_not_found` returning a user' do
before do
config.after_local_user_not_found = Fixtures::Callback.after_user_local_not_found_user
end
it 'success! with the given user' do
expect(strategy).to receive(:success!).with(user)
strategy.authenticate!
end
end
end
end
end
end
| 32.60119 | 119 | 0.624429 |
62f036b0fe85b15824407d15e607cd3fbfd4c458 | 362 | module TwoFactorAuthentication
class AuthAppPolicy
def initialize(user)
@user = user
end
def configured?
user&.auth_app_configurations&.any?
end
def enabled?
configured?
end
def visible?
true
end
def enrollable?
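      # `available?` is not defined in this class; it is assumed to be provided
      # elsewhere (e.g., a subclass or an included module).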
available? && !enabled?
end
private
attr_reader :user
end
end
| 12.928571 | 41 | 0.610497 |
f7bd137e5ec5ae7f92ca376700247fb0eceebe68 | 2,882 | require 'test_helper'
class LocaleTest < ActiveSupport::TestCase
fixtures :all
test "turning locale without nested phrases into a hash" do
assert_equal({ "se" => { "hello_world" => "Hejsan Verdon" } }, tolk_locales(:se).to_hash)
end
test "turning locale with nested phrases into a hash" do
assert_equal({ "en" => {
"hello_world" => "Hello World",
"nested" => {
"hello_world" => "Nested Hello World",
"hello_country" => "Nested Hello Country"
},
"number" => {
"human" => {
"format" => {
"precision" => 1
}
},
"currency" => {
"format" => {
"significant" => false
}
}
}
}}, tolk_locales(:en).to_hash)
end
test "phrases without translations" do
assert tolk_locales(:en).phrases_without_translation.include?(tolk_phrases(:cozy))
end
test "searching phrases without translations" do
assert !tolk_locales(:en).search_phrases_without_translation("cozy").include?(tolk_phrases(:hello_world))
end
test "paginating phrases without translations" do
Tolk::Phrase.paginates_per(2)
locale = tolk_locales(:se)
page1 = locale.phrases_without_translation
assert_equal [4, 3], page1.map(&:id)
page2 = locale.phrases_without_translation(2)
assert_equal [2, 6], page2.map(&:id)
page3 = locale.phrases_without_translation(3)
assert_equal [5], page3.map(&:id)
page4 = locale.phrases_without_translation(4)
assert page4.blank?
end
test "paginating phrases with translations" do
Tolk::Phrase.paginates_per(5)
locale = tolk_locales(:en)
page1 = locale.phrases_with_translation
assert_equal [1, 3, 2, 6, 5], page1.map(&:id)
page2 = locale.phrases_with_translation(2)
assert page2.blank?
end
test "counting missing translations" do
assert_equal 2, tolk_locales(:da).count_phrases_without_translation
assert_equal 5, tolk_locales(:se).count_phrases_without_translation
end
test "dumping all locales to yml" do
Tolk::Locale.primary_locale_name = 'en'
Tolk::Locale.primary_locale(true)
begin
FileUtils.mkdir_p(Rails.root.join("../../tmp/locales"))
Tolk::Locale.dump_all(Rails.root.join("../../tmp/locales"))
%w( da se ).each do |locale|
assert_equal \
File.read(Rails.root.join("../locales/basic/#{locale}.yml")),
File.read(Rails.root.join("../../tmp/locales/#{locale}.yml"))
end
# Make sure dump doesn't generate en.yml
assert ! File.exist?(Rails.root.join("../../tmp/locales/en.yml"))
ensure
FileUtils.rm_rf(Rails.root.join("../../tmp/locales"))
end
end
test "human language name" do
assert_equal 'English', tolk_locales(:en).language_name
assert_equal 'pirate', Tolk::Locale.new(:name => 'pirate').language_name
end
end
| 28.82 | 109 | 0.645732 |
b913a1ed11e2d7fbaad7e6757d16065debf782be | 268 | cask 'mixxx' do
version '2.1.0'
sha256 '9c5b8d49939ee2591245f015602256bb258089ce5225eb9adde46298028d920a'
url "https://downloads.mixxx.org/mixxx-#{version}/mixxx-#{version}-osxintel.dmg"
name 'Mixxx'
homepage 'https://www.mixxx.org/'
app 'Mixxx.app'
end
| 24.363636 | 82 | 0.738806 |
edb07309c3ef2e9ebea7c5c24535f07fe9ca5261 | 175 | class AddDeletedAtToMinistries < ActiveRecord::Migration[5.0]
def change
add_column :ministries, :deleted_at, :datetime
add_index :ministries, :deleted_at
end
end
| 25 | 61 | 0.765714 |
089eecbc98fe1cd9169f3e80b45b66efe2b5d369 | 1,518 | class ScrapeItemWorker
include Sidekiq::Worker
def perform(url)
browser = Watir::Browser.new
browser.goto url
browser.div(id: 'packing-header').click
sleep 0.1
code_wrapper = browser.div(id: 'preplist_aurora').div(class: 'drug__list-filterresult').ul.text
codes = code_wrapper.scan(/EAN\s\d*/).join(' ').gsub('EAN', '')
if codes.length.positive?
      # Write a standalone HTML report named after the extracted barcode(s).
      f = File.open("#{codes}.html", 'w')
      f.write("<!DOCTYPE html>\n<html lang='en'>\n<head>\n<meta charset='UTF-8'>\n<title>Title</title>\n</head>\n<body>\n")
      f.write("<h1>Штрихкоды: #{codes.gsub(' ', ', ')}</h1>\n") # "Штрихкоды" = "Barcodes"
      h1 = browser.h1.text.to_s
      f.write("<h2>Название: #{h1}</h2>\n") # "Название" = "Name"
content = browser.div(class: 'drug__content')
h2s = content.h2s
      h2s.each do |h2|
        # The headings below are compared against text scraped from a
        # Russian-language page, so these literals must stay in Russian.
        break if h2.text == 'Цены в аптеках Москвы' # "Prices in Moscow pharmacies"
        next if h2.text == 'Содержание' # "Table of contents"
        break if h2.text == 'Синонимы нозологических групп' # "Synonyms of nosological groups"
        p h2.text
        f.write("<h2>#{h2.text}</h2>\n")
        # Copy the sibling elements that follow this heading until the next <h2>.
        next_tag = ''
        i = 0
        while next_tag != 'h2'
          inner = h2.next_sibling(index: i)
          break if inner.class_name.include? 'noprint'
          f.write("<#{inner.tag_name}>#{inner.inner_html}</#{inner.tag_name}>\n")
          i += 1
          # Peek at the sibling that will be processed next so the loop stops
          # before writing the following section heading.
          next_tag = h2.next_sibling(index: i).tag_name
        end
end
      f.write("\n</body>\n</html>")
      f.close
      browser.close
      Drug.create(name: h1, code: codes)
    else
      puts "Нет штрихкодов" # "No barcodes found"
      browser.close
end
end
end
| 32.297872 | 123 | 0.575758 |
7a0f437e0d64f29c422a7dfafe3beb218f274b8e | 6,134 | Rails.application.routes.draw do
# AUTH STARTS
match 'auth/:provider/callback', to: 'home#callback', via: [:get, :post]
mount_devise_token_auth_for 'User', at: 'auth', controllers: {
confirmations: 'devise_overrides/confirmations',
passwords: 'devise_overrides/passwords',
sessions: 'devise_overrides/sessions',
token_validations: 'devise_overrides/token_validations'
}, via: [:get, :post]
root to: 'dashboard#index'
get '/app', to: 'dashboard#index'
get '/app/*params', to: 'dashboard#index'
get '/app/accounts/:account_id/settings/inboxes/new/twitter', to: 'dashboard#index', as: 'app_new_twitter_inbox'
get '/app/accounts/:account_id/settings/inboxes/new/:inbox_id/agents', to: 'dashboard#index', as: 'app_twitter_inbox_agents'
resource :widget, only: [:show]
get '/api', to: 'api#index'
namespace :api, defaults: { format: 'json' } do
namespace :v1 do
# ----------------------------------
# start of account scoped api routes
resources :accounts, only: [:create, :show, :update], module: :accounts do
namespace :actions do
resource :contact_merge, only: [:create]
end
resources :agents, except: [:show, :edit, :new]
resources :callbacks, only: [] do
collection do
post :register_facebook_page
get :register_facebook_page
post :facebook_pages
post :reauthorize_page
end
end
resources :canned_responses, except: [:show, :edit, :new]
namespace :channels do
resource :twilio_channel, only: [:create]
end
resources :conversations, only: [:index, :create, :show] do
get 'meta', on: :collection
scope module: :conversations do
resources :messages, only: [:index, :create]
resources :assignments, only: [:create]
resources :labels, only: [:create, :index]
end
member do
post :toggle_status
post :toggle_typing_status
post :update_last_seen
end
end
resources :contacts, only: [:index, :show, :update, :create] do
scope module: :contacts do
resources :conversations, only: [:index]
end
end
resources :facebook_indicators, only: [] do
collection do
post :mark_seen
post :typing_on
post :typing_off
end
end
resources :inboxes, only: [:index, :create, :update, :destroy] do
post :set_agent_bot, on: :member
end
resources :inbox_members, only: [:create, :show], param: :inbox_id
resources :labels, only: [:index] do
collection do
get :most_used
end
end
resources :notifications, only: [:index, :update]
resource :notification_settings, only: [:show, :update]
# this block is only required if subscription via chargebee is enabled
resources :subscriptions, only: [:index] do
collection do
get :summary
end
end
resources :webhooks, except: [:show]
end
# end of account scoped api routes
# ----------------------------------
resource :profile, only: [:show, :update]
resource :notification_subscriptions, only: [:create]
resources :agent_bots, only: [:index]
namespace :widget do
resources :events, only: [:create]
resources :messages, only: [:index, :create, :update]
resources :conversations do
collection do
post :toggle_typing
end
end
resource :contact, only: [:update]
resources :inbox_members, only: [:index]
resources :labels, only: [:create, :destroy]
end
resources :webhooks, only: [] do
collection do
post :chargebee
end
end
end
namespace :v2 do
resources :accounts, only: [], module: :accounts do
resources :reports, only: [] do
collection do
get :account
end
member do
get :account_summary
end
end
end
end
end
namespace :twitter do
resource :authorization, only: [:create]
resource :callback, only: [:show]
end
namespace :twilio do
resources :callback, only: [:create]
end
# ----------------------------------------------------------------------
# Used in mailer templates
resource :app, only: [:index] do
resources :accounts do
resources :conversations, only: [:show]
end
end
# ----------------------------------------------------------------------
# Routes for social integrations
mount Facebook::Messenger::Server, at: 'bot'
get 'webhooks/twitter', to: 'api/v1/webhooks#twitter_crc'
post 'webhooks/twitter', to: 'api/v1/webhooks#twitter_events'
# ----------------------------------------------------------------------
# Routes for testing
resources :widget_tests, only: [:index] unless Rails.env.production?
# ----------------------------------------------------------------------
# Routes for external service verifications
get 'apple-app-site-association' => 'apple_app#site_association'
# ----------------------------------------------------------------------
# Internal Monitoring Routes
require 'sidekiq/web'
devise_for :super_admins, path: 'super_admin', controllers: { sessions: 'super_admin/devise/sessions' }
devise_scope :super_admin do
get 'super_admin/logout', to: 'super_admin/devise/sessions#destroy'
namespace :super_admin do
resources :users
resources :accounts
resources :account_users
resources :super_admins
resources :access_tokens
root to: 'users#index'
end
authenticated :super_admin do
mount Sidekiq::Web => '/monitoring/sidekiq'
end
end
# ---------------------------------------------------------------------
# Routes for swagger docs
get '/swagger/*path', to: 'swagger#respond'
get '/swagger', to: 'swagger#respond'
end
| 31.782383 | 126 | 0.564069 |
abe605ed2e31213bd9f095438cbacf6759f47d38 | 528 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require_relative '../../aws-sdk-core/spec/shared_spec_helper'
$:.unshift(File.expand_path('../../lib', __FILE__))
$:.unshift(File.expand_path('../../../aws-sdk-core/lib', __FILE__))
$:.unshift(File.expand_path('../../../aws-sigv4/lib', __FILE__))
require 'rspec'
require 'webmock/rspec'
require 'aws-sdk-mediapackage'
| 31.058824 | 74 | 0.723485 |
260159e0b3899af15ee8e6c9152028250f4b8d41 | 14,887 | # frozen_string_literal: true
ASCIIDOCTOR_TEST_DIR = File.absolute_path __dir__
ASCIIDOCTOR_LIB_DIR = ENV['ASCIIDOCTOR_LIB_DIR'] || (File.join ASCIIDOCTOR_TEST_DIR, '../lib')
require 'simplecov' if ENV['COVERAGE'] == 'true'
require File.join ASCIIDOCTOR_LIB_DIR, 'asciidoctor'
Dir.chdir Asciidoctor::ROOT_DIR
require 'nokogiri'
# NOTE rouge has all sorts of warnings we don't want to see, so silence them
proc do
old_verbose, $VERBOSE = $VERBOSE, nil
require 'rouge'
$VERBOSE = old_verbose
end.call
require 'socket'
require 'tempfile'
require 'tmpdir'
autoload :FileUtils, 'fileutils'
autoload :Pathname, 'pathname'
RE_XMLNS_ATTRIBUTE = / xmlns="[^"]+"/
RE_DOCTYPE = /\s*<!DOCTYPE (.*)/
require 'minitest/autorun'
# Minitest 4 doesn't have Minitest::Test
Minitest::Test = MiniTest::Unit::TestCase unless defined? Minitest::Test
class Minitest::Test
def jruby?
RUBY_ENGINE == 'jruby'
end
def self.jruby_9_1_windows?
RUBY_ENGINE == 'jruby' && windows? && (JRUBY_VERSION.start_with? '9.1.')
end
def jruby_9_1_windows?
Minitest::Test.jruby_9_1_windows?
end
def self.windows?
/mswin|msys|mingw/.match? RbConfig::CONFIG['host_os']
end
def windows?
Minitest::Test.windows?
end
def disk_root
%(#{windows? ? (Asciidoctor::ROOT_DIR.partition '/')[0] : ''}/)
end
def empty_document options = {}
options[:parse] ? (Asciidoctor::Document.new [], options).parse : (Asciidoctor::Document.new [], options)
end
def empty_safe_document options = {}
Asciidoctor::Document.new [], (options.merge safe: :safe)
end
def sample_doc_path name
unless (name = name.to_s).include? '.'
%w(adoc asciidoc txt).each do |ext|
if File.exist? fixture_path %(#{name}.#{ext})
name = %(#{name}.#{ext})
break
end
end
end
fixture_path name
end
def bindir
File.join Asciidoctor::ROOT_DIR, 'bin'
end
def testdir
ASCIIDOCTOR_TEST_DIR
end
def fixturedir
File.join testdir, 'fixtures'
end
def fixture_path name
File.join fixturedir, name
end
def example_document name, opts = {}
document_from_string (File.read (sample_doc_path name), mode: Asciidoctor::FILE_READ_MODE), opts
end
def xmlnodes_at_css css, content, count = nil
xmlnodes_at_path :css, css, content, count
end
def xmlnodes_at_xpath xpath, content, count = nil
xmlnodes_at_path :xpath, xpath, content, count
end
def xmlnodes_at_path type, path, content, count = nil
doc = xmldoc_from_string content
case type
when :xpath
namespaces = (doc.respond_to? :root) ? doc.root.namespaces : {}
results = doc.xpath %(#{path.sub '/', './'}), namespaces
when :css
results = doc.css path
end
count == 1 ? results.first : results
end
# Generate an xpath attribute matcher that matches a name in the class attribute
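  # Illustrative usage (the selector, variable, and expected count are made up):
  #   assert_xpath %(//p[#{contains_class 'lead'}]), output, 1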
def contains_class name
%(contains(concat(' ', normalize-space(@class), ' '), ' #{name} '))
end
def assert_css css, content, count = nil
assert_path :css, css, content, count
end
def assert_xpath xpath, content, count = nil
assert_path :xpath, xpath, content, count
end
def assert_path type, path, content, count = nil
case type
when :xpath
type_name = 'XPath'
when :css
type_name = 'CSS'
end
results = xmlnodes_at_path type, path, content
if count == true || count == false
if count == results
assert true
else
flunk %(#{type_name} #{path} yielded #{results} rather than #{count} for:\n#{content})
end
elsif count && results.size != count
flunk %(#{type_name} #{path} yielded #{results.size} elements rather than #{count} for:\n#{content})
elsif count.nil? && results.empty?
flunk %(#{type_name} #{path} not found in:\n#{content})
else
assert true
end
end
def assert_include expected, actual
assert_includes actual, expected
end
def refute_include not_expected, actual
refute_includes actual, not_expected
end
def assert_message logger, severity, expected_message, kind = String, idx = nil
unless idx
assert_equal 1, logger.messages.size
idx = 0
end
message = logger.messages[idx]
assert_equal severity, message[:severity]
assert_kind_of kind, message[:message]
if kind == String
actual_message = message[:message]
else
refute_nil message[:message][:source_location]
actual_message = message[:message].inspect
end
if expected_message.start_with? '~'
assert_includes actual_message, expected_message[1..-1]
else
assert_equal expected_message, actual_message
end
end
def assert_messages logger, expected_messages
assert_equal expected_messages.size, logger.messages.size
expected_messages.each_with_index do |expected_message_details, idx|
severity, expected_message, kind = expected_message_details
assert_message logger, severity, expected_message, (kind || String), idx
end
end
def xmldoc_from_string content
if (content.start_with? '<?xml ') || (RE_XMLNS_ATTRIBUTE.match? content)
Nokogiri::XML::Document.parse content
elsif !(RE_DOCTYPE =~ content)
Nokogiri::HTML::DocumentFragment.parse content
elsif $1.start_with? 'html'
Nokogiri::HTML::Document.parse content
else
Nokogiri::XML::Document.parse content
end
end
def document_from_string src, opts = {}
assign_default_test_options opts
opts[:parse] ? (Asciidoctor::Document.new src.lines, opts).parse : (Asciidoctor::Document.new src.lines, opts)
end
def block_from_string src, opts = {}
(document_from_string src, (opts.merge standalone: false)).blocks.first
end
def convert_string src, opts = {}
keep_namespaces = opts.delete :keep_namespaces
if keep_namespaces
(document_from_string src, opts).convert
else
# this is required because nokogiri is easily confused by namespaces
result = (document_from_string src, opts).convert
result ? (result.sub RE_XMLNS_ATTRIBUTE, '') : result
end
end
def convert_string_to_embedded src, opts = {}
(document_from_string src, (opts.merge standalone: false)).convert
end
def convert_inline_string src, opts = {}
(document_from_string src, (opts.merge doctype: :inline)).convert
end
def parse_header_metadata source, doc = nil
reader = Asciidoctor::Reader.new source.split Asciidoctor::LF
[(Asciidoctor::Parser.parse_header_metadata reader, doc), reader]
end
def assign_default_test_options opts
opts[:standalone] = true unless opts.key? :standalone
opts[:parse] = true unless opts.key? :parse
if opts[:standalone]
# don't embed stylesheet unless test requests the default behavior
if opts.key? :linkcss_default
opts.delete :linkcss_default
else
opts[:attributes] ||= {}
opts[:attributes]['linkcss'] = ''
end
end
if (template_dir = ENV['TEMPLATE_DIR'])
opts[:template_dir] = template_dir unless opts.key? :template_dir
end
nil
end
# Decode the numeric character reference, such as 8212, to a Unicode glyph
# so it may be used in an XPath expression.
#
# Examples
#
# decode_char 60
# # => "<"
#
# Returns the decoded String that corresponds to the numeric character reference
def decode_char number
[number].pack 'U1'
end
def invoke_cli_with_filenames argv = [], filenames = [], &block
filepaths = []
filenames.each do |filename|
if filenames.nil? || (Pathname.new filename).absolute?
filepaths << filename
else
filepaths << (fixture_path filename)
end
end
invoker = Asciidoctor::Cli::Invoker.new argv + filepaths
invoker.invoke!(&block)
invoker
end
def invoke_cli_to_buffer argv = [], filename = 'sample.adoc', &block
invoke_cli argv, filename, [StringIO.new, StringIO.new], &block
end
def invoke_cli argv = [], filename = 'sample.adoc', buffers = nil, &block
if filename.nil? || filename == '-' || (Pathname.new filename).absolute?
filepath = filename
else
filepath = fixture_path filename
end
invoker = Asciidoctor::Cli::Invoker.new argv + [filepath]
invoker.redirect_streams(*buffers) if buffers
invoker.invoke!(&block)
invoker
end
def redirect_streams
old_stdout, $stdout = $stdout, StringIO.new
old_stderr, $stderr = $stderr, StringIO.new
old_logger = Asciidoctor::LoggerManager.logger
old_logger_level = old_logger.level
new_logger = (Asciidoctor::LoggerManager.logger = Asciidoctor::Logger.new $stderr)
new_logger.level = old_logger_level
yield $stdout, $stderr
ensure
$stdout, $stderr = old_stdout, old_stderr
Asciidoctor::LoggerManager.logger = old_logger
end
def resolve_localhost
Socket.ip_address_list.find(&:ipv4?).ip_address
end
def using_memory_logger level = nil
old_logger = Asciidoctor::LoggerManager.logger
memory_logger = Asciidoctor::MemoryLogger.new
memory_logger.level = level if level
begin
Asciidoctor::LoggerManager.logger = memory_logger
yield memory_logger
ensure
Asciidoctor::LoggerManager.logger = old_logger
end
end
def in_verbose_mode
begin
old_logger_level, Asciidoctor::LoggerManager.logger.level = Asciidoctor::LoggerManager.logger.level, Logger::Severity::DEBUG
yield
ensure
Asciidoctor::LoggerManager.logger.level = old_logger_level
end
end
def asciidoctor_cmd ruby_args = nil
[Gem.ruby, *ruby_args, (File.join bindir, 'asciidoctor')]
end
# NOTE run_command fails on JRuby 9.1 for Windows with the following error:
# Java::JavaLang::ClassCastException at org.jruby.util.ShellLauncher.getModifiedEnv(ShellLauncher.java:271)
def run_command cmd, *args, &block
if Array === cmd
args.unshift(*cmd)
cmd = args.shift
end
kw_args = Hash === args[-1] ? args.pop : {}
env = kw_args[:env]
(env ||= {})['RUBYOPT'] = nil unless kw_args[:use_bundler]
# JRuby 9.1 on Windows doesn't support popen options; therefore, test cannot capture / assert on stderr
opts = jruby_9_1_windows? ? {} : { err: [:child, :out] }
if env
# NOTE while JRuby 9.2.10.0 implements support for unsetenv_others, it doesn't work in child
#if jruby? && (Gem::Version.new JRUBY_VERSION) < (Gem::Version.new '9.2.10.0')
if jruby?
begin
old_env, env = ENV.merge, (ENV.merge env)
env.each {|key, val| env.delete key if val.nil? } if env.value? nil
ENV.replace env
popen [cmd, *args, opts], &block
ensure
ENV.replace old_env
end
elsif env.value? nil
env = env.reduce ENV.to_h do |accum, (key, val)|
val.nil? ? (accum.delete key) : (accum[key] = val)
accum
end
popen [env, cmd, *args, (opts.merge unsetenv_others: true)], &block
else
popen [env, cmd, *args, opts], &block
end
else
popen [cmd, *args, opts], &block
end
end
def popen args, &block
# When block is passed to IO.popen, JRuby for Windows does not return value of block as return value
if jruby? && windows?
result = nil
IO.popen args do |io|
result = yield io
end
result
else
IO.popen args, &block
end
end
def using_test_webserver host = resolve_localhost, port = 9876
base_dir = testdir
server = TCPServer.new host, port
server_thread = Thread.start do
while (session = server.accept)
request = session.gets
if /^GET (\S+) HTTP\/1\.1$/ =~ request.chomp
resource = (resource = $1) == '' ? '.' : resource
else
session.print %(HTTP/1.1 405 Method Not Allowed\r\nContent-Type: text/plain\r\n\r\n)
session.print %(405 - Method not allowed\n)
session.close
next
end
if resource == '/name/asciidoctor'
session.print %(HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n)
session.print %({"name": "asciidoctor"}\n)
elsif File.file?(resource_file = (File.join base_dir, resource))
mimetype = if (ext = File.extname(resource_file)[1..-1])
ext == 'adoc' ? 'text/plain' : %(image/#{ext})
else
'text/plain'
end
session.print %(HTTP/1.1 200 OK\r\nContent-Type: #{mimetype}\r\n\r\n)
File.open resource_file, Asciidoctor::FILE_READ_MODE do |fd|
until fd.eof? do
buffer = fd.read 256
session.write buffer
end
end
else
session.print %(HTTP/1.1 404 File Not Found\r\nContent-Type: text/plain\r\n\r\n)
session.print %(404 - Resource not found.\n)
end
session.close
end
end
begin
yield
ensure
server_thread.exit
server_thread.value
server.close
end
end
end
###
#
# Context goodness provided by @citrusbyte's contest.
# See https://github.com/citrusbyte/contest
#
###
# Contest adds +teardown+, +test+ and +context+ as class methods, and the
# instance methods +setup+ and +teardown+ now iterate on the corresponding
# blocks. Note that all setup and teardown blocks must be defined with the
# block syntax. Adding setup or teardown instance methods defeats the purpose
# of this library.
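#
# A hypothetical usage sketch of this DSL (the context name, setup body, and
# assertion are made up for illustration, not taken from the real test suite):
#
#   context 'paragraph conversion' do
#     setup do
#       @input = 'plain text'
#     end
#
#     test 'wraps plain text in a paragraph' do
#       assert_css 'p', (convert_string_to_embedded @input), 1
#     end
#   end
#
# This defines a nested Test subclass (TestParagraphConversion) with a
# test_wraps_plain_text_in_a_paragraph method, so each context runs with its
# own setup and teardown blocks.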
class Minitest::Test
class << self
def setup &block
define_method :setup do
super(&block)
instance_eval(&block)
end
end
def teardown &block
define_method :teardown do
instance_eval(&block)
super(&block)
end
end
def context name, opts = {}, &block
if opts.key? :if
return unless opts[:if]
elsif opts.key? :unless
return if opts[:unless]
end
subclass = Class.new self
remove_tests subclass
subclass.class_eval(&block) if block_given?
const_set (context_name name), subclass
end
def test name, opts = {}, &block
if opts.key? :if
return unless opts[:if]
elsif opts.key? :unless
return if opts[:unless]
end
define_method (test_name name), &block
end
def remove_tests subclass
subclass.public_instance_methods.each do |m|
subclass.send :undef_method, m if m.to_s.start_with? 'test_'
end
end
alias should test
alias describe context
private
def context_name name
%(Test#{(sanitize_name name).gsub(/(^| )(\w)/) { $2.upcase }}).to_sym
end
def test_name name
%(test_#{((sanitize_name name).gsub %r/\s+/, '_')}).to_sym
end
def sanitize_name name
(name.gsub %r/\W+/, ' ').strip
end
end
end
def context name, &block
Minitest::Test.context name, &block
end
| 28.739382 | 130 | 0.656815 |
bb9ce1280d18224fad33d335c40988017c032ad3 | 1,801 | # frozen_string_literal: true
module Parametric
class BlockValidator
def self.build(meth, &block)
klass = Class.new(self)
klass.public_send(meth, &block)
klass
end
def self.message(&block)
@message_block = block if block_given?
@message_block if instance_variable_defined?('@message_block')
end
def self.validate(&validate_block)
@validate_block = validate_block if block_given?
@validate_block if instance_variable_defined?('@validate_block')
end
def self.coerce(&coerce_block)
@coerce_block = coerce_block if block_given?
@coerce_block
end
def self.eligible(&block)
@eligible_block = block if block_given?
@eligible_block if instance_variable_defined?('@eligible_block')
end
def self.meta_data(&block)
@meta_data_block = block if block_given?
@meta_data_block if instance_variable_defined?('@meta_data_block')
end
attr_reader :message
def initialize(*args)
@args = args
@message = 'is invalid'
@validate_block = self.class.validate || ->(*args) { true }
@coerce_block = self.class.coerce || ->(v, *_) { v }
@eligible_block = self.class.eligible || ->(*args) { true }
@meta_data_block = self.class.meta_data || ->(*args) { {} }
end
def eligible?(value, key, payload)
args = (@args + [value, key, payload])
@eligible_block.call(*args)
end
def coerce(value, key, context)
@coerce_block.call(value, key, context)
end
def valid?(value, key, payload)
args = (@args + [value, key, payload])
@message = self.class.message.call(*args) if self.class.message
@validate_block.call(*args)
end
def meta_data
      @meta_data_block.call(*@args)
end
end
end
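#
# A minimal usage sketch of the class above. The validator name and regexp are
# made up for illustration; `build(:validate, &block)` simply installs the
# block as the subclass's validate block:
#
#   word = Parametric::BlockValidator.build(:validate) do |value, _key, _payload|
#     value.to_s.match?(/\A\w+\z/)
#   end
#
#   word.new.valid?('hello', :name, {})  # => true
#   word.new.valid?('not ok', :name, {}) # => false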
| 26.880597 | 72 | 0.649084 |