hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e86b6a4393548768903bcc6e93409bd6a708a692 | 3,210 | module AudioStream
module Fx
class ConvolutionReverb
# Builds a partitioned-convolution reverb from an impulse response.
#
# @param impulse [AudioStream::AudioInput] Impulse input
# @param dry [AudioStream::Decibel | Float] Dry gain
# @param wet [AudioStream::Decibel | Float] Wet gain
def initialize(impulse, dry: -6, wet: -6)
  impulse_bufs = impulse.to_a
  @impulse_size = impulse_bufs.size
  @channels = impulse_bufs[0].channels
  @window_size = impulse_bufs[0].window_size
  # Convert dB values to linear magnitude once, up front.
  @dry_gain = Decibel.db(dry).mag
  @wet_gain = Decibel.db(wet).mag

  # Prepend a zero buffer so every FFT frame below spans two consecutive
  # impulse buffers (i-1 and i), i.e. 2*window_size samples per frame.
  zero_buf = Buffer.create(@window_size, @channels)
  impulse_bufs = [zero_buf.clone] + impulse_bufs

  # Forward FFT of each two-buffer frame, normalized by frame length.
  @impulse_ffts = []
  @impulse_size.times {|i|
    na = NArray.float(@channels, @window_size*2)
    impulse_bufs[i].to_float_na(na, 0)
    impulse_bufs[i+1].to_float_na(na, @window_size*@channels)
    @impulse_ffts << FFTW3.fft(na, FFTW3::FORWARD) / na.length
  }
  # Largest per-frame spectral power (sum of squared magnitudes), averaged
  # over channels; used in #process to normalize the wet signal level.
  @impulse_max_gain = @impulse_ffts.map{|c| c.real**2 + c.imag**2}.map(&:sum).max / @channels

  # Ring of pending wet spectra, one slot per impulse frame.
  @wet_ffts = RingBuffer.new(@impulse_size) {
    Array.new(@impulse_size, NArray.float(@channels, @window_size*2))
  }
  @prev_input = zero_buf.clone
end
# Runs one window of audio through the reverb.
#
# Builds a 2-window FFT frame from [previous input, current input],
# multiplies it with every impulse partition spectrum, accumulates the
# partitions that fall due on this window, and mixes the inverse-FFT
# result with the dry signal.
#
# @param input [AudioStream::Buffer] one window of audio; must match the
#   impulse's window size and channel count
# @return [AudioStream::Buffer] dry+wet mix (mono or stereo per @channels)
# @raise [RuntimeError] when window size or channel count differs
def process(input)
  if @window_size!=input.window_size
    raise "window size is not match: impulse.size=#{@window_size} input.size=#{input.window_size}"
  end
  if @channels!=input.channels
    raise "channels is not match: impulse.channels=#{@channels} input.channels=#{input.channels}"
  end

  # current dry to wet
  # FFT frame spanning the previous and current input windows.
  na = NArray.float(@channels, @window_size*2)
  @prev_input.to_float_na(na, 0)
  input.to_float_na(na, @window_size*@channels)
  input_fft = FFTW3.fft(na, FFTW3::FORWARD) / na.length
  # Spectral multiplication against each impulse partition; products are
  # stored in the ring buffer to be emitted over the following windows.
  @wet_ffts.current = @impulse_ffts.map {|impulse_fft|
    input_fft * impulse_fft
  }
  @wet_ffts.rotate
  @prev_input = input.clone

  # calc wet matrix sum
  # Accumulate, from each stored spectrum, the partition that lands on
  # this output window.
  wet_fft = NArray.complex(@channels, @window_size*2)
  @wet_ffts.each_with_index {|wet, i|
    wet_fft += wet[@impulse_size-i-1]
  }
  # Inverse FFT; keep the second half (the samples belonging to the
  # current window) and normalize against the impulse's peak gain.
  wet_na = FFTW3.fft(wet_fft, FFTW3::BACKWARD)[(@channels*@window_size)...(@channels*@window_size*2)] * (@wet_gain / @impulse_max_gain)

  # current dry + wet matrix sum
  src0 = input.streams[0]
  src1 = input.streams[1] # only read in the stereo branch below
  case @channels
  when 1
    output = Buffer.create_mono(@window_size)
    dst0 = output.streams[0]
    @window_size.times {|i|
      dry = src0[i] * @dry_gain
      wet = wet_na[i].real
      dst0[i] = dry + wet
    }
  when 2
    output = Buffer.create_stereo(@window_size)
    dst0 = output.streams[0]
    dst1 = output.streams[1]
    @window_size.times {|i|
      # dry
      dry0 = src0[i] * @dry_gain
      dry1 = src1[i] * @dry_gain
      # wet
      # wet_na is channel-interleaved: even index = ch0, odd = ch1
      wet0 = wet_na[i*2].real
      wet1 = wet_na[(i*2)+1].real
      dst0[i] = dry0 + wet0
      dst1[i] = dry1 + wet1
    }
  end
  output
end
end
end
end
| 31.470588 | 141 | 0.574143 |
e8f8ef76400602b64bb3d09ee94a84b17fa7405a | 82 | Dir[File.dirname(__FILE__) + '/active_japanese/*.rb'].each do |f|
require f
end
| 20.5 | 65 | 0.695122 |
39240d5dc62e0126fbb97dacbe82c11fca57edc8 | 138 | class AddFormattedAddressesToRoutes < ActiveRecord::Migration
# Adds a text column to cache formatted addresses on routes.
def change
  add_column :routes, :formatted_addresses, :text
end
end
| 19.714286 | 61 | 0.789855 |
261747c6d09898fc27a8d882559e06708a5f8798 | 2,332 | module IceCube
class WeeklyRule < ValidatedRule
include Validations::HourOfDay
include Validations::MinuteOfHour
include Validations::SecondOfMinute
# include Validations::DayOfMonth # n/a
include Validations::DayOfWeek
include Validations::Day
include Validations::MonthOfYear
# include Validations::DayOfYear # n/a
include Validations::WeeklyInterval
attr_reader :week_start
# @param interval [Integer] number of weeks between occurrences
# @param week_start [Symbol] day the week begins on (e.g. :sunday, :monday)
def initialize(interval = 1, week_start = :sunday)
  super(interval)
  interval(interval, week_start)
  # Lock wday/hour/min/sec so occurrences default to the start time's
  # values unless explicit validations override them.
  schedule_lock(:wday, :hour, :min, :sec)
  reset
end
# Day-of-month is not applicable to a weekly rule; arguments are accepted
# and discarded so fluent rule-building chains keep working.
def day_of_month(*_)
  # NO-OP
  self
end
# Day-of-year is not applicable to a weekly rule; arguments are accepted
# and discarded so fluent rule-building chains keep working.
def day_of_year(*_)
  # NO-OP
  self
end
# Move the effective start time to correct for when the schedule has
# validations earlier in the week than the selected start time, e.g.
#
#   Schedule.new(wednesday).weekly(2).day(:monday)
#
# The effective start time gets realigned to the second next Monday, jumping
# over the gap week for the interval (2). Without realignment, the correct
# Monday occurrence would be missed when the schedule performs a 7-day jump
# into the next interval week, arriving on the Wednesday. This corrects any
# selections from dates that are misaligned to the schedule interval.
#
# @param step_time [Time] the time the schedule has stepped to
# @param start_time [Time] the schedule's configured start time
def realign(step_time, start_time)
  time = TimeUtil::TimeWrapper.new(start_time)
  offset = wday_offset(step_time, start_time)
  time.add(:day, offset)
  super step_time, time.to_time
end
# Calculate how many days to the first wday validation in the correct
# interval week. This may move backwards within the week if starting in an
# interval week with earlier validations.
#
# @return [Integer] day offset to apply to the start time (may be negative)
def wday_offset(step_time, start_time)
  return 0 if step_time == start_time
  wday_validations = other_interval_validations.select { |v| v.type == :wday }
  return 0 if wday_validations.none?

  # Days elapsed from start to the stepped time.
  days = step_time.to_date - start_time.to_date
  # NOTE(review): assumed to be the remaining days until the next valid
  # interval week (0 when step_time is already aligned) — confirm against
  # Validations::WeeklyInterval#validate.
  interval = base_interval_validation.validate(step_time, start_time).to_i
  # Earliest validated weekday, normalized so week_start is day 0.
  min_wday = wday_validations.map { |v| TimeUtil.normalize_wday(v.day, week_start) }.min
  step_wday = TimeUtil.normalize_wday(step_time.wday, week_start)
  days + interval - step_wday + min_wday
end
end
end
| 31.513514 | 92 | 0.697256 |
e9eb0c8b62344a8771195554a304af0efc3a93fd | 229 | module Mongoid
module Contextual
module Aggregable
module Commands
# Aggregation command wrapping MongoDB's $match pipeline stage.
class Match < Base
  # Forwards all arguments to Base, fixing the operator name to '$match'.
  def initialize(*args)
    super('$match', *args)
  end
end
end
end
end
end
| 14.3125 | 34 | 0.532751 |
3833fa30b3e0dfb914a4c573fbfd6049b54b216b | 254 | class CreateCocoaPodDependencies < ActiveRecord::Migration
# Creates a self-referential join table linking a pod to the pods it
# depends on.
def change
  create_table :cocoa_pod_dependencies do |t|
    t.references :cocoa_pod, index: true
    t.references :dependent_cocoa_pod, index: true
    t.timestamps
  end
end
end
| 23.090909 | 58 | 0.732283 |
b9fc35e32d982e987d99da42868dc983297f261e | 705 | require_relative "boot"
require "rails/all"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Stardate
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 7.0
# Configuration for the application, engines, and railties goes here.
#
# These settings can be overridden in specific environments using the files
# in config/environments, which are processed later.
#
# config.time_zone = "Central Time (US & Canada)"
# config.eager_load_paths << Rails.root.join("extras")
end
end
| 30.652174 | 79 | 0.737589 |
ff5482e58338ddd3932ed81020e5ace81d0169bd | 1,645 | require 'spec_helper'
describe TaskManager do
describe '.generate_tasks' do
let(:tasks) { TaskManager.generate_tasks }
let(:enabled_at) { 1.hour.ago }
context 'when having an enabled plan' do
before do
@plan = FactoryGirl.create(:plan_with_assignees, assignees_count: assignees_count, enabled_at: enabled_at)
end
shared_examples 'having an enabled plan' do
context 'and only one assignee exists' do
let(:assignees_count) { 1 }
it 'creates only one task' do
tasks.should have(1).item
end
end
context 'and many assignees exist' do
let(:assignees_count) { 3 }
it 'creates a task for each assignee' do
tasks.should have(3).items
end
end
end
it_behaves_like 'having an enabled plan'
context 'and last_task_created_at is nil' do
before { @plan.update_attributes(last_task_created_at: nil) }
it_behaves_like 'having an enabled plan'
end
end
context 'when having enabled plans' do
before { FactoryGirl.create_list(:plan_with_assignees, 3, enabled_at: enabled_at) }
it 'creates tasks for each plan' do
tasks.should have(3).items
end
end
context 'without any enabled plans' do
let(:enabled_at) { 1.hour.since }
before { FactoryGirl.create_list(:plan_with_assignees, 3, enabled_at: enabled_at) }
it 'does nothing' do
tasks.should be_empty
end
end
context 'without any plans' do
it 'does nothing' do
tasks.should be_empty
end
end
end
end
| 25.307692 | 114 | 0.633435 |
085530affb4373b46c60ed8f322a4469955e8a2c | 1,828 | require 'minitest'
Minitest.autorun
require 'terminal-table'
require 'ap'
AwesomePrint.defaults = {
indent: -2,
}
module Minitest::Assertions
# English-reading equality assertion: `is actual, expected`.
# Note the argument order is reversed relative to assert_equal(expected, actual).
def is subject, predicate
  assert_equal predicate, subject
end
# Compares a response against an expected JSON body and status code; on
# mismatch fails with a side-by-side expected/actual comparison table.
#
# @param response [#status, #body] the HTTP response under test
# @param expected_output [Object] object that will be JSON-encoded and
#   compared to the raw response body
# @param expected_status [Integer] expected HTTP status code
def assert_response response, expected_output, expected_status
  expected_body = expected_output.to_json
  if (response.status != expected_status) || (response.body != expected_body)
    # Re-parse the actual body for pretty-printing; if it isn't valid
    # JSON, show the parse error itself in the table instead.
    actual_output = begin
      JSON.parse response.body
    rescue => error
      error
    end
    table = Terminal::Table.new headings: ['', 'Expected', 'Actual'],
      rows: [
        ['response.status', expected_status, response.status],
        ['response JSON', expected_output.awesome_inspect, actual_output.awesome_inspect],
      ]
    fail "\n#{table}"
  end
end
end
DIR = '/tmp/hobby-rpc.tests'
Dir.mkdir DIR unless Dir.exist? DIR
require 'puma'
require 'hobby/rpc'
require 'securerandom'
require_relative 'rpc_client'
# Test lifecycle mixin: setup forks a child process that serves the app
# under test on a per-test unix socket; teardown kills it and removes the
# socket file.
module AppSetup
  def setup
    @pid = fork do
      require_relative 'service'
      server = Puma::Server.new app
      server.add_unix_listener socket
      server.run
      sleep # keep the child alive; the parent kills it in teardown
    end
    # Block until the child has actually created the socket file.
    sleep 0.1 until File.exist? socket
  end

  def teardown
    Process.kill 9, @pid
    File.delete socket if File.exist? socket
  end
end
# Defines an anonymous Minitest::Test subclass that exercises `app` over a
# unique unix socket under DIR (timestamp + UUID keep parallel runs apart).
#
# @param name [String] base name for the socket file
# @param description [String] method name given to the generated test
# @param app [#call] app under test (defaults to a fresh Hobby::RPC)
# @return [Class] the generated Minitest::Test subclass
def test name, description, app: Hobby::RPC.new, &block
  socket = "#{DIR}/#{name}.#{Time.now.to_i}.#{SecureRandom.uuid}"
  Class.new Minitest::Test do
    include AppSetup
    # Expose app/socket to AppSetup#setup via reader methods.
    define_method :app do app end
    define_method :socket do socket end
    define_method description, &block
  end
end
# Sugar over `test`: derives the test name from the calling file's
# basename so specs read like `it 'does X' do ... end`.
def it summary, app: Hobby::RPC.new, &block
  name = File.basename caller_locations.first.path, '.rb'
  test name, "#{name}(it #{summary})", app: app, &block
end
| 23.139241 | 92 | 0.654814 |
61f7febc98ce2a717e0181def4f6b1fc66dac784 | 1,659 | # frozen_string_literal: true
require 'virtus'
require 'droplet_kit/utils'
module DropletKit
class BaseModel
DO_NAMESPACE = 'do'
UNSUPPORTED_COLLECTIONS = ['space']
include Virtus.model
include Virtus::Equalizer.new(name || inspect)
# @return [String] class name plus a dump of every instance variable,
#   e.g. "<DropletKit::Droplet {:@id=>1}>"
def inspect
  ivars = instance_variables.map { |ivar| [ivar, instance_variable_get(ivar)] }.to_h
  "<#{self.class.name} #{ivars}>"
end
# @return [String] URN of the form "do:<collection>:<identifier>"
def urn
  "#{DO_NAMESPACE}:#{collection_name}:#{identifier}"
end
# @return [String] underscored, demodulized class name (the URN collection)
def collection_name
  DropletKit::Utils.underscore self.class.name.split('::').last
end
# @return [Object] the first of the :id / :uuid / :slug attributes present
# @raise [DropletKit::Error] when none of them is set
def identifier
  identifier = attributes[:id] || attributes[:uuid] || attributes[:slug]
  raise DropletKit::Error.new("#{self.class.name} doesn't support URNs") if identifier.nil?
  identifier
end
# Checks whether a URN belongs to the "do" namespace and names a
# collection that maps to a DropletKit model constant (or is a known
# collection without a model).
#
# @param urn [String]
# @return [Boolean]
def self.valid_urn?(urn)
  parts = urn.split(':')
  return false if parts.size != 3 || parts[0] != DO_NAMESPACE
  collection = parts[1]
  # e.g. "space" has no model class but is still a legal URN collection.
  return true if UNSUPPORTED_COLLECTIONS.include?(collection)
  begin
    const_get "DropletKit::#{DropletKit::Utils.camelize(collection)}"
  rescue NameError
    return false
  end
  true
end
# Builds a model instance from a DigitalOcean URN ("do:<collection>:<id>").
#
# @param urn [String]
# @return [BaseModel, nil] nil when the collection is known but has no model
# @raise [DropletKit::Error] when the URN is malformed
def self.from_urn(urn)
  # Bug fix: the original constructed the error without raising it, so
  # invalid URNs fell straight through to the parsing below.
  raise DropletKit::Error.new("Invalid urn: #{urn}") unless valid_urn?(urn)
  parts = urn.split(':')
  collection = parts[1]
  identifier = parts[2]
  return nil if UNSUPPORTED_COLLECTIONS.include?(collection)
  klass = const_get("DropletKit::#{DropletKit::Utils.camelize(collection)}")
  klass.from_identifier(identifier)
end
# @param identifier [Object] value assigned as the new model's :id
# @return [BaseModel]
def self.from_identifier(identifier)
  new(id: identifier)
end
end
end
| 24.397059 | 95 | 0.653406 |
edcbabbb6682619370354be33e82dd9dbb827cfa | 1,256 | require 'rubygems'
require 'bundler/setup'
Bundler.require
require 'open-uri'
require 'yaml'
require 'csv'
module RefreshList
SOURCE_URI = 'https://www.currency-iso.org/dam/downloads/lists/list_one.xml'.freeze
RESULT_FILE_PATH = 'currencies.csv'.freeze
SYMBOLS = YAML.load_file('currency_symbols.yml').freeze
HEADER = ['ENTITY', 'Currency', 'Alphabetic Code', 'Numeric Code', 'Minor unit', 'Symbol'].freeze
extend self
# Fetches the ISO 4217 currency XML and rewrites RESULT_FILE_PATH:
# header row first, then one row per currency entry.
#
# The original reopened the CSV file in append mode once per row; this
# opens it a single time for the whole run while producing the same
# output in the same order (header is written before the source is read,
# matching the original's behavior on fetch failure).
def perform
  CSV.open(RESULT_FILE_PATH, 'w') do |csv|
    csv << HEADER
    xml_data = read_source
    build_result_data(xml_data: xml_data).each { |row| csv << row }
  end
end
# Downloads the currency list XML.
# @return [String] raw XML document fetched from SOURCE_URI
def read_source
  URI.open(SOURCE_URI).read
end
# Maps each <CcyNtry> node of the ISO 4217 XML to a CSV row matching
# HEADER: entity, currency name, alphabetic code, numeric code, minor
# unit, and the symbol looked up from SYMBOLS by alphabetic code.
#
# @param xml_data [String] raw XML document
# @return [Array<Array>] one row per currency entry
def build_result_data(xml_data:)
  document = Nokogiri::XML(xml_data)
  document.xpath('//CcyNtry').map do |node|
    alpha_code = node.at_xpath('Ccy')&.content
    [
      node.at_xpath('CtryNm')&.content,
      node.at_xpath('CcyNm')&.content,
      alpha_code,
      node.at_xpath('CcyNbr')&.content,
      node.at_xpath('CcyMnrUnts')&.content,
      SYMBOLS[alpha_code]
    ]
  end
end
end
RefreshList.perform
| 25.12 | 99 | 0.672771 |
33d2b76e09ffb60b363b0a192fa5b94ff55450ed | 8,851 | =begin
#OpenAPI Petstore
#This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.0.0-SNAPSHOT
=end
require 'date'
require 'time'
module Petstore
class Pet
attr_accessor :id
attr_accessor :category
attr_accessor :name
attr_accessor :photo_urls
attr_accessor :tags
# pet status in the store
attr_accessor :status
# Validates that a value belongs to a fixed set of enum values, coercing
# the allowed values to the declared datatype up front.
class EnumAttributeValidator
  attr_reader :datatype
  attr_reader :allowable_values

  # @param datatype [String] e.g. 'Integer', 'Float', 'String'
  # @param allowable_values [Array] raw values, coerced per datatype
  def initialize(datatype, allowable_values)
    coercion =
      case datatype.to_s
      when /Integer/i then :to_i
      when /Float/i then :to_f
      end
    @allowable_values = allowable_values.map do |raw|
      coercion ? raw.public_send(coercion) : raw
    end
  end

  # nil/false count as valid; any other value must be in the allowed list.
  def valid?(value)
    !value || allowable_values.include?(value)
  end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'id' => :'id',
:'category' => :'category',
:'name' => :'name',
:'photo_urls' => :'photoUrls',
:'tags' => :'tags',
:'status' => :'status'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'id' => :'Integer',
:'category' => :'Category',
:'name' => :'String',
:'photo_urls' => :'Array<String>',
:'tags' => :'Array<Tag>',
:'status' => :'String'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `Petstore::Pet` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `Petstore::Pet`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'id')
self.id = attributes[:'id']
end
if attributes.key?(:'category')
self.category = attributes[:'category']
end
if attributes.key?(:'name')
self.name = attributes[:'name']
end
if attributes.key?(:'photo_urls')
if (value = attributes[:'photo_urls']).is_a?(Array)
self.photo_urls = value
end
end
if attributes.key?(:'tags')
if (value = attributes[:'tags']).is_a?(Array)
self.tags = value
end
end
if attributes.key?(:'status')
self.status = attributes[:'status']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @name.nil?
invalid_properties.push('invalid value for "name", name cannot be nil.')
end
if @photo_urls.nil?
invalid_properties.push('invalid value for "photo_urls", photo_urls cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return [Boolean] true if required fields are set and status (when
#   present) is one of the allowed enum values
def valid?
  return false if @name.nil?
  return false if @photo_urls.nil?
  status_validator = EnumAttributeValidator.new('String', ["available", "pending", "sold"])
  return false unless status_validator.valid?(@status)
  true
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] status Object to be assigned
# @raise [ArgumentError] when status is not one of the allowed enum values
def status=(status)
  validator = EnumAttributeValidator.new('String', ["available", "pending", "sold"])
  unless validator.valid?(status)
    fail ArgumentError, "invalid value for \"status\", must be one of #{validator.allowable_values}."
  end
  @status = status
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
id == o.id &&
category == o.category &&
name == o.name &&
photo_urls == o.photo_urls &&
tags == o.tags &&
status == o.status
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[id, category, name, photo_urls, tags, status].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
  case type.to_sym
  when :Time
    Time.parse(value)
  when :Date
    Date.parse(value)
  when :String
    value.to_s
  when :Integer
    value.to_i
  when :Float
    value.to_f
  when :Boolean
    # Accept common truthy spellings; anything else deserializes to false.
    if value.to_s =~ /\A(true|t|yes|y|1)\z/i
      true
    else
      false
    end
  when :Object
    # generic object (usually a Hash), return directly
    value
  when /\AArray<(?<inner_type>.+)>\z/
    # Recursively deserialize each element using the captured inner type.
    inner_type = Regexp.last_match[:inner_type]
    value.map { |v| _deserialize(inner_type, v) }
  when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
    # Recursively deserialize both keys and values using their captured types.
    k_type = Regexp.last_match[:k_type]
    v_type = Regexp.last_match[:v_type]
    {}.tap do |hash|
      value.each do |k, v|
        hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
      end
    end
  else # model
    # Any other type name is treated as a generated model class under Petstore.
    Petstore.const_get(type).build_from_hash(value)
  end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
  case value
  when Array
    # nils are dropped before recursing into each element
    value.compact.map { |item| _to_hash(item) }
  when Hash
    value.each_with_object({}) do |(key, inner), result|
      result[key] = _to_hash(inner)
    end
  else
    value.respond_to?(:to_hash) ? value.to_hash : value
  end
end
end
end
| 29.019672 | 191 | 0.604338 |
033453c469904ccfcd6c7abddf9120b85ba3a571 | 21,552 | require 'puppet'
require 'puppet/util/tagging'
require 'puppet/parameter'
# The simplest resource class. Eventually it will function as the
# base class for all resource-like behaviour.
#
# @api public
class Puppet::Resource
include Puppet::Util::Tagging
include Puppet::Util::PsychSupport
include Enumerable
attr_accessor :file, :line, :catalog, :exported, :virtual, :strict
attr_reader :type, :title, :parameters, :rich_data_enabled
# @!attribute [rw] sensitive_parameters
# @api private
# @return [Array<Symbol>] A list of parameters to be treated as sensitive
attr_accessor :sensitive_parameters
# @deprecated
attr_accessor :validate_parameters
require 'puppet/indirector'
extend Puppet::Indirector
indirects :resource, :terminus_class => :ral
EMPTY_ARRAY = [].freeze
EMPTY_HASH = {}.freeze
ATTRIBUTES = [:file, :line, :exported].freeze
TYPE_CLASS = 'Class'.freeze
TYPE_NODE = 'Node'.freeze
TYPE_SITE = 'Site'.freeze
PCORE_TYPE_KEY = '__pcore_type__'.freeze
VALUE_KEY = 'value'.freeze
def self.from_data_hash(data)
resource = self.allocate
resource.initialize_from_hash(data)
resource
end
# Populates this (allocated but uninitialized) resource from a data hash,
# the inverse of #to_data_hash; used by .from_data_hash.
#
# @param data [Hash] requires 'type' and 'title'; may also carry
#   'parameters', 'sensitive_parameters', 'tags', and the ATTRIBUTES keys
# @raise [ArgumentError] when 'type' or 'title' is missing
def initialize_from_hash(data)
  raise ArgumentError, _('No resource type provided in serialized data') unless type = data['type']
  raise ArgumentError, _('No resource title provided in serialized data') unless title = data['title']
  @type, @title = self.class.type_and_title(type, title)
  if params = data['parameters']
    # Rich-data parameter values are reconstituted before assignment.
    params = Puppet::Pops::Serialization::FromDataConverter.convert(params)
    @parameters = {}
    params.each { |param, value| self[param] = value }
  else
    @parameters = EMPTY_HASH
  end
  if sensitives = data['sensitive_parameters']
    @sensitive_parameters = sensitives.map(&:to_sym)
  else
    @sensitive_parameters = EMPTY_ARRAY
  end
  if tags = data['tags']
    tags.each { |t| tag(t) }
  end
  # Restore file/line/exported, skipping absent entries.
  ATTRIBUTES.each do |a|
    value = data[a.to_s]
    send("#{a}=", value) unless value.nil?
  end
end
def inspect
"#{@type}[#{@title}]#{to_hash.inspect}"
end
# Serializes this resource to a plain data hash, the inverse of
# #initialize_from_hash / .from_data_hash.
#
# @return [Hash] with 'type', 'title', 'tags', any set ATTRIBUTES keys,
#   'exported' (always present), and optionally 'parameters' and
#   'sensitive_parameters'
def to_data_hash
  data = {
    'type' => type,
    'title' => title.to_s,
    'tags' => tags.to_data_hash
  }
  ATTRIBUTES.each do |param|
    value = send(param)
    data[param.to_s] = value unless value.nil?
  end
  # 'exported' must always be present; default to false when unset.
  data['exported'] ||= false
  params = {}
  self.to_hash.each_pair do |param, value|
    # Don't duplicate the title as the namevar
    unless param == namevar && value == title
      params[param.to_s] = Puppet::Resource.value_to_json_data(value)
    end
  end
  unless params.empty?
    data['parameters'] = Puppet::Pops::Serialization::ToDataConverter.convert(params, {
      :rich_data => environment.rich_data?,
      :symbol_as_string => true,
      :local_reference => false,
      :type_by_reference => true,
      :message_prefix => ref,
      :semantic => self
    })
  end
  data['sensitive_parameters'] = sensitive_parameters.map(&:to_s) unless sensitive_parameters.empty?
  data
end
# Recursively converts a parameter value to JSON-safe data: walks arrays
# and hashes, renders nested Puppet::Resource values as their
# "Type[title]" string, and maps the :undef symbol to nil.
#
# @param value [Object]
# @return [Object] a JSON-representable equivalent
def self.value_to_json_data(value)
  if value.is_a?(Array)
    value.map{|v| value_to_json_data(v) }
  elsif value.is_a?(Hash)
    result = {}
    value.each_pair { |k, v| result[value_to_json_data(k)] = value_to_json_data(v) }
    result
  elsif value.is_a?(Puppet::Resource)
    value.to_s
  elsif value.is_a?(Symbol) && value == :undef
    nil
  else
    value
  end
end
# Munges a property value for serialization by reducing it to plain
# JSON-safe data.
#
# Bug fix: the original read `self.value.to_json_data(x)`, but this class
# defines no `value` accessor, so that call could only raise
# NoMethodError; the class method value_to_json_data (defined above) is
# the conversion helper that matches the intent.
def yaml_property_munge(x)
  self.class.value_to_json_data(x)
end
# Proxy these methods to the parameters hash. It's likely they'll
# be overridden at some point, but this works for now.
%w{has_key? keys length delete empty? <<}.each do |method|
define_method(method) do |*args|
parameters.send(method, *args)
end
end
# Set a given parameter. Converts all passed names
# to lower-case symbols.
def []=(param, value)
validate_parameter(param) if validate_parameters
parameters[parameter_name(param)] = value
end
# Return a given parameter's value. Converts all passed names
# to lower-case symbols.
def [](param)
parameters[parameter_name(param)]
end
def ==(other)
return false unless other.respond_to?(:title) and self.type == other.type and self.title == other.title
return false unless to_hash == other.to_hash
true
end
# Compatibility method.
def builtin?
# TODO: should be deprecated (was only used in one place in puppet codebase)
builtin_type?
end
# Is this a builtin resource type?
def builtin_type?
# Note - old implementation only checked if the resource_type was a Class
resource_type.is_a?(Puppet::CompilableResourceType)
end
# Iterate over each param/value pair, as required for Enumerable.
def each
parameters.each { |p,v| yield p, v }
end
def include?(parameter)
super || parameters.keys.include?( parameter_name(parameter) )
end
%w{exported virtual strict}.each do |m|
define_method(m+"?") do
self.send(m)
end
end
def class?
@is_class ||= @type == TYPE_CLASS
end
def stage?
@is_stage ||= @type.to_s.downcase == "stage"
end
# Construct a resource from data.
#
# Constructs a resource instance with the given `type` and `title`. Multiple
# type signatures are possible for these arguments and most will result in an
# expensive call to {Puppet::Node::Environment#known_resource_types} in order
# to resolve `String` and `Symbol` Types to actual Ruby classes.
#
# @param type [Symbol, String] The name of the Puppet Type, as a string or
# symbol. The actual Type will be looked up using
# {Puppet::Node::Environment#known_resource_types}. This lookup is expensive.
# @param type [String] The full resource name in the form of
# `"Type[Title]"`. This method of calling should only be used when
# `title` is `nil`.
# @param type [nil] If a `nil` is passed, the title argument must be a string
# of the form `"Type[Title]"`.
# @param type [Class] A class that inherits from `Puppet::Type`. This method
# of construction is much more efficient as it skips calls to
# {Puppet::Node::Environment#known_resource_types}.
#
# @param title [String, :main, nil] The title of the resource. If type is `nil`, may also
# be the full resource name in the form of `"Type[Title]"`.
#
# @api public
def initialize(type, title = nil, attributes = EMPTY_HASH)
@parameters = {}
@sensitive_parameters = []
if type.is_a?(Puppet::Resource)
# Copy constructor. Let's avoid munging, extracting, tagging, etc
src = type
self.file = src.file
self.line = src.line
self.exported = src.exported
self.virtual = src.virtual
self.set_tags(src)
self.environment = src.environment
@rstype = src.resource_type
@type = src.type
@title = src.title
src.to_hash.each do |p, v|
if v.is_a?(Puppet::Resource)
v = v.copy_as_resource
elsif v.is_a?(Array)
# flatten resource references arrays
v = v.flatten if v.flatten.find { |av| av.is_a?(Puppet::Resource) }
v = v.collect do |av|
av = av.copy_as_resource if av.is_a?(Puppet::Resource)
av
end
end
self[p] = v
end
@sensitive_parameters.replace(type.sensitive_parameters)
else
if type.is_a?(Hash)
raise ArgumentError, "Puppet::Resource.new does not take a hash as the first argument. "+
"Did you mean (#{(type[:type] || type["type"]).inspect}, #{(type[:title] || type["title"]).inspect }) ?"
end
environment = attributes[:environment]
# In order to avoid an expensive search of 'known_resource_types" and
# to obey/preserve the implementation of the resource's type - if the
# given type is a resource type implementation (one of):
# * a "classic" 3.x ruby plugin
# * a compatible implementation (e.g. loading from pcore metadata)
# * a resolved user defined type
#
# ...then, modify the parameters to the "old" (agent side compatible) way
# of describing the type/title with string/symbols.
#
# TODO: Further optimizations should be possible as the "type juggling" is
# not needed when the type implementation is known.
#
if type.is_a?(Puppet::CompilableResourceType) || type.is_a?(Puppet::Resource::Type)
# set the resource type implementation
self.resource_type = type
# set the type name to the symbolic name
type = type.name
end
@exported = false
# Set things like strictness first.
attributes.each do |attr, value|
next if attr == :parameters
send(attr.to_s + "=", value)
end
@type, @title = self.class.type_and_title(type, title)
rt = resource_type
if strict? && rt.nil?
if self.class?
raise ArgumentError, "Could not find declared class #{title}"
else
raise ArgumentError, "Invalid resource type #{type}"
end
end
params = attributes[:parameters]
unless params.nil? || params.empty?
extract_parameters(params)
if rt && rt.respond_to?(:deprecate_params)
rt.deprecate_params(title, params)
end
end
tag(self.type)
tag_if_valid(self.title)
end
end
def ref
to_s
end
# Find our resource.
def resolve
catalog ? catalog.resource(to_s) : nil
end
# A resource is an application component if it exports or consumes
# one or more capabilities, or if it requires a capability resource
def is_application_component?
return true if ! export.empty? || self[:consume]
# Array(self[:require]) does not work for Puppet::Resource instances
req = self[:require] || []
req = [ req ] unless req.is_a?(Array)
req.any? { |r| r.is_capability? }
end
# A resource is a capability (instance) if its underlying type is a
# capability type
def is_capability?
!resource_type.nil? && resource_type.is_capability?
end
# Returns the value of the 'export' metaparam as an Array
# @api private
def export
v = self[:export] || []
v.is_a?(Array) ? v : [ v ]
end
# The resource's type implementation
# @return [Puppet::Type, Puppet::Resource::Type]
# @api private
def resource_type
@rstype ||= self.class.resource_type(type, title, environment)
end
# The resource's type implementation
# @return [Puppet::Type, Puppet::Resource::Type]
# @api private
def self.resource_type(type, title, environment)
case type
when TYPE_CLASS; environment.known_resource_types.hostclass(title == :main ? "" : title)
when TYPE_NODE; environment.known_resource_types.node(title)
when TYPE_SITE; environment.known_resource_types.site(nil)
else
result = Puppet::Type.type(type)
if !result
krt = environment.known_resource_types
result = krt.definition(type) || krt.application(type)
end
result
end
end
# Set the resource's type implementation
# @param type [Puppet::Type, Puppet::Resource::Type]
# @api private
def resource_type=(type)
@rstype = type
end
def environment
@environment ||= if catalog
catalog.environment_instance
else
Puppet.lookup(:current_environment) { Puppet::Node::Environment::NONE }
end
end
def environment=(environment)
@environment = environment
end
# Produces a hash of attribute to value mappings where the title parsed into its components
# acts as the default values overridden by any parameter values explicitly given as parameters.
#
def to_hash
parse_title.merge parameters
end
def to_s
"#{type}[#{title}]"
end
def uniqueness_key
# Temporary kludge to deal with inconsistent use patterns; ensure we don't return nil for namevar/:name
h = self.to_hash
name = h[namevar] || h[:name] || self.name
h[namevar] ||= name
h[:name] ||= name
h.values_at(*key_attributes.sort_by { |k| k.to_s })
end
def key_attributes
resource_type.respond_to?(:key_attributes) ? resource_type.key_attributes : [:name]
end
# Convert our resource to yaml for Hiera purposes.
def to_hierayaml
# Collect list of attributes to align => and move ensure first
attr = parameters.keys
attr_max = attr.inject(0) { |max,k| k.to_s.length > max ? k.to_s.length : max }
attr.sort!
if attr.first != :ensure && attr.include?(:ensure)
attr.delete(:ensure)
attr.unshift(:ensure)
end
attributes = attr.collect { |k|
v = parameters[k]
" %-#{attr_max}s: %s\n" % [k, Puppet::Parameter.format_value_for_display(v)]
}.join
" %s:\n%s" % [self.title, attributes]
end
# Convert our resource to Puppet code.
# @return [String] a manifest snippet, e.g. "type { 'title':\n ensure => ...,\n}"
def to_manifest
  # Collect list of attributes to align => and move ensure first
  attr = parameters.keys
  attr_max = attr.inject(0) { |max,k| k.to_s.length > max ? k.to_s.length : max }
  attr.sort!
  if attr.first != :ensure && attr.include?(:ensure)
    attr.delete(:ensure)
    attr.unshift(:ensure)
  end
  # Pad each attribute name to attr_max so the => arrows line up.
  attributes = attr.collect { |k|
    v = parameters[k]
    " %-#{attr_max}s => %s,\n" % [k, Puppet::Parameter.format_value_for_display(v)]
  }.join
  # Escape single quotes so the title stays a valid quoted literal.
  escaped = self.title.gsub(/'/,"\\\\'")
  "%s { '%s':\n%s}" % [self.type.to_s.downcase, escaped, attributes]
end
def to_ref
ref
end
# Convert our resource to a RAL resource instance. Creates component
# instances for resource types that don't exist.
def to_ral
typeklass = Puppet::Type.type(self.type) || Puppet::Type.type(:component)
typeklass.new(self)
end
def name
# this is potential namespace conflict
# between the notion of an "indirector name"
# and a "resource name"
[ type, title ].join('/')
end
# Type arguments whose value is effectively unset on this resource:
# no parameter at all, a nil value, or an explicit :undef.
# Yields param-name / default-expression pairs from the type's arguments.
def missing_arguments
  resource_type.arguments.select do |param, default|
    the_param = parameters[param.to_sym]
    the_param.nil? || the_param.value.nil? || the_param.value == :undef
  end
end
private :missing_arguments
# @deprecated Not used by Puppet
# @api private
# Fills in missing class/defined-type parameters: for classes, a Hiera
# data binding ("class::param") is consulted first; otherwise the
# default expression is evaluated in the given scope.
# @param scope [Puppet::Parser::Scope] scope used for lookups/evaluation
# @return [Array] the names of the parameters that were set
def set_default_parameters(scope)
  Puppet.deprecation_warning(_('The method Puppet::Resource.set_default_parameters is deprecated and will be removed in the next major release of Puppet.'))
  return [] unless resource_type and resource_type.respond_to?(:arguments)
  unless is_a?(Puppet::Parser::Resource)
    fail Puppet::DevError, _("Cannot evaluate default parameters for %{resource} - not a parser resource") % { resource: self }
  end
  missing_arguments.collect do |param, default|
    rtype = resource_type
    if rtype.type == :hostclass
      using_bound_value = false
      # search_and_merge throws :no_such_key when no data binding exists.
      catch(:no_such_key) do
        bound_value = Puppet::Pops::Lookup.search_and_merge("#{rtype.name}::#{param}", Puppet::Pops::Lookup::Invocation.new(scope), nil)
        # Assign bound value but don't let an undef trump a default expression
        unless bound_value.nil? && !default.nil?
          self[param.to_sym] = bound_value
          using_bound_value = true
        end
      end
    end
    # NOTE: using_bound_value is nil (falsy) when the hostclass branch
    # above did not run, so non-class resources always take the default.
    unless using_bound_value
      next if default.nil?
      self[param.to_sym] = default.safeevaluate(scope)
    end
    param
  end.compact
end
# Returns a plain Puppet::Resource copy of self (useful for shedding
# subclass behavior).
def copy_as_resource
  Puppet::Resource.new(self)
end
# True when this resource's type accepts a parameter with the given name.
def valid_parameter?(name)
  resource_type.valid_parameter?(name)
end
# Verify that all required arguments are either present or
# have been provided with defaults.
# Must be called after 'set_default_parameters'. We can't join the methods
# because Type#set_parameters needs specifically ordered behavior.
#
# @deprecated Not used by Puppet
# @api private
# @raise [Puppet::ParseError] when a required argument is missing or a
#   parameter value does not match its declared type
def validate_complete
  Puppet.deprecation_warning(_('The method Puppet::Resource.validate_complete is deprecated and will be removed in the next major release of Puppet.'))
  return unless resource_type and resource_type.respond_to?(:arguments)
  # Every declared argument must have been supplied (or defaulted).
  resource_type.arguments.each do |param, default|
    param = param.to_sym
    fail Puppet::ParseError, _("Must pass %{param} to %{resource}") % { param: param, resource: self } unless parameters.include?(param)
  end
  # Perform optional type checking
  arg_types = resource_type.argument_types
  # Parameters is a map from name, to parameter, and the parameter again has name and value
  parameters.each do |name, value|
    next unless t = arg_types[name.to_s] # untyped, and parameters are symbols here (aargh, strings in the type)
    unless Puppet::Pops::Types::TypeCalculator.instance?(t, value.value)
      inferred_type = Puppet::Pops::Types::TypeCalculator.infer_set(value.value)
      actual = inferred_type.generalize()
      fail Puppet::ParseError, _("Expected parameter '%{name}' of '%{value0}' to have type %{value1}, got %{value2}") % { name: name, value0: self, value1: t.to_s, value2: actual.to_s }
    end
  end
end
# Raises a ParseError (carrying this resource's file/line) unless the
# named parameter is valid for this resource's type.
def validate_parameter(name)
  raise Puppet::ParseError.new(_("no parameter named '%{name}'") % { name: name }, file, line) unless valid_parameter?(name)
end
# This method, together with #file and #line, makes it possible for a Resource to be a 'source_pos' in a reported issue.
# @return [Integer] Instances of this class will always return `nil`.
def pos
  nil
end
# Strips parameters that would be noise in generated output: empty
# values, "absent" values (except for :ensure), and anything that is
# neither a property of the type nor listed in
# options[:parameters_to_include]. Mutates and returns self.
def prune_parameters(options = EMPTY_HASH)
  properties = resource_type.properties.map(&:name)
  # Iterate over a dup so deleting from self mid-iteration is safe.
  dup.collect do |attribute, value|
    if value.to_s.empty? or Array(value).empty?
      delete(attribute)
    elsif value.to_s == "absent" and attribute.to_s != "ensure"
      delete(attribute)
    end
    parameters_to_include = options[:parameters_to_include] || []
    delete(attribute) unless properties.include?(attribute) || parameters_to_include.include?(attribute)
  end
  self
end
# @api private
# Normalizes a (type, title) pair: splits "Type[title]" references,
# canonicalizes the type name, and maps an empty class title to :main.
# @return [Array(String, Object)] the munged [type, title] pair
def self.type_and_title(type, title)
  type, title = extract_type_and_title(type, title)
  type = munge_type_name(type)
  if type == TYPE_CLASS
    title = title == '' ? :main : munge_type_name(title)
  end
  [type, title]
end
# Splits the supplied arguments into [type, title]:
# * a "Type[title]" reference string is parsed apart (from either argument),
# * an explicit title is used together with the given type,
# * a Puppet::Type instance contributes its class name and title.
# @raise [ArgumentError] when no title can be determined
def self.extract_type_and_title(argtype, argtitle)
  if (argtype.nil? || argtype == :component || argtype == :whit) &&
  argtitle =~ /^([^\[\]]+)\[(.+)\]$/m then [ $1, $2 ]
  elsif argtitle.nil? && argtype =~ /^([^\[\]]+)\[(.+)\]$/m then [ $1, $2 ]
  elsif argtitle then [ argtype, argtitle ]
  elsif argtype.is_a?(Puppet::Type) then [ argtype.class.name, argtype.title ]
  else raise ArgumentError, "No title provided and #{argtype.inspect} is not a valid resource reference"
  end
end
private_class_method :extract_type_and_title
# Canonicalizes a type name: '', nil and 'component' all map to the
# generic class type; anything else has its segments capitalized.
def self.munge_type_name(value)
  return :main if value == :main
  return TYPE_CLASS if value == '' || value.nil? || value.to_s.casecmp('component') == 0
  Puppet::Pops::Types::TypeFormatter.singleton.capitalize_segments(value.to_s)
end
private_class_method :munge_type_name
private
# Produce a canonical method name.
# Lowercases and symbolizes the parameter, translating :name to the
# type's namevar when one exists.
def parameter_name(param)
  canonical = param.to_s.downcase.to_sym
  canonical = namevar if canonical == :name && namevar
  canonical
end
# The namevar for our resource type. If the type doesn't exist,
# always use :name.
# Only builtin types with exactly one key attribute expose their real
# namevar; composite-key types also fall back to :name.
def namevar
  if builtin_type? && !(t = resource_type).nil? && t.key_attributes.length == 1
    t.key_attributes.first
  else
    :name
  end
end
# Copies the given param => value pairs onto this resource, validating
# each parameter name first when strict mode is enabled.
def extract_parameters(params)
  params.each do |param, value|
    validate_parameter(param) if strict?
    self[param] = value
  end
end
# Produces a hash with { :key => part_of_title } for each entry in title_patterns
# for the resource type. A typical result for a title of 'example' is {:name => 'example'}.
# A resource type with a complex title to attribute mapping returns one entry in the hash
# per part.
#
# The first pattern whose regexp matches the title wins; its captures are
# zipped with the pattern's symbol/lambda pairs.
# @raise [Puppet::Error] when no title pattern matches
def parse_title
  h = {}
  type = resource_type
  if type.respond_to?(:title_patterns) && !type.title_patterns.nil?
    type.title_patterns.each { |regexp, symbols_and_lambdas|
      if captures = regexp.match(title.to_s)
        symbols_and_lambdas.zip(captures[1..-1]).each do |symbol_and_lambda,capture|
          symbol, proc = symbol_and_lambda
          # Many types pass "identity" as the proc; we might as well give
          # them a shortcut to delivering that without the extra cost.
          #
          # Especially because the global type defines title_patterns and
          # uses the identity patterns.
          #
          # This was worth about 8MB of memory allocation saved in my
          # testing, so is worth the complexity for the API.
          if proc then
            h[symbol] = proc.call(capture)
          else
            h[symbol] = capture
          end
        end
        return h
      end
    }
    # If we've gotten this far, then none of the provided title patterns
    # matched. Since there's no way to determine the title then the
    # resource should fail here.
    raise Puppet::Error, _("No set of title patterns matched the title \"%{title}\".") % { title: title }
  else
    return { :name => title.to_s }
  end
end
end
| 32.167164 | 187 | 0.652932 |
189c8d7a51832f57638b4f63db2cb02123821051 | 177 | class CreateTerms < ActiveRecord::Migration[5.0]
# Creates the `terms` table: a glossary entry with a name and its
# free-text definition (plus created_at/updated_at).
def change
  create_table :terms do |t|
    t.string :name
    t.text :definition
    t.timestamps
  end
end
end
| 16.090909 | 48 | 0.644068 |
b98b82e7e91dc3562bdf5404dd436c94e19d9be1 | 1,644 | # -*- encoding: utf-8 -*-
# stub: unicode-display_width 1.3.0 ruby lib
# RubyGems-generated gemspec stub for unicode-display_width 1.3.0.
Gem::Specification.new do |s|
  s.name = "unicode-display_width".freeze
  s.version = "1.3.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Jan Lelis".freeze]
  s.date = "2017-06-19"
  s.description = "[Unicode 10.0.0] Determines the monospace display width of a string using EastAsianWidth.txt, Unicode general category, and other data.".freeze
  s.email = "[email protected]".freeze
  s.extra_rdoc_files = ["README.md".freeze, "MIT-LICENSE.txt".freeze, "CHANGELOG.md".freeze]
  s.files = ["CHANGELOG.md".freeze, "MIT-LICENSE.txt".freeze, "README.md".freeze]
  s.homepage = "http://github.com/janlelis/unicode-display_width".freeze
  s.licenses = ["MIT".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 1.9.3".freeze)
  s.rubygems_version = "2.6.14".freeze
  s.summary = "Determines the monospace display width of a string in Ruby.".freeze
  s.installed_by_version = "2.6.14" if s.respond_to? :installed_by_version
  # Dependency declaration varies with the RubyGems version that loads
  # this spec; all branches declare the same rspec/rake requirements.
  if s.respond_to? :specification_version then
    s.specification_version = 4
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<rspec>.freeze, ["~> 3.4"])
      s.add_development_dependency(%q<rake>.freeze, ["~> 10.4"])
    else
      s.add_dependency(%q<rspec>.freeze, ["~> 3.4"])
      s.add_dependency(%q<rake>.freeze, ["~> 10.4"])
    end
  else
    s.add_dependency(%q<rspec>.freeze, ["~> 3.4"])
    s.add_dependency(%q<rake>.freeze, ["~> 10.4"])
  end
end
| 42.153846 | 162 | 0.683698 |
87367d26244134bc7df3cbe2025521b8075f5ba2 | 4,625 | require_relative 'mixins/group_validate_mixin'
# Drives state transitions of an approval Request tree (root, children,
# leaves): dispatches an incoming state to the matching private handler
# and propagates the change up to parents / across sibling leaves.
class RequestUpdateService
  include GroupValidateMixin
  attr_accessor :request
  def initialize(request_id)
    self.request = Request.find(request_id)
  end
  # Applies the state in options[:state] by calling the handler of the
  # same name; a no-op when the request is already in that state.
  def update(options)
    return if options[:state] == request.state
    send(options[:state], options)
  end
  private
  # Request entered the started state. Leaves may kick off external
  # processing; the root emits the request_started event.
  def started(options)
    if request.leaf?
      start_request
    end
    request.update!(options)
    EventService.new(request).request_started if request.root?
    update_parent(options) if request.child?
    notify_request if request.leaf?
  end
  # Approver group has been notified; stamps notified_at.
  def notified(options)
    request.update!(options.merge(:notified_at => DateTime.now))
    EventService.new(request).approver_group_notified if request.leaf?
    update_parent(options) if request.child?
  end
  # Request finished with a decision. Order matters: finish external
  # processing first, then bail if it got canceled meanwhile.
  def completed(options)
    finish_request(options[:decision]) if request.leaf?
    return if request.state == Request::CANCELED_STATE
    EventService.new(request).approver_group_finished if request.leaf?
    return child_completed(options) if request.child?
    return parent_completed(options) if request_completed?(options[:decision])
  end
  # A failure ends the request the same way a completion does.
  def failed(options)
    completed(options)
  end
  # Root only.
  def canceled(options)
    skip_leaves
    request.random_access_keys.destroy_all
    request.update!(options.merge(:finished_at => DateTime.now))
    EventService.new(request).request_canceled
  end
  # Leaf only. skipped is caused by cancel or deny. This state will not propagate to root
  def skipped(options)
    request.update!(options.merge(:finished_at => DateTime.now))
    request.parent.invalidate_number_of_finished_children
  end
  # Issues a SKIP action for every still-pending leaf.
  def skip_leaves
    leaves.each do |leaf|
      next unless leaf.state == Request::PENDING_STATE
      ActionCreateService.new(leaf.id).create(:operation => Action::SKIP_OPERATION, :processed_by => 'system')
    end
  end
  # A child finished: bubble the state up, then either skip the rest
  # (deny/error) or start the next group of leaves once peers approved.
  def child_completed(options)
    request.random_access_keys.destroy_all
    request.update!(options.merge(:finished_at => DateTime.now))
    request.parent.invalidate_number_of_finished_children
    update_parent(options)
    if [Request::DENIED_STATUS, Request::ERROR_STATUS].include?(options[:decision])
      skip_leaves
    else
      start_next_leaves if peers_approved?(request)
    end
  end
  # The whole tree is done; emits the final request_completed event.
  def parent_completed(options)
    request.random_access_keys.destroy_all
    request.update!(options.merge(:finished_at => DateTime.now))
    EventService.new(request).request_completed
  end
  # Done when every child finished, or immediately on deny/error.
  def request_completed?(decision)
    request.number_of_finished_children == request.number_of_children || [Request::DENIED_STATUS, Request::ERROR_STATUS].include?(decision)
  end
  # True when every sibling sharing this workflow/parent has approved.
  def peers_approved?(request)
    peers = Request.where(:workflow_id => request.workflow_id, :parent_id => request.parent_id)
    peers.any? { |peer| peer.decision != Request::APPROVED_STATUS } ? false : true
  end
  # Issues a START action for each leaf in the next pending group.
  def start_next_leaves
    pending_leaves = next_pending_leaves
    return unless pending_leaves
    pending_leaves.each do |leaf|
      ActionCreateService.new(leaf.id).create(:operation => Action::START_OPERATION, :processed_by => 'system')
    end
  end
  def leaves
    request.root.children.reverse # sort from oldest to latest
  end
  # The oldest pending leaves that share one workflow (processed as a group).
  def next_pending_leaves
    leaves.each_with_object([]) do |leaf, peers|
      next unless leaf.state == Request::PENDING_STATE
      peers << leaf if peers.empty? || leaf.workflow_id == peers.first.workflow_id
    end
  end
  def update_parent(options)
    RequestUpdateService.new(request.parent_id).update(options)
  end
  # start the external approval process if configured
  def start_request
    return unless request.workflow.try(:external_processing?)
    return unless runtime_validate_group(request)
    template = request.workflow.template
    # e.g. "foo" => FooProcessService
    processor_class = "#{template.process_setting['processor_type']}_process_service".classify.constantize
    ref = processor_class.new(request).start
    request.update!(:process_ref => ref)
  end
  # Notify the approver group directly when no external processor is used.
  def notify_request
    return if request.workflow.try(:external_processing?)
    ActionCreateService.new(request.id).create(:operation => Action::NOTIFY_OPERATION, :processed_by => 'system')
  end
  # complete the external approval process if configured
  def finish_request(decision)
    return if request_completed?(decision)
    return unless request.process_ref.present? && request.workflow.try(:external_processing?)
    template = request.workflow.template
    processor_class = "#{template.signal_setting['processor_type']}_process_service".classify.constantize
    processor_class.new(request).signal(decision)
  end
end
| 28.726708 | 139 | 0.747027 |
28be364e080eb4d47212ecc30137d052781f47ac | 122 | class TCuv < Verse
self.table_name = 't_cuv'
has_many :cuv_pericopes, class_name: 'CuvPericope', foreign_key: :id
end
| 24.4 | 70 | 0.745902 |
87e96836b52f83c5cd4c7155093539d9fde0d704 | 1,169 | =begin
#Datadog API V1 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'date'
require 'time'
module DatadogAPIClient::V1
# Enum of dashboard reflow modes; valid values are "auto" and "fixed".
class DashboardReflowType
  AUTO = "auto".freeze
  FIXED = "fixed".freeze

  # Builds the enum from string
  # @param [String] The enum value in the form of the string
  # @return [String] The enum value
  def self.build_from_hash(value)
    new.build_from_hash(value)
  end

  # Builds the enum from string
  # @param [String] The enum value in the form of the string
  # @return [String] The enum value, or an UnparsedObject wrapper when
  #   the value matches none of the declared constants
  def build_from_hash(value)
    recognized = DashboardReflowType.constants.any? do |const_name|
      DashboardReflowType.const_get(const_name) == value
    end
    if recognized
      value
    else
      DatadogAPIClient::V1::UnparsedObject.new(value)
    end
  end
end
end
| 29.225 | 110 | 0.733105 |
f8d37934763dbb73c93b429962cece50323ccaf6 | 119 | # frozen_string_literal: true
# Serializer that renders its subject through EnvironmentStatusEntity.
class EnvironmentStatusSerializer < BaseSerializer
  entity EnvironmentStatusEntity
end
| 19.833333 | 50 | 0.865546 |
62d633c15ed6574368a3ba138258b8676b3c7019 | 51 | module ActionAuthorization
VERSION = '1.1.2'
end
| 12.75 | 26 | 0.745098 |
b9f14a98574cf38ea0eacb66aa74349709251b8c | 8,576 | =begin
#Argo Workflows API
#Argo Workflows is an open source container-native workflow engine for orchestrating parallel jobs on Kubernetes. For more information, please see https://argoproj.github.io/argo-workflows/
The version of the OpenAPI document: VERSION
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.2.1
=end
require 'date'
require 'time'
module ArgoWorkflows
# CronWorkflow is the definition of a scheduled workflow resource
# (OpenAPI-generated model: serializes to/from the JSON keys listed in
# attribute_map).
class IoArgoprojWorkflowV1alpha1CronWorkflow
  # APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#resources
  attr_accessor :api_version

  # Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.io.k8s.community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
  attr_accessor :kind

  # ObjectMeta for this resource (required).
  attr_accessor :metadata

  # IoArgoprojWorkflowV1alpha1CronWorkflowSpec (required).
  attr_accessor :spec

  # IoArgoprojWorkflowV1alpha1CronWorkflowStatus (optional).
  attr_accessor :status

  # Attribute mapping from ruby-style variable name to JSON key.
  def self.attribute_map
    {
      :'api_version' => :'apiVersion',
      :'kind' => :'kind',
      :'metadata' => :'metadata',
      :'spec' => :'spec',
      :'status' => :'status'
    }
  end

  # Returns all the JSON keys this model knows about
  def self.acceptable_attributes
    attribute_map.values
  end

  # Attribute type mapping.
  def self.openapi_types
    {
      :'api_version' => :'String',
      :'kind' => :'String',
      :'metadata' => :'ObjectMeta',
      :'spec' => :'IoArgoprojWorkflowV1alpha1CronWorkflowSpec',
      :'status' => :'IoArgoprojWorkflowV1alpha1CronWorkflowStatus'
    }
  end

  # List of attributes with nullable: true
  def self.openapi_nullable
    Set.new([
    ])
  end

  # Initializes the object
  # @param [Hash] attributes Model attributes in the form of hash
  def initialize(attributes = {})
    if (!attributes.is_a?(Hash))
      fail ArgumentError, "The input argument (attributes) must be a hash in `ArgoWorkflows::IoArgoprojWorkflowV1alpha1CronWorkflow` initialize method"
    end
    # check to see if the attribute exists and convert string to symbol for hash key
    attributes = attributes.each_with_object({}) { |(k, v), h|
      if (!self.class.attribute_map.key?(k.to_sym))
        fail ArgumentError, "`#{k}` is not a valid attribute in `ArgoWorkflows::IoArgoprojWorkflowV1alpha1CronWorkflow`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
      end
      h[k.to_sym] = v
    }
    if attributes.key?(:'api_version')
      self.api_version = attributes[:'api_version']
    end
    if attributes.key?(:'kind')
      self.kind = attributes[:'kind']
    end
    if attributes.key?(:'metadata')
      self.metadata = attributes[:'metadata']
    end
    if attributes.key?(:'spec')
      self.spec = attributes[:'spec']
    end
    if attributes.key?(:'status')
      self.status = attributes[:'status']
    end
  end

  # Show invalid properties with the reasons. Usually used together with valid?
  # @return Array for valid properties with the reasons
  def list_invalid_properties
    invalid_properties = Array.new
    if @metadata.nil?
      invalid_properties.push('invalid value for "metadata", metadata cannot be nil.')
    end
    if @spec.nil?
      invalid_properties.push('invalid value for "spec", spec cannot be nil.')
    end
    invalid_properties
  end

  # Check to see if the all the properties in the model are valid
  # @return true if the model is valid
  def valid?
    return false if @metadata.nil?
    return false if @spec.nil?
    true
  end

  # Checks equality by comparing each attribute.
  # @param [Object] Object to be compared
  def ==(o)
    return true if self.equal?(o)
    self.class == o.class &&
      api_version == o.api_version &&
      kind == o.kind &&
      metadata == o.metadata &&
      spec == o.spec &&
      status == o.status
  end

  # @see the `==` method
  # @param [Object] Object to be compared
  def eql?(o)
    self == o
  end

  # Calculates hash code according to all attributes.
  # @return [Integer] Hash code
  def hash
    [api_version, kind, metadata, spec, status].hash
  end

  # Builds the object from hash
  # @param [Hash] attributes Model attributes in the form of hash
  # @return [Object] Returns the model itself
  def self.build_from_hash(attributes)
    new.build_from_hash(attributes)
  end

  # Builds the object from hash
  # @param [Hash] attributes Model attributes in the form of hash
  # @return [Object] Returns the model itself
  def build_from_hash(attributes)
    return nil unless attributes.is_a?(Hash)
    self.class.openapi_types.each_pair do |key, type|
      if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
        self.send("#{key}=", nil)
      elsif type =~ /\AArray<(.*)>/i
        # check to ensure the input is an array given that the attribute
        # is documented as an array but the input is not
        if attributes[self.class.attribute_map[key]].is_a?(Array)
          self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
        end
      elsif !attributes[self.class.attribute_map[key]].nil?
        self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
      end
    end
    self
  end

  # Deserializes the data based on type
  # @param string type Data type
  # @param string value Value to be deserialized
  # @return [Object] Deserialized data
  def _deserialize(type, value)
    case type.to_sym
    when :Time
      Time.parse(value)
    when :Date
      Date.parse(value)
    when :String
      value.to_s
    when :Integer
      value.to_i
    when :Float
      value.to_f
    when :Boolean
      if value.to_s =~ /\A(true|t|yes|y|1)\z/i
        true
      else
        false
      end
    when :Object
      # generic object (usually a Hash), return directly
      value
    when /\AArray<(?<inner_type>.+)>\z/
      inner_type = Regexp.last_match[:inner_type]
      value.map { |v| _deserialize(inner_type, v) }
    when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
      k_type = Regexp.last_match[:k_type]
      v_type = Regexp.last_match[:v_type]
      {}.tap do |hash|
        value.each do |k, v|
          hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
        end
      end
    else # model
      # models (e.g. Pet) or oneOf
      klass = ArgoWorkflows.const_get(type)
      klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
    end
  end

  # Returns the string representation of the object
  # @return [String] String presentation of the object
  def to_s
    to_hash.to_s
  end

  # to_body is an alias to to_hash (backward compatibility)
  # @return [Hash] Returns the object in the form of hash
  def to_body
    to_hash
  end

  # Returns the object in the form of hash
  # @return [Hash] Returns the object in the form of hash
  def to_hash
    hash = {}
    self.class.attribute_map.each_pair do |attr, param|
      value = self.send(attr)
      if value.nil?
        is_nullable = self.class.openapi_nullable.include?(attr)
        next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
      end
      hash[param] = _to_hash(value)
    end
    hash
  end

  # Outputs non-array value in the form of hash
  # For object, use to_hash. Otherwise, just return the value
  # @param [Object] value Any valid value
  # @return [Hash] Returns the value in the form of hash
  def _to_hash(value)
    if value.is_a?(Array)
      value.compact.map { |v| _to_hash(v) }
    elsif value.is_a?(Hash)
      {}.tap do |hash|
        value.each { |k, v| hash[k] = _to_hash(v) }
      end
    elsif value.respond_to? :to_hash
      value.to_hash
    else
      value
    end
  end
end
end
| 32 | 296 | 0.63736 |
ff8145b216c96c365227eb1d39486094b408a2e7 | 141 | class RenamePictureString < ActiveRecord::Migration
# Renames store_pictures.picture to store_pictures.image.
def up
  rename_column :store_pictures, :picture, :image
end
# Reverses #up so the migration can actually be rolled back.
# The original body was empty, which made a rollback a silent no-op:
# the column stayed named :image, so re-running #up would then fail.
def down
  rename_column :store_pictures, :image, :picture
end
end
| 15.666667 | 51 | 0.744681 |
d55901d126301080b84208a893036040e357a909 | 803 | require 'pony'
require 'io/console'
# Interactive mailer: reads Gmail credentials and message details from
# stdin, then delivers the message through Gmail's SMTP server via Pony.
puts 'Введите имя почтового ящика gmail.com'
# BUG FIX: the original interpolation was "#{...}'@gmail.com" — the stray
# apostrophe produced invalid addresses like "user'@gmail.com".
my_mail = "#{$stdin.gets.chomp}@gmail.com"
puts "Введите пароль от вашей почты #{my_mail} для отправки письма:"
# noecho (io/console) keeps the password from being echoed to the terminal.
password = $stdin.noecho(&:gets).chomp
puts 'Кому отправить письмо? Введите адрес:'
send_to = $stdin.gets.chomp
puts 'Что написать в теме письма?'
subject = $stdin.gets.chomp.encode('utf-8')
puts 'Что написать в теле письма?'
body = $stdin.gets.chomp.encode('utf-8')
# Deliver over SMTP with STARTTLS and plain authentication.
Pony.mail({
  subject: subject,
  body: body,
  to: send_to,
  from: my_mail,
  via: :smtp,
  via_options: {
    address: 'smtp.gmail.com',
    port: '587',
    enable_starttls_auto: true,
    user_name: my_mail,
    password: password,
    authentication: :plain
  }
})
puts 'Письмо отправлено!'
| 22.305556 | 68 | 0.65878 |
113077ab6fa0815fb958fda56da2b4d4f31a599b | 126 | class AddImageToOrganizations < ActiveRecord::Migration
# Adds a string `image` column to the organizations table.
def change
  add_column :organizations, :image, :string
end
end
| 21 | 55 | 0.777778 |
03866de03ed3f7e9fd2e453821bdb208ede5bb84 | 674 | module Fog
module Compute
  class OpenStack
    class Real
      # Deletes a volume through the Nova "os-volumes" extension.
      # @param volume_id [String] id of the volume to delete
      def delete_volume(volume_id)
        request(
          :expects => 202,
          :method => 'DELETE',
          :path => "os-volumes/#{volume_id}"
        )
      end
    end
    class Mock
      # In-memory stand-in for Real#delete_volume: removes the volume
      # and returns 204, or raises NotFound for an unknown id.
      # NOTE(review): Real expects 202 while the mock returns 204 —
      # confirm the status mismatch is intentional.
      def delete_volume(volume_id)
        response = Excon::Response.new
        if list_volumes.body['volumes'].map { |v| v['id'] }.include? volume_id
          self.data[:volumes].delete(volume_id)
          response.status = 204
          response
        else
          raise Fog::Compute::OpenStack::NotFound
        end
      end
    end
  end
end
end
| 23.241379 | 80 | 0.507418 |
38f90d58300f5a15ae4f4a9d77db860f79c571f2 | 590 | require_relative '../../spec_helper'
describe "Thread#thread_variable_get" do
before :each do
@t = Thread.new { }
end
after :each do
@t.join
end
it "returns nil if the variable is not set" do
@t.thread_variable_get(:a).should be_nil
end
it "returns the value previously set by #[]=" do
@t.thread_variable_set :a, 49
@t.thread_variable_get(:a).should == 49
end
it "returns a value private to self" do
@t.thread_variable_set :thread_variable_get_spec, 82
Thread.current.thread_variable_get(:thread_variable_get_spec).should be_nil
end
end
| 22.692308 | 79 | 0.705085 |
b9cc0cc17ffede21a0c6ba9a09ed70a9e2cdbc60 | 46,239 | # frozen_string_literal: true
require './spec/spec_helper'
require 'engine'
require 'engine/game/g_1846'
require 'engine/game/g_1889'
require 'engine/game/g_18_chesapeake'
require 'engine/phase'
require 'engine/round/operating'
require 'engine/action/place_token'
# Custom matcher: passes when `expected` reports the actual entity's id
# among its assignments (expected.assigned?(actual.id)).
RSpec::Matchers.define :be_assigned_to do |expected|
  match do |actual|
    expected.assigned?(actual.id)
  end
end
module Engine
describe Round::Operating do
let(:players) { %w[a b c] }
let(:game) { Game::G1889.new(players) }
let(:hex_j3) { game.hex_by_id('J3') }
let(:hex_j5) { game.hex_by_id('J5') }
let(:hex_k4) { game.hex_by_id('K4') }
let(:hex_k6) { game.hex_by_id('K6') }
let(:hex_k8) { game.hex_by_id('K8') }
let(:hex_l7) { game.hex_by_id('L7') }
let(:hex_e8) { game.hex_by_id('E8') }
let(:hex_f7) { game.hex_by_id('F7') }
let(:hex_f9) { game.hex_by_id('F9') }
let(:hex_g8) { game.hex_by_id('G8') }
let(:hex_g10) { game.hex_by_id('G10') }
let(:hex_g12) { game.hex_by_id('G12') }
let(:hex_g14) { game.hex_by_id('G14') }
let(:hex_h11) { game.hex_by_id('H11') }
let(:hex_h13) { game.hex_by_id('H13') }
let(:hex_i12) { game.hex_by_id('I12') }
let(:hex_c13) { game.hex_by_id('C13') }
let(:player) { game.players.first }
let(:player2) { game.players[1] }
subject { move_to_or! }
# Advances the game to its next round (private game API) and runs the
# new round's setup.
def next_round!
  game.send(:next_round!)
  game.round.setup
end
# Advances rounds until the game reaches an Operating Round; returns it.
def move_to_or!
  # Move the game into an OR
  next_round! until game.round.is_a?(Round::Operating)
  game.round
end
# Leaves the current round and moves into the following Operating Round.
def goto_new_or!
  next_round!
  move_to_or!
end
# Passes on every action until the operating round reaches its
# train-buying step.
def goto_train_step!
  # skip past non train-buying actions
  until subject.active_step.is_a?(Engine::Step::Train)
    action = Action::Pass.new(subject.current_entity)
    subject.process_action(action)
  end
end
# Forces a train onto a corporation without going through a buy action:
# drops the train at index 2 (if any), pulls the train from the depot,
# refunds its price so the buy is cash-neutral, and registers the
# purchase with the phase.
def fake_buy_train(train, corp)
  corp.trains.slice!(2)
  game.depot.remove_train(train)
  corp.cash += train.price
  game.phase.buying_train!(corp, train)
  game.buy_train(corp, train, train.price)
end
# Buys the first depot train through the normal BuyTrain action, paying
# the price of the requested variant.
def real_buy_depot_train(corporation, variant)
  train = subject.active_step
    .buyable_trains(corporation)
    .find(&:from_depot?)
  price = train.variants[variant][:price]
  action = Action::BuyTrain.new(corporation, train: train, price: price, variant: variant)
  subject.process_action(action)
end
# Discards depot trains until the next available one is named +train+;
# returns that train (without removing it).
def remove_trains_until!(train)
  loop do
    next_train = game.depot.depot_trains.first
    break next_train if next_train.name == train
    game.depot.remove_train(next_train)
  end
end
before :each do
game.stock_market.set_par(corporation, game.stock_market.par_prices[0])
corporation.cash = 100
corporation.owner = game.players.first
allow(corporation).to receive(:floated?) { true }
end
context '#1889' do
let(:corporation) { game.corporation_by_id('AR') }
let(:corporation2) { game.corporation_by_id('SR') }
let(:ehime) { game.company_by_id('ER') }
subject { move_to_or! }
before :each do
game.stock_market.set_par(corporation, game.stock_market.par_prices[0])
game.stock_market.set_par(corporation2, game.stock_market.par_prices[0])
next_round!
corporation.cash = 1000
corporation.owner = player
corporation2.cash = 1000
corporation2.owner = player
player.cash = 2000
player2.cash = 2000
# Make player 1 president of two companies, player 2 have the same amount of shares
4.times { game.share_pool.buy_shares(player, corporation.shares.first) }
5.times { game.share_pool.buy_shares(player2, corporation.shares.first) }
3.times { game.share_pool.buy_shares(player, corporation2.shares.first) }
4.times { game.share_pool.buy_shares(player2, corporation2.shares.first) }
player.cash = 2000
player2.cash = 2000
subject.process_action(Action::LayTile.new(corporation, tile: Tile.for('5'), hex: hex_k8, rotation: 3))
end
describe 'sellable_bundles' do
it 'should not return bundles that cause a president change' do
player.cash = 1
corporation.cash = 1
bundles = game.sellable_bundles(player, corporation)
# Player is president of corp 1, but cannot sell any shares without a president change
expect(bundles.size).to eq(0)
bundles = game.sellable_bundles(player, corporation2)
# Player is president of corp 2, but cannot sell any shares without a president change
expect(bundles.size).to eq(0)
end
end
describe 'buyable_trains' do
it 'returns 2 trains in the depot at start' do
available = subject.active_step.buyable_trains(corporation)
expect(available.size).to eq(1)
end
it 'returns a 2 train in the discard if discarded' do
train = subject.active_step.buyable_trains(corporation).first
fake_buy_train(train, corporation)
game.depot.reclaim_train(train)
available = subject.active_step.buyable_trains(corporation)
expect(available.size).to eq(2)
end
it 'returns trains owned by other corporations' do
train = subject.active_step.buyable_trains(corporation).first
fake_buy_train(train, corporation2)
available = subject.active_step.buyable_trains(corporation)
expect(available.size).to eq(2)
end
it 'returns only returns trains in the depot if the corp cannot afford and the player has sold shares' do
train = subject.active_step.buyable_trains(corporation).first
fake_buy_train(train, corporation2)
# Ensure we can sell shares.
game.share_pool.buy_shares(player, corporation2.shares.first)
corporation.cash = 1
player.cash = 1
bundle = game.bundles_for_corporation(player, corporation2).first
subject.process_action(Action::SellShares.new(
player,
shares: bundle.shares,
share_price: bundle.price_per_share,
percent: bundle.percent,
))
available = subject.active_step.buyable_trains(corporation)
expect(available.size).to eq(1)
end
it 'returns only returns cheapest trains available if the corp cannot afford any' do
while (train = subject.active_step.buyable_trains(corporation).first).name == '2'
game.depot.remove_train(train)
game.buy_train(corporation2, train, train.price)
end
game.depot.reclaim_train(corporation2.trains.first) while corporation2.trains.any?
available = subject.active_step.buyable_trains(corporation)
expect(available.size).to eq(2)
corporation.cash = 1
available = subject.active_step.buyable_trains(corporation)
expect(available.size).to eq(1)
end
describe 'buy_train' do
it 'allows purchasing with emergency funds if must buy' do
expect(subject.active_step.must_buy_train?(corporation)).to be true
corporation.cash = 1
train = subject.active_step.buyable_trains(corporation).first
subject.process_action(Action::BuyTrain.new(corporation, train: train, price: train.price))
end
it 'does not allow purchasing with emergency funds if no need to buy' do
train = subject.active_step.buyable_trains(corporation).first
subject.process_action(Action::BuyTrain.new(corporation, train: train, price: train.price))
expect(subject.active_step.must_buy_train?(corporation)).to be false
corporation.cash = 1
train = subject.active_step.buyable_trains(corporation).first
action = Action::BuyTrain.new(corporation, train: train, price: train.price)
expect { subject.process_action(action) }.to raise_error GameError
end
it 'causes a rust event when buying the first 4' do
train = subject.active_step.buyable_trains(corporation).first
subject.process_action(Action::BuyTrain.new(corporation, train: train, price: train.price))
expect(subject.active_step.must_buy_train?(corporation)).to be false
# Move to 4 trains to cause a rust event
while (train = subject.active_step.buyable_trains(corporation).first).name != '4'
fake_buy_train(train, corporation2)
end
corporation.cash = 1000
train = subject.active_step.buyable_trains(corporation).first
action = Action::BuyTrain.new(corporation, train: train, price: train.price)
subject.process_action(action)
expect(corporation.trains.size).to eq(1)
end
it 'does not allow purchasing with emergency funds if no need to buy even if it causes a rusting' do
train = subject.active_step.buyable_trains(corporation).first
subject.process_action(Action::BuyTrain.new(corporation, train: train, price: train.price))
expect(subject.active_step.must_buy_train?(corporation)).to be false
# Move to 4 trains to cause a rust event
while (train = subject.active_step.buyable_trains(corporation).first).name != '4'
fake_buy_train(train, corporation2)
end
train = subject.active_step.buyable_trains(corporation).first
game.buy_train(corporation2, train, train.price)
corporation.cash = 1
train = subject.active_step.buyable_trains(corporation).first
action = Action::BuyTrain.new(corporation, train: train, price: train.price)
fake_buy_train(train, corporation)
expect { subject.process_action(action) }.to raise_error GameError
end
it 'does not allow EMR purchasing diesel when it can afford a 6' do
# Allow diesels to be purchased
while (train = subject.active_step.buyable_trains(corporation).first).name != '6'
fake_buy_train(train, corporation2)
end
fake_buy_train(subject.active_step.buyable_trains(corporation).first, corporation2)
corporation.cash = subject.active_step.buyable_trains(corporation).first.price
train = subject.active_step.buyable_trains(corporation).find { |t| t.name == 'D' }
action = Action::BuyTrain.new(corporation, train: train, price: train.price)
fake_buy_train(train, corporation)
expect { subject.process_action(action) }.to raise_error GameError
end
describe 'bankruptcy' do
let(:corporation3) { game.corporation_by_id('TR') }
before :each do
# give corporation a route so that a train must be bought
hex = game.hex_by_id(corporation.coordinates)
tile = game.tile_by_id('6-0')
hex.lay(tile.rotate!(2))
game.stock_market.set_par(corporation3, game.stock_market.par_prices[0])
corporation3.cash = 1000
corporation3.ipoed = true
next_round! # get past turn 1 so shares are sellable
# skip past non train-buying actions
until game.active_step.is_a?(Engine::Step::Train)
action = Action::Pass.new(game.current_entity)
game.process_action(action)
end
end
it 'does not allow declaring bankruptcy when president has enough cash to buy a train' do
train = remove_trains_until!('6')
corporation.cash = train.price - 1
corporation.player.cash = 1
action = Action::Bankrupt.new(corporation)
expect { subject.process_action(action) }.to raise_error GameError, /Cannot go bankrupt/
end
it 'does not allow declaring bankruptcy when president has enough sellable shares to buy a train' do
# buy another share of corporation2 for some liquidity; other
# player has same number of shares and corporation2s cannot be
# dumped during 1889 EMR
game.share_pool.buy_shares(corporation.player, corporation2.shares.first)
# get to the right operating corporation
game.round.next_entity! until game.current_entity == corporation
# 6T, cost is $630
remove_trains_until!('6')
corporation.cash = 600
corporation.player.cash = 29
expect(game.liquidity(player, emergency: true)).to eq(119)
action = Action::Bankrupt.new(corporation)
expect { subject.process_action(action) }.to raise_error GameError, /Cannot go bankrupt/
end
it 'does allow declaring bankruptcy when president does not have enough liquidity to buy a train' do
# buy another share of corporation2 for some liquidity; other
# player has same number of shares and corporation2s cannot be
# dumped during 1889 EMR
game.share_pool.buy_shares(corporation.player, corporation2.shares.first)
# get to the right operating corporation
game.round.next_entity! until game.current_entity == corporation
# 6T, cost is $630
remove_trains_until!('6')
corporation.cash = 530
corporation.player.cash = 9
expect(game.liquidity(player, emergency: true)).to eq(99)
action = Action::Bankrupt.new(corporation)
subject.process_action(action)
expect(game.send(:bankruptcy_limit_reached?)).to be true
end
end
end
end
describe 'blocking for Ehime Railway' do
before :each do
ehime.owner = game.players[1]
game.phase.next!
end
it 'can lay a tile' do
expect(subject.active_step).to be_a Engine::Step::BuyTrain
expect(game.active_players).to eq([game.players[0]])
subject.process_action(
Action::BuyCompany.new(
corporation,
company: ehime,
price: 40,
)
)
expect(game.active_players).to eq([game.players[1]])
expect(subject.active_step).to be_a Engine::Step::G1889::SpecialTrack
action = Action::LayTile.new(ehime, tile: game.tile_by_id('14-0'), hex: game.hex_by_id('C4'), rotation: 1)
subject.process_action(action)
expect(subject.active_step).to be_a Engine::Step::BuyTrain
expect(game.active_players).to eq([game.players[0]])
expect(subject.active_entities).to eq([corporation])
end
it 'requires a pass action if not laying' do
expect(subject.active_step).to be_a Engine::Step::BuyTrain
train = subject.active_step.buyable_trains(corporation).first
expect(game.active_players).to eq([game.players[0]])
subject.process_action(
Action::BuyCompany.new(
corporation,
company: ehime,
price: 40,
)
)
expect(game.active_players).to eq([game.players[1]])
expect(subject.active_step).to be_a Engine::Step::G1889::SpecialTrack
action = Action::BuyTrain.new(corporation, train: train, price: train.price)
expect { subject.process_action(action) }.to raise_error(GameError)
action = Action::Pass.new(ehime)
subject.process_action(action)
expect(subject.active_step).to be_a Engine::Step::BuyTrain
expect(game.active_players).to eq([game.players[0]])
expect(subject.active_entities).to eq([corporation])
end
end
end
context '#18chesapeake' do
let(:game) { Game::G18Chesapeake.new(players) }
let(:corporation) { game.corporation_by_id('N&W') }
let(:corporation2) { game.corporation_by_id('PRR') }
subject { move_to_or! }
before :each do
game.stock_market.set_par(corporation, game.stock_market.par_prices[0])
game.stock_market.set_par(corporation2, game.stock_market.par_prices[0])
next_round!
corporation.cash = 1000
corporation.owner = player
corporation2.cash = 1000
corporation2.owner = player
player.cash = 2000
player2.cash = 2000
# Make player 1 president of two companies, player 2 have the same amount of shares
4.times { game.share_pool.buy_shares(player, corporation.shares.first) }
5.times { game.share_pool.buy_shares(player2, corporation.shares.first) }
3.times { game.share_pool.buy_shares(player, corporation2.shares.first) }
4.times { game.share_pool.buy_shares(player2, corporation2.shares.first) }
player.cash = 2000
player2.cash = 2000
subject.process_action(Action::LayTile.new(corporation, tile: Tile.for('57'), hex: hex_c13, rotation: 1))
end
describe 'sellable_bundles' do
it 'should return bundles that cause a president change' do
player.cash = 1
corporation.cash = 1
expect(subject.current_entity).to eq(corporation)
bundles = game.sellable_bundles(player, corporation)
# Player is president of corp 1, and it is the current corp
expect(bundles.size).to eq(0)
bundles = game.sellable_bundles(player, corporation2)
# Player is president of corp 2, selling shares will cause a president change
# Only one share can sell to raise the 80 yen needed for a 2 train
expect(bundles.size).to eq(1)
end
end
describe 'buyable_trains' do
it 'returns other corp trains if no shares are sold' do
train = subject.active_step.buyable_trains(corporation).first
fake_buy_train(train, corporation2)
corporation.cash = 1
player.cash = 1
available = subject.active_step.buyable_trains(corporation)
expect(available.size).to eq(2)
end
it 'returns other corp trains if sold shares does not exceed face value' do
train = subject.active_step.buyable_trains(corporation).first
fake_buy_train(train, corporation2)
corporation.cash = 1
player.cash = 1
bundle = game.bundles_for_corporation(player, corporation2).first
subject.process_action(Action::SellShares.new(
player,
shares: bundle.shares,
share_price: bundle.price_per_share,
percent: bundle.percent,
))
available = subject.active_step.buyable_trains(corporation)
expect(available.size).to eq(2)
end
it 'returns only depot trains if sold shares exceeds face value' do
train = subject.active_step.buyable_trains(corporation).first
fake_buy_train(train, corporation2)
while (train = subject.active_step.buyable_trains(corporation).first).name != '4'
fake_buy_train(train, corporation2)
end
corporation.cash = 1
player.cash = 1
bundle = game.bundles_for_corporation(player, corporation2)[3]
subject.process_action(Action::SellShares.new(
player,
shares: bundle.shares,
share_price: bundle.price_per_share,
percent: bundle.percent,
))
available = subject.active_step.buyable_trains(corporation)
expect(available.size).to eq(1)
end
end
describe 'buy_train' do
it 'does not allow EMR purchasing diesel when it can afford a 6' do
# Allow diesels to be purchased
while (train = subject.active_step.buyable_trains(corporation).first).name != '6'
fake_buy_train(train, corporation2)
end
fake_buy_train(subject.active_step.buyable_trains(corporation).first, corporation2)
corporation.cash = subject.active_step.buyable_trains(corporation).first.price
train = subject.active_step.buyable_trains(corporation).find { |t| t.name == 'D' }
action = Action::BuyTrain.new(corporation, train: train, price: train.price)
fake_buy_train(train, corporation)
expect { subject.process_action(action) }.to raise_error GameError
end
it 'allows purchasing another players train' do
fake_buy_train(subject.active_step.buyable_trains(corporation).first, corporation2)
corporation.cash = 1
train = corporation2.trains.first
player.cash = train.price
action = Action::BuyTrain.new(corporation, train: train, price: train.price)
subject.process_action(action)
end
it 'does not allow purchasing another players train for above price' do
fake_buy_train(subject.active_step.buyable_trains(corporation).first, corporation2)
corporation.cash = 1
train = corporation2.trains.first
player.cash = train.price
action = Action::BuyTrain.new(corporation, train: train, price: train.price + 1)
fake_buy_train(train, corporation)
expect { subject.process_action(action) }.to raise_error GameError
end
end
end
describe '#available_hex' do
context 'with awa' do
let(:corporation) { game.corporation_by_id('AR') }
it 'returns the layable hexes' do
hexes = {
hex_k8 => [1, 2, 3, 4],
}
hexes.each { |k, v| expect(subject.active_step.available_hex(corporation, k)).to eq(v) }
subject.process_action(Action::LayTile.new(corporation, tile: Tile.for('5'), hex: hex_k8, rotation: 3))
hexes = {
hex_k6 => [0],
hex_k8 => [1, 2, 3, 4],
hex_l7 => [1],
}
subject = goto_new_or!
hexes.each { |k, v| expect(subject.active_step.available_hex(corporation, k)).to eq(v) }
subject.process_action(Action::LayTile.new(corporation, tile: Tile.for('9'), hex: hex_k6, rotation: 0))
subject = goto_new_or!
hexes = {
hex_j3 => [5],
hex_j5 => [4],
hex_k4 => [0, 1, 2],
hex_k6 => [0, 3],
hex_k8 => [1, 2, 3, 4],
hex_l7 => [1],
}
hexes.each { |k, v| expect(subject.active_step.available_hex(corporation, k)).to eq(v) }
end
end
context 'with tse' do
let(:corporation) { game.corporation_by_id('TR') }
it 'can handle forks' do
subject.process_action(Action::LayTile.new(corporation, tile: Tile.for('58'), hex: hex_g10, rotation: 0))
goto_new_or!.process_action(Action::LayTile.new(corporation, tile: Tile.for('57'), hex: hex_g12, rotation: 0))
game.phase.next!
goto_new_or!.process_action(Action::LayTile.new(corporation, tile: Tile.for('15'), hex: hex_g12, rotation: 3))
goto_new_or!.process_action(Action::LayTile.new(corporation, tile: Tile.for('9'), hex: hex_h13, rotation: 1))
subject = goto_new_or!
hexes = {
hex_e8 => [5],
hex_f7 => [0],
hex_f9 => [2, 3, 4, 5],
hex_g8 => [1],
hex_g10 => [2, 0],
hex_g12 => [3, 4, 5, 0],
hex_g14 => [3, 4],
hex_h11 => [1],
hex_h13 => [2, 1, 4],
hex_i12 => [1],
}
hexes.each { |k, v| expect(subject.active_step.available_hex(corporation, k)).to eq(v) }
end
end
context 'with ko' do
let(:corporation) { game.corporation_by_id('KO') }
let(:company) { game.company_by_id('TR') }
let(:player) { game.player_by_id('a') }
it 'errors when upgrading K4 if Takumatsu is owned by player' do
company.owner = player
player.companies << company
action = Action::LayTile.new(corporation, tile: Tile.for('440'), hex: hex_k4, rotation: 0)
expect { subject.process_action(action) }.to raise_error(GameError)
end
it 'allows upgrading K4 if Takumatsu is owned by any corporation' do
company.owner = corporation
corporation.companies << company
game.phase.next!
subject.process_action(Action::LayTile.new(corporation, tile: Tile.for('440'), hex: hex_k4, rotation: 0))
end
end
end
context '1846' do
let(:players) { %w[a b c d e] }
let(:game) { Game::G1846.new(players) }
let(:corporation) { game.corporation_by_id('B&O') }
let(:corporation_1) { game.corporation_by_id('PRR') }
let(:big4) { game.minor_by_id('BIG4') }
let(:ms) { game.minor_by_id('MS') }
let(:hex_b8) { game.hex_by_id('B8') }
let(:hex_d14) { game.hex_by_id('D14') }
let(:hex_g19) { game.hex_by_id('G19') }
subject { move_to_or! }
before :each do
game.stock_market.set_par(corporation, game.stock_market.par_prices[0])
corporation.ipoed = true
corporation.cash = 80
bundle = ShareBundle.new(corporation.shares.first)
game.share_pool.transfer_shares(bundle, game.players.first)
game.stock_market.set_par(corporation_1, game.stock_market.par_prices[0])
corporation_1.ipoed = true
corporation_1.cash = 80
bundle = ShareBundle.new(corporation_1.shares.first)
game.share_pool.transfer_shares(bundle, game.players[1])
ms.owner = game.players[1]
big4.owner = game.players[2]
end
describe 'Steamboat Company' do
let(:company) { game.company_by_id('SC') }
before :each do
company.owner = game.players.first
allow(ms).to receive(:floated?) { true }
end
it 'handles full lifecycle of assigning to hexes and corporations' do
expect(company).not_to be_assigned_to(corporation)
expect(company).not_to be_assigned_to(corporation_1)
expect(company).not_to be_assigned_to(hex_d14)
expect(company).not_to be_assigned_to(hex_g19)
subject.process_action(Action::Assign.new(company, target: hex_d14))
expect(company).to be_assigned_to(hex_d14)
expect(company).not_to be_assigned_to(hex_g19)
action = Action::Assign.new(company, target: hex_g19)
expect { subject.process_action(action) }.to raise_error GameError
subject.process_action(Action::Assign.new(company, target: corporation))
expect(company).to be_assigned_to(corporation)
expect(company).not_to be_assigned_to(corporation_1)
action = Action::Assign.new(company, target: corporation_1)
expect { subject.process_action(action) }.to raise_error GameError
subject = goto_new_or!
subject.process_action(Action::Assign.new(company, target: ms))
expect(company).not_to be_assigned_to(corporation)
expect(company).not_to be_assigned_to(corporation_1)
expect(company).to be_assigned_to(ms)
subject = goto_new_or!
subject.process_action(Action::Assign.new(company, target: hex_g19))
expect(company).not_to be_assigned_to(hex_d14)
expect(company).to be_assigned_to(hex_g19)
subject.process_action(Action::Assign.new(company, target: corporation_1))
expect(company).not_to be_assigned_to(corporation)
expect(company).not_to be_assigned_to(ms)
expect(company).to be_assigned_to(corporation_1)
subject.process_action(
Action::BuyCompany.new(
corporation,
company: company,
price: 1,
)
)
expect(company).to be_assigned_to(corporation)
expect(company).to be_assigned_to(hex_g19)
expect(company).not_to be_assigned_to(corporation_1)
expect(company).not_to be_assigned_to(hex_d14)
action = Action::Assign.new(company, target: corporation)
expect { subject.process_action(action) }.to raise_error GameError
action = Action::Assign.new(company, target: corporation_1)
expect { subject.process_action(action) }.to raise_error GameError
subject.process_action(Action::Assign.new(company, target: hex_d14))
expect(company).to be_assigned_to(hex_d14)
expect(company).not_to be_assigned_to(hex_g19)
action = Action::Assign.new(company, target: hex_g19)
expect { subject.process_action(action) }.to raise_error GameError
subject = goto_new_or!
expect(company).to be_assigned_to(corporation)
expect(company).to be_assigned_to(hex_d14)
expect(company).not_to be_assigned_to(corporation_1)
expect(company).not_to be_assigned_to(hex_g19)
action = Action::Assign.new(company, target: corporation_1)
expect { subject.process_action(action) }.to raise_error GameError
subject.process_action(Action::Assign.new(company, target: hex_g19))
expect(company).to be_assigned_to(hex_g19)
end
end
describe 'C&WI' do
let(:company) { game.company_by_id('C&WI') }
let(:tile) { game.hex_by_id('D6').tile }
let(:city) { tile.cities[3] }
let(:cities) { tile.cities }
before :each do
company.owner = game.players.first
end
describe 'reservation' do
before :each do
expect(city.reservations).to eq([company])
end
it 'is removed if owned by a player when a 5 train is bought' do
goto_train_step!
train = remove_trains_until!('5')
corporation.cash = train.price
subject.process_action(
Action::BuyTrain.new(
corporation,
train: train,
price: train.price,
)
)
expect(city.reservations).to eq([])
end
it 'is removed when a corporation buys in the C&WI' do
subject.process_action(
Action::BuyCompany.new(
corporation,
company: company,
price: 1,
)
)
expect(city.reservations).to eq([])
end
end
describe 'token placement' do
before :each do
subject.process_action(
Action::BuyCompany.new(
corporation,
company: company,
price: 1,
)
)
end
describe 'can place' do
before :each do
expect(city.tokens).to eq([nil])
end
it 'on the yellow Chi tile, city 3' do
subject.process_action(Action::PlaceToken.new(company, city: city, slot: 0))
expect(city.tokens.map(&:corporation)).to eq([corporation])
end
it 'on the green Chi tile, city 3' do
expect(city.revenue.values.uniq).to eq([10])
game.hex_by_id('D6').lay(game.tile_by_id('298-0'))
city = game.hex_by_id('D6').tile.cities[3]
expect(city.revenue.values.uniq).to eq([40])
subject.process_action(Action::PlaceToken.new(company, city: city, slot: 0))
expect(city.tokens.map(&:corporation)).to eq([corporation])
end
end
describe 'cannot place' do
before :each do
expect(city.tokens).to eq([nil])
end
after :each do
expect(city.tokens).to eq([nil])
end
(0..2).each do |other_city|
it "on yellow Chi tile, city #{other_city}" do
action = Action::PlaceToken.new(company, city: cities[other_city], slot: 0)
expect { subject.process_action(action) }.to raise_error GameError, /can only place token on D6 city 3/
end
it "on green Chi tile, city #{other_city}" do
expect(city.revenue.values.uniq).to eq([10])
game.hex_by_id('D6').lay(game.tile_by_id('298-0'))
city = game.hex_by_id('D6').tile.cities[other_city]
expect(city.revenue.values.uniq).to eq([40])
action = Action::PlaceToken.new(company, city: city, slot: 0)
expect { subject.process_action(action) }.to raise_error GameError, /can only place token on D6 city 3/
end
end
end
end
end
describe 'issue_shares action' do
let(:tile) { game.hex_by_id('G19').tile }
let(:city) { tile.cities.first }
before :each do
corporation.cash = 0
end
it 'is an available until buy train step' do
game.buy_train(corporation, game.trains.first, :free)
city.place_token(corporation, corporation.tokens.first, free: true)
next_round!
expect(subject.actions_for(corporation)).to include('sell_shares')
expect(game.issuable_shares(corporation).size).to eq(2)
# Pass on tile lay and place token step
subject.process_action(Action::Pass.new(corporation))
expect(subject.actions_for(corporation)).to include('sell_shares')
# Run route step
action = game.action_from_h('type' => 'run_routes',
'entity' => 'B&O',
'entity_type' => 'corporation',
'routes' => [{ 'train' => '2-0', 'connections' => [%w[H20 G19]] }])
subject.process_action(action)
expect(subject.actions_for(corporation)).to include('sell_shares')
# Dividend step
corporation.cash += 80
subject.process_action(Action::Dividend.new(corporation, kind: 'payout'))
expect(subject.actions_for(corporation)).not_to include('sell_shares')
# Pass on buy train step
subject.process_action(Action::Pass.new(corporation))
expect(subject.actions_for(corporation)).not_to include('sell_shares')
end
it 'provides the correct amount of cash' do
step = subject.step_for(corporation, 'sell_shares')
expect(step.issuable_shares(corporation)[0].price).to eq(137)
expect(step.issuable_shares(corporation)[1].price).to eq(274)
action = Action::SellShares.new(corporation, shares: corporation.shares[1], share_price: 135, percent: 10)
subject.process_action(action)
expect(corporation.cash).to eq(135)
expect(game.share_pool.num_shares_of(corporation)).to eq(1)
expect(corporation.num_shares_of(corporation)).to eq(7)
end
it 'is no longer available after issuing' do
action = Action::SellShares.new(corporation, shares: corporation.shares.first, share_price: 135, percent: 10)
subject.process_action(action)
expect(subject.actions_for(corporation)).not_to include('sell_shares')
end
it 'causes the track and token step to block when cash is 0' do
expect(subject.actions_for(corporation)).to include('lay_tile')
expect(subject.actions_for(corporation)).to include('place_token')
end
it 'is not available if no shares to issue' do
bundle = ShareBundle.new(corporation.shares.first(4))
game.share_pool.transfer_shares(bundle, game.players[0])
bundle = ShareBundle.new(corporation.shares)
game.share_pool.transfer_shares(bundle, game.players[1])
expect(subject.actions_for(corporation)).not_to include('sell_shares')
end
it 'is not available if no additional shares can be in the bank pool' do
bundle = ShareBundle.new(corporation.shares.first(2))
game.share_pool.transfer_shares(bundle, game.share_pool)
expect(subject.actions_for(corporation)).not_to include('sell_shares')
end
end
describe 'redeem_shares action' do
let(:tile) { game.hex_by_id('G19').tile }
let(:city) { tile.cities.first }
before :each do
corporation.cash = 330
bundle = ShareBundle.new(corporation.shares.first(2))
game.share_pool.transfer_shares(bundle, game.share_pool)
end
it 'is an available until buy train step' do
game.buy_train(corporation, game.trains.first, :free)
city.place_token(corporation, corporation.tokens.first, free: true)
next_round!
expect(subject.actions_for(corporation)).to include('buy_shares')
expect(game.redeemable_shares(corporation).size).to eq(2)
# Pass on tile lay and place token step
subject.process_action(Action::Pass.new(corporation))
expect(subject.actions_for(corporation)).to include('buy_shares')
# Run route sstep
action = game.action_from_h('type' => 'run_routes',
'entity' => 'B&O',
'entity_type' => 'corporation',
'routes' => [{ 'train' => '2-0', 'connections' => [%w[H20 G19]] }])
subject.process_action(action)
expect(subject.actions_for(corporation)).to include('buy_shares')
# Dividend step
subject.process_action(Action::Dividend.new(corporation, kind: 'payout'))
corporation.cash += 80
expect(subject.actions_for(corporation)).not_to include('buy_shares')
# Pass on buy train step
subject.process_action(Action::Pass.new(corporation))
expect(subject.actions_for(corporation)).not_to include('buy_shares')
end
it 'costs the correct amount of cash' do
step = subject.step_for(corporation, 'buy_shares')
expect(step.redeemable_shares(corporation).map(&:price)).to include(165, 330)
action = Action::BuyShares.new(corporation,
shares: game.share_pool.shares_of(corporation).first,
share_price: 165,
percent: 10)
subject.process_action(action)
expect(corporation.cash).to eq(165)
expect(game.share_pool.num_shares_of(corporation)).to eq(1)
expect(corporation.num_shares_of(corporation)).to eq(7)
end
it 'is no longer available after redeeming' do
action = Action::BuyShares.new(corporation,
shares: game.share_pool.shares_of(corporation).first,
share_price: 165,
percent: 10)
subject.process_action(action)
expect(subject.actions_for(corporation)).not_to include('buy_shares')
end
it 'is not available if no shares to redeem' do
bundle = ShareBundle.new(game.share_pool.shares_of(corporation))
game.share_pool.transfer_shares(bundle, corporation)
expect(subject.actions_for(corporation)).not_to include('buy_shares')
expect(game.redeemable_shares(corporation).size).to eq(0)
end
end
describe 'buy_train' do
before :each do
goto_train_step!
# Allow 7/8 to be purchased
while (train = subject.active_step.buyable_trains(corporation).first).name != '6'
fake_buy_train(train, corporation_1)
end
fake_buy_train(subject.active_step.buyable_trains(corporation).first, corporation_1)
# enough cash for a 6
corporation.cash = subject.active_step.buyable_trains(corporation).first.price
end
describe 'corporation can afford a 6' do
before :each do
corporation.cash = 800
end
it 'does not allow president contributing cash to purchase a 7/8' do
# only buyable variant is 6
train = subject.active_step
.buyable_trains(corporation)
.find(&:from_depot?)
expect(subject.active_step.buyable_train_variants(train, corporation)).to eq([train.variants['6']])
expect(corporation.cash).to eq(800)
expect(corporation.trains).to be_empty
# buying it raises error
expect { real_buy_depot_train(corporation, '7/8') }.to raise_error GameError, 'Not a buyable train'
end
it 'does allow the corporation to emergency issue shares to purchase a 7/8' do
bundle = game.emergency_issuable_bundles(corporation).first
subject.process_action(Action::SellShares.new(
corporation,
shares: bundle.shares,
share_price: bundle.price_per_share,
percent: bundle.percent,
))
expect(corporation.cash).to eq(912)
buyable_depot_trains = subject.active_step.buyable_trains(corporation).select(&:from_depot?)
expect(buyable_depot_trains.size).to eq(1)
real_buy_depot_train(corporation, '7/8')
expect(corporation.cash).to eq(12)
expect(corporation.trains.map(&:name)).to eq(%w[7/8])
end
end
describe 'corporation cannot afford a 6' do
before :each do
corporation.cash = 799
end
describe 'has shares to issue' do
describe 'with stock price of 112' do
before :each do
game.stock_market.set_par(corporation, game.stock_market.par_prices[3])
end
it 'can issue one share to buy a 6 and not a 7/8' do
bundle = game.emergency_issuable_bundles(corporation).first
subject.process_action(Action::SellShares.new(
corporation,
shares: bundle.shares,
share_price: bundle.price_per_share,
percent: bundle.percent,
))
expect { real_buy_depot_train(corporation, '7/8') }.to raise_error GameError, 'Not a buyable train'
real_buy_depot_train(corporation, '6')
expect(corporation.trains.map(&:name)).to eq(%w[6])
end
it 'can issue two shares to buy a 7/8 and not a 6' do
bundles = game.emergency_issuable_bundles(corporation)
bundle = bundles[1]
subject.process_action(Action::SellShares.new(
corporation,
shares: bundle.shares,
share_price: bundle.price_per_share,
percent: bundle.percent,
))
expect { real_buy_depot_train(corporation, '6') }.to raise_error GameError, 'Not a buyable train'
real_buy_depot_train(corporation, '7/8')
expect(corporation.trains.map(&:name)).to eq(%w[7/8])
end
end
it 'does not allow president contributing cash to purchase a 7/8' do
expect { real_buy_depot_train(corporation, '7/8') }.to raise_error GameError, 'Not a buyable train'
end
end
describe 'no shares to issue' do
before :each do
# add shares to the pool so the corp may not issue any
bundle = ShareBundle.new(corporation.shares.slice(0..2))
game.share_pool.transfer_shares(bundle, game.share_pool)
expect(game.emergency_issuable_bundles(corporation)).to be_empty
end
it 'allows president contributing cash to purchase a 7/8' do
initial_president_cash = corporation.owner.cash
expect(corporation.cash).to eq(799)
expect(corporation.trains).to be_empty
real_buy_depot_train(corporation, '7/8')
expect(corporation.cash).to eq(0)
expect(corporation.trains.map(&:name)).to eq(%w[7/8])
expect(corporation.owner.cash).to eq(initial_president_cash - 101)
end
it 'allows president selling shares to purchase a 7/8 even if a 6 is affordable '\
'with the presidential cash' do
player = corporation.owner
player.cash = 1
# give the president a 3rd share that they can sell
bundle = ShareBundle.new(corporation.shares[0])
game.share_pool.transfer_shares(bundle, player)
bundle = game.bundles_for_corporation(player, corporation).first
subject.process_action(Action::SellShares.new(
player,
shares: bundle.shares,
share_price: bundle.price_per_share,
percent: bundle.percent,
))
expect(player.cash).to eq(138)
expect(corporation.cash).to eq(799)
buyable_depot_trains = subject.active_step.buyable_trains(corporation).select(&:from_depot?)
expect(buyable_depot_trains.size).to eq(1)
real_buy_depot_train(corporation, '7/8')
expect(player.cash).to eq(37)
expect(corporation.cash).to eq(0)
expect(corporation.trains.map(&:name)).to eq(%w[7/8])
end
end
end
end
end
end
end
| 39.792599 | 120 | 0.605333 |
110857cc36e29d280f7e496eb77cd94606e1f5f6 | 169 | class AddDiscardedAtToThings < ActiveRecord::Migration[6.1]
def change
add_column :things, :discarded_at, :datetime
add_index :things, :discarded_at
end
end
| 24.142857 | 59 | 0.757396 |
79f56907282024f06d150a78ef21b7f13fb47158 | 684 | require 'spec_helper'
describe Monza::RenewalInfo do
  context 'pending renewal info' do
    # Canned App Store verification response fixture, read in binary mode.
    let(:response) { JSON.parse File.open("#{Dir.pwd}/spec/response.json", 'rb').read }
    # Wraps the first entry of Apple's `pending_renewal_info` array.
    let(:renewal_info) { described_class.new(response['pending_renewal_info'].first) }
    # Field-by-field mapping of the fixture onto RenewalInfo accessors.
    it { expect(renewal_info.product_id).to eq "product_id" }
    it { expect(renewal_info.original_transaction_id).to eq "1000000218147500" }
    # NOTE(review): expiration_intent "1" presumably means "customer
    # cancelled" per Apple's receipt docs — confirm against the fixture.
    it { expect(renewal_info.expiration_intent).to eq "1" }
    it { expect(renewal_info.will_renew).to eq false }
    it { expect(renewal_info.is_in_billing_retry_period).to eq false }
    it { expect(renewal_info.auto_renew_product_id).to eq "renew_product_id" }
  end
end
| 42.75 | 87 | 0.73538 |
e9d6818d4078085ba293aaf1c69c1c4a2568813b | 1,912 | module Support
module ScanMatcher
extend RSpec::Matchers::DSL
def scan(pattern, **options)
give_scan_result(:scan, pattern, **options)
end
def check(pattern, **options)
give_scan_result(:check, pattern, **options)
end
def scan_until(pattern, **options)
give_scan_result(:scan_until, pattern, **options)
end
def check_until(pattern, **options)
give_scan_result(:check_until, pattern, **options)
end
matcher :give_scan_result do |method_name, pattern, **options|
def result_expectations
@result_expectations ||= []
end
def expect_result(description, expected, &block)
result_expectations << Proc.new do |result|
if !block.call(result)
"expected %p to %s %p matching %s" % [ result.scanner, method_name, pattern, description ]
end
end
end
match do |scanner|
scanned = scanner.public_send(method_name, pattern, **options)
scanned and result_expectations.all? { |e| !e.call(scanned) }
end
chain(:matching_substring) do |substring|
expected_result("the substring %p" % substring) { |r| r.to_s == substring }
end
chain(:matching_length) do |length|
expected_result("%d characters" % length) { |r| r.length == length }
end
chain(:matching_params) do |params|
expected_result("with params %p" % [params]) { |r| r.params == params }
end
failure_message do |scanner|
if scanned = scanner.public_send(method_name, pattern, **options)
message = result_expectations.inject(nil) { |m,e| m || e.call(scanned) }
end
message || "expected %p to %s %p" % [ scanner, method_name, pattern ]
end
failure_message_when_negated do |scanner|
"expected %p not to %s %p" % [ scanner, method_name, pattern ]
end
end
end
end | 30.349206 | 102 | 0.623431 |
1aeb24d1e9ed4f2da79a1b643a5e8bddad315d09 | 4,423 | Rails.application.configure do
# Settings specified here will take precedence over those in
# config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like
# nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable Rails's static asset server (Apache or nginx will already do this).
config.serve_static_files = true
# Compress JavaScripts and CSS.
# config.assets.js_compressor = Uglifier.new(mangle: false)
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Generate digests for assets URLs.
config.assets.digest = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0.4'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and
# use secure cookies.
if ENV['SSL_ON'] != 'false'
config.force_ssl = true
config.ssl_options = { exclude: proc { |env| env['PATH_INFO'] == "/ping" } }
end
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset
# server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder
# are already added.
# config.assets.precompile += %w( search.js )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to
# raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
config.action_mailer.smtp_settings = {
address: 'smtp.sendgrid.net',
port: '587',
authentication: :plain,
user_name: ENV['SENDGRID_USERNAME'],
password: ENV['SENDGRID_PASSWORD'],
domain: ENV['SENDGRID_DOMAIN'] || 'people-finder.dsd.io',
enable_starttls_auto: true
}
config.filter_parameters += [
:given_name, :surname, :email, :primary_phone_number,
:secondary_phone_number, :location, :email
]
if ENV['INTERCEPTED_EMAIL_RECIPIENT'].present?
Mail.register_interceptor RecipientInterceptor.new(
ENV['INTERCEPTED_EMAIL_RECIPIENT'],
subject_prefix: '[STAGING]'
)
end
# Logstasher config
config.logstasher.enabled = true
config.logstasher.suppress_app_log = true
config.logstasher.log_level = Logger::INFO
config.logstasher.logger_path =
"#{Rails.root}/log/logstash_#{Rails.env}.json"
config.logstasher.source = 'logstasher'
end
| 35.669355 | 80 | 0.738865 |
f73f27140aceea867b413834cd68e44a262645b1 | 14,578 | require 'spec_helper'
describe 'cis_hardening::logaudit::accounting' do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
let(:facts) { os_facts }
# Check for default class
it { is_expected.to contain_class('cis_hardening::logaudit::accounting') }
# Ensure Auditing is enabled - Section 4.1.1
# Ensure that auditd is installed - Section 4.1.1.1
it {
is_expected.to contain_package('auditd').with(
'ensure' => 'present',
)
}
it {
is_expected.to contain_package('audit-libs').with(
'ensure' => 'present',
)
}
# Ensure that Exec to notify from auditd rules changes - Section 4.1.1.2
it {
is_expected.to contain_service('auditd').with(
'ensure' => 'running',
'enable' => true,
'hasstatus' => true
'hasrestart' =>
)
}
it {
is_expected.to contain_exec('restart_auditd').with(
'path' => '/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin',
'command' => '/bin/systemctl restart auditd',
)
}
# Ensure that Ensure audit log storage size is configured - Section 4.1.1.1
it {
is_expected.to contain_file_line('set_auditd_logfile_size').with(
'ensure' => 'present',
'path' => '/etc/audit/auditd.conf',
'line' => 'max_log_file = 1024',
'match' => '^max_log_file\ \=',
).that_notifies('Exec[restart_auditd]')
}
# Ensure that system is disabled when audit logs are full - Section 4.1.1.2
it {
is_expected.to contain_file_line('full_logfile_notify_action').with(
'ensure' => 'present',
'path' => '/etc/audit/auditd.conf',
'line' => 'space_left_action = email',
'match' => '^space_left_action\ \=',
).that_notifies('Exec[restart_auditd]')
}
it {
is_expected.to contain_file_line('set_action_mail_account').with(
'ensure' => 'present',
'path' => '/etc/audit/auditd.conf',
'line' => 'action_mail_acct = root',
'match' => '^action_mail_acct\ \=',
).that_notifies('Exec[restart_auditd]')
}
it {
is_expected.to contain_file_line('set_admin_space_left_action').with(
'ensure' => 'present',
'path' => '/etc/audit/auditd.conf',
'line' => 'admin_space_left_action = SYSLOG',
'match' => '^admin_space_left_action\ \=',
).that_notifies('Exec[restart_auditd]')
}
# Ensure that Ensure audit logs are not automatically deleted - Section 4.1.1.3
it {
is_expected.to contain_file_line('set_max_logfile_action').with(
'ensure' => 'present',
'path' => '/etc/audit/auditd.conf',
'line' => 'max_log_file_action = keep_logs',
'match' => '^max_log_file_action\ \=',
)
}
# Ensure that Ensure auditd service is enabled - Section 4.1.2
it {
is_expected.to contain_service('auditd').with(
'ensure' => 'running',
'enable' => true,
'hasstatus' => true,
'hasrestart' => true,
)
}
# Ensure that Ensure defaults directory is present for grub settings - Section 4.1.3 prerequisites
it {
is_expected.to contain_file('/etc/default').with(
'ensure' => 'directory',
'owner' => 'root',
'group' => 'root',
'mode' => '0755',
)
}
it {
is_expected.to contain_file('/etc/default/grub').with(
'ensure' => 'file',
'owner' => 'root',
'group' => 'root',
'mode' => '0644',
).that_requires('File[/etc/default]')
}
# Ensure that Ensure auditing for processes that start prior to auditd is enabled - Section 4.1.3
it {
is_expected.to contain_file_line('pre_auditd_settings').with(
'ensure' => 'present',
'path' => '/etc/default/grub',
'line' => 'GRUB_CMDLINE_LINUX="audit=1"',
).that_requires('File[/etc/default/grub]')
}
# Ensure that Ensure events that modify date and time information are collected - Section 4.1.4
it {
is_expected.to contain_file_line('time_change_64bit_item1').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S adjtimex -S settimeofday -k time-change',
)
}
it {
is_expected.to contain_file_line('time_change_64bit_item2').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S clock_settime -k time-change',
)
}
it {
is_expected.to contain_file_line('time_change_64bit_item3').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/localtime -p wa -k time-change',
)
}
# Ensure that Ensure events that modify user/group information are collected - Section 4.1.5
it {
is_expected.to contain_file_line('ownerchange_group').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/group -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_passwd').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/passwd -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_gshadow').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/gshadow -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_shadow').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/shadow -p wa -k identity',
)
}
it {
is_expected.to contain_file_line('ownerchange_opasswd').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/security/opasswd -p wa -k identity',
)
}
# Ensure that Ensure events that modify the system's network environment are collected - Section 4.1.6
it {
is_expected.to contain_file_line('network_namechanges').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S sethostname -S setdomainname -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_issue').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/issue -p wa -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_issuedotnet').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/issue.net -p wa -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_network').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sysconfig/network -p wa -k system-locale',
)
}
it {
is_expected.to contain_file_line('network_networkscripts').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sysconfig/network-scripts/ -p wa -k system-locale',
)
}
# Ensure that Ensure events that modify the system's Mandatory Access Controls are collected - Section 4.1.7
it {
is_expected.to contain_file_line('macpolicy_selinux').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/selinux/ -p wa -k MAC-policy',
)
}
it {
is_expected.to contain_file_line('macpolicy_selinuxshare').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /usr/share/selinux/ -p wa -k MAC-policy',
)
}
# Ensure that Ensure login and logout events are collected - Section 4.1.8
it {
is_expected.to contain_file_line('lastlogin').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/log/lastlog -p wa -k logins',
)
}
it {
is_expected.to contain_file_line('faillock').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/faillock/ -p wa -k logins',
)
}
# Ensure that Ensure session initiation information is collected - Section 4.1.9
it {
is_expected.to contain_file_line('utmp_entry').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/utmp -p wa -k session',
)
}
it {
is_expected.to contain_file_line('wtmp_entry').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/wtmp -p wa -k logins',
)
}
it {
is_expected.to contain_file_line('btmp_entry').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/run/btmp -p wa -k logins',
)
}
# Ensure that Ensure discretionary access control permission modification events are collected - Section 4.1.10
it {
is_expected.to contain_file_line('chmod_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S chmod -S fchmod -S fchmodat -F auid>=1000 -F auid!=4294967295 -k perm_mod',
)
}
it {
is_expected.to contain_file_line('chown_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S chown -S fchown -S fchownat -S lchown -F auid>=1000 -F auid!=4294967295 -k perm_mod',
)
}
it {
is_expected.to contain_file_line('xattr_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S setxattr -S lsetxattr -S fsetxattr -S removexattr -S lremovexattr -S fremovexattr -F auid>=1000 -F auid!=4294967295 -k perm_mod',
)
}
# Ensure that Ensure unsuccessful unauthorized file access attempts are collected - Section 4.1.11
it {
is_expected.to contain_file_line('file_truncate').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S creat -S open -S openat -S truncate -S ftruncate -F exit=-EACCES -F auid>=1000 -F auid!=4294967295 -k access',
)
}
# Ensure that Ensure use of privileged commands is collected - Section 4.1.12 **unused**
# Ensure that Ensure succesful filesystem mounts are collected - Section 4.1.13
it {
is_expected.to contain_file_line('mount_cmds').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S mount -F auid>=1000 -F auid!=4294967295 -k mounts',
)
}
# Ensure that Ensure file deletion events by users are captured - Section 4.1.14
it {
is_expected.to contain_file_line('file_deletions').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S unlink -S unlinkat -S rename -S renameat -F auid>=1000 -F auid!=4294967295 -k delete',
)
}
# Ensure that Ensure changes to system administration scope (sudoers) is collected - Section 4.1.15
it {
is_expected.to contain_file_line('sudoers_file').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sudoers -p wa -k scope',
)
}
it {
is_expected.to contain_file_line('sudoers_dir').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /etc/sudoers.d/ -p wa -k scope',
)
}
# Ensure that Ensure system administrator actions (sudolog) are collected - Section 4.1.16
it {
is_expected.to contain_file_line('sudolog').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /var/log/sudo.log -p wa -k actions',
)
}
# Ensure that Ensure Kernel module loading and unloading are collected - Section 4.1.17
it {
is_expected.to contain_file_line('check_insmod').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /sbin/insmod -p x -k modules',
)
}
it {
is_expected.to contain_file_line('check_rmmod').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /sbin/rmmod -p x -k modules',
)
}
it {
is_expected.to contain_file_line('check_modprobe').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-w /sbin/modprobe -p x -k modules',
)
}
it {
is_expected.to contain_file_line('check_modulestate').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-a always,exit -F arch=b64 -S init_module -S delete_module -k modules',
)
}
it {
is_expected.to contain_file_line('make_auditd_immutable').with(
'ensure' => 'present',
'path' => '/etc/audit/audit.rules',
'line' => '-e 2',
'match' => '^-e\ ',
'append_on_no_match' => true,
)
}
# Ensure manifest compiles with all dependencies
it {
is_expected.to compile.with_all_deps
}
end
end
end
| 34.301176 | 182 | 0.534367 |
e83efa66ea6990f219d33c46485307966152f491 | 1,039 | @DATA_PIN = 2
# GPIO pin assignments for the 74HC595 latch and shift-clock lines
# (@DATA_PIN, the serial-data pin, is defined just above).
@RATCH_PIN = 3
@CLOCK_PIN = 4
# Minimal driver for the SN74HC595N 8-bit serial-in/parallel-out shift
# register, using the Arduino-style mruby API (pinMode / digitalWrite /
# shiftOut).
class SN74HC595N
  attr_accessor :data_pin, :ratch_pin, :clock_pin, :mode
  def initialize(data_pin, ratch_pin, clock_pin)
    @data_pin = data_pin
    @ratch_pin = ratch_pin
    @clock_pin = clock_pin
    @mode = MSBFIRST
    # Initialize every pin as an output driven LOW.
    [@data_pin, @ratch_pin, @clock_pin].each do |p|
      pinMode(p, OUTPUT)
      digitalWrite(p, 0)
    end
  end
  # Shifts one byte out to the register, toggling the latch around the
  # transfer.
  def output(value)
    # Raise the latch so the byte can be written.
    # NOTE(review): 74HC595 datasheets latch the storage register on the
    # RISING edge of ST_CP; confirm this HIGH-during-shift sequence matches
    # the actual wiring.
    digitalWrite(@ratch_pin, 1)
    # Clock the byte out bit by bit.
    shiftOut(@data_pin, @clock_pin, @mode, value)
    # Return the latch to LOW.
    digitalWrite(@ratch_pin, 0)
  end
  # Drives all eight outputs LOW.
  def clear
    output(0x00)
  end
end
sr = SN74HC595N.new(@DATA_PIN, @RATCH_PIN, @CLOCK_PIN)
sr.clear
# Count 0..255 on the outputs, most significant bit first.
256.times do |i|
  sr.output(i)
  delay(50)
end
# Being a shift register, the last byte stays latched until new data is
# shifted in, so clear explicitly.
sr.clear()
delay(250)
# MSB -> LSB
sr.mode = LSBFIRST
256.times do |i|
  sr.output(i)
  delay(50)
end
# Being a shift register, the last byte stays latched until new data is
# shifted in, so clear explicitly.
sr.clear()
delay(100) | 17.913793 | 58 | 0.615014 |
38777c516f2f488cfaa95617d2ceefbd02315716 | 560 | # frozen_string_literal: true
module Ouranos
  module Jobs
    # Resque job that acquires a lock on a deployment environment and then
    # reports the lock back as a successful deployment status.
    # (The previous comment said "unlocking"; the job calls #lock!.)
    class EnvironmentLock
      @queue = :locks

      # @param lock_params [Hash] lock attributes (string or symbol keys);
      #   must include :name_with_owner and :deployment_id.
      def self.perform(lock_params)
        lock_params.symbolize_keys!

        environment_locker = EnvironmentLocker.new(lock_params)
        environment_locker.lock!

        deployment_status = ::Deployment::Status.new(
          lock_params[:name_with_owner],
          lock_params[:deployment_id]
        )
        deployment_status.description =
          "#{environment_locker.name_with_owner} locked on #{environment_locker.environment} by #{environment_locker.actor}"
        deployment_status.success!
      end
    end
  end
end
| 25.454545 | 107 | 0.680357 |
334bf10c72cd68d3892810e9a60e475adc0c50c7 | 3,494 | class Api::V0::ApiController < ApplicationController
# Returns the top most-cited violations.
#
# @param params [Hash]
# @option params [Hash] limit A positive integer number of records to return. Corresponds to a SQL LIMIT clause value. Default is 50.
#
# @example [{"violation_id":"1", "violation_code":"40-6-20", "violation_description":"FAIL TO OBEY TRAF CTRL DEVICE", "citation_count":"3469"},{"violation_id":"9", "violation_code":"40-2-8", "violation_description":"NO TAG/ NO DECAL", "citation_count":"2515"},{"violation_id":"11", "violation_code":"40-8-76.1", "violation_description":"SAFETY BELT VIOLATION", "citation_count":"1960"}]
#
def top_violations
default_limit = 50
limit = params[:limit].try(:to_i) || default_limit # block sql-injection by converting (malicious) strings to zeros ...
limit = default_limit if limit <= 0
query_string =<<-SQL
SELECT
v.id AS violation_id
,v.code AS violation_code
,v.description AS violation_description
,count(DISTINCT cv.citation_guid) AS citation_count
FROM atlanta_violations v
JOIN atlanta_citation_violations cv ON v.code = cv.violation_code
GROUP BY 1,2,3
ORDER BY citation_count DESC
LIMIT #{limit};
SQL
query_results = ActiveRecord::Base.connection.execute(query_string)
@violations = query_results.to_a
respond_to do |format|
format.json { render json: @violations }
end
end
# Returns a histogram of defendant counts per each citation count.
#
# @param params [Hash]
# @option params [Hash] limit A positive integer number of records to return. Corresponds to a SQL LIMIT clause value. Default is 6.
#
# @example [{"citation_count":"1","defendant_count":"130706"},{"citation_count":"2","defendant_count":"29159"},{"citation_count":"3","defendant_count":"8987"},{"citation_count":"4","defendant_count":"3509"},{"citation_count":"5","defendant_count":"1409"},{"citation_count":"6","defendant_count":"678"},{"citation_count":"7","defendant_count":"314"},{"citation_count":"8","defendant_count":"170"},{"citation_count":"9","defendant_count":"86"},{"citation_count":"10","defendant_count":"61"},{"citation_count":"11","defendant_count":"31"},{"citation_count":"12","defendant_count":"19"},{"citation_count":"13","defendant_count":"14"},{"citation_count":"14","defendant_count":"5"},{"citation_count":"15","defendant_count":"6"},{"citation_count":"16","defendant_count":"2"},{"citation_count":"17","defendant_count":"2"},{"citation_count":"18","defendant_count":"4"},{"citation_count":"19","defendant_count":"4"},{"citation_count":"21","defendant_count":"1"}]
#
def defendant_citation_distribution
default_limit = 6
limit = params[:limit].try(:to_i) || default_limit # block sql-injection by converting (malicious) strings to zeros ...
limit = default_limit if limit <= 0
sql_string = <<-SQL
SELECT
zz.citation_count
,count(distinct defendant_full_name) as defendant_count
FROM (
SELECT
c.defendant_full_name
,count(DISTINCT c.guid) AS citation_count
FROM atlanta_citations c
GROUP BY 1
ORDER BY 2 DESC
) zz
GROUP BY 1
ORDER BY 1
LIMIT #{limit};
SQL
result = ActiveRecord::Base.connection.execute(sql_string)
@citation_distributions = result.to_a
respond_to do |format|
format.json { render json: @citation_distributions }
end
end
end
| 44.794872 | 954 | 0.684888 |
26852cf599dfea46326496d2165a2fb852f706e9 | 202 | ActiveModelSerializers.config.tap do |c|
  # c.adapter = :json_api # introduces too much unneeded info in the JSON object
# c.jsonapi_include_toplevel_object = true
# c.jsonapi_version = "1.0"
end
| 33.666667 | 81 | 0.742574 |
e2f53d72400a0c7bee36836f00cb7b9ab822377c | 42 | module Targetdata
VERSION = "0.3.1"
end
| 10.5 | 19 | 0.690476 |
38c0192e67579d592df0889753b714b23897ef89 | 1,584 | class ArticleCoverageCurated < Source
# include common methods for Article Coverage
include Coverable
def get_related_works(result, work)
Array(result.fetch('referrals', nil)).map do |item|
timestamp = get_iso8601_from_time(item.fetch('published_on', nil))
type = item.fetch("type", nil)
type = MEDIACURATION_TYPE_TRANSLATIONS.fetch(type, nil) if type
{
"author" => nil,
"title" => item.fetch("title", ""),
"container-title" => item.fetch("publication", ""),
"issued" => get_date_parts(timestamp),
"timestamp" => timestamp,
"URL" => item.fetch('referral', nil),
"type" => type,
"tracked" => true,
"related_works" => [{ "related_work" => work.pid,
"source" => name,
"relation_type" => "discusses" }] }
end
end
def get_extra(result)
Array(result.fetch('referrals', nil)).map do |item|
event_time = get_iso8601_from_time(item['published_on'])
url = item['referral']
type = item.fetch("type", nil)
type = MEDIACURATION_TYPE_TRANSLATIONS.fetch(type, nil) if type
{ event: item,
event_time: event_time,
event_url: url,
# the rest is CSL (citation style language)
event_csl: {
'author' => '',
'title' => item.fetch('title') { '' },
'container-title' => item.fetch('publication') { '' },
'issued' => get_date_parts(event_time),
'url' => url,
'type' => type }
}
end
end
end
| 32.326531 | 72 | 0.559975 |
0157802703194d67f629b1988d3a6b523ddca676 | 403 | set :host, "144.76.4.166"
set :stage, :production
set :env, :production
set :user, "rbdev"
# Deploy under the app user's home, e.g. /home/rbdev/<application>.
set :deploy_to, "/home/#{fetch :user}/#{fetch :application}"
set :branch, "master"
# Per-user RVM install pinned to Ruby 2.1.3.
set :rvm_type, :user
set :rvm_ruby_version, '2.1.3'
# Single-server setup: one box plays the web, app and db roles.
server fetch(:host), user: fetch(:user), roles: %w{web app db}, primary: true
# Key-based SSH only, with agent forwarding for git access.
set :ssh_options, {
  forward_agent: true,
  auth_methods: %w(publickey)
}
| 22.388889 | 77 | 0.647643 |
1a3b2087569a3bf404fcb8f1e81dae344332c5e8 | 1,088 | # frozen_string_literal: true
RSpec.describe Codebreaker::Game do
  # Fixtures with deliberately wrong constructor arguments: 228 is not a
  # Codebreaker::User, and 'hell' (a String) is not a Difficulty.
  let(:game_with_invalid_user) { described_class.new 228, Codebreaker::Difficulty.new('hell') }
  let(:game_with_invalid_difficulty) { described_class.new Codebreaker::User.new('Daniil'), 'hell' }
  describe '#valid?' do
    context 'when first passed parameter is not an instance of User' do
      it 'returns false' do
        expect(game_with_invalid_user.valid?).to eq false
      end
      it 'adds ExpectedUserInstanceError to errors' do
        # #valid? must run first — it is what populates #errors.
        game_with_invalid_user.valid?
        expect(game_with_invalid_user.errors).to include Codebreaker::ExpectedUserInstanceError
      end
    end
    context 'when second passed parameter is not an instance of Difficulty' do
      it 'returns false' do
        expect(game_with_invalid_difficulty.valid?).to eq false
      end
      it 'adds ExpectedDifficultyInstanceError to errors' do
        game_with_invalid_difficulty.valid?
        expect(game_with_invalid_difficulty.errors).to include Codebreaker::ExpectedDifficultyInstanceError
      end
    end
  end
end
| 36.266667 | 107 | 0.735294 |
26117cb4185a368ad4bdde5d522fc6f8cdc0f9fb | 3,915 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV["RAILS_SERVE_STATIC_FILES"].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [:request_id]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "bg-lib_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.210526 | 102 | 0.761941 |
f7d2ad62cd7281a873521ce684a9e0104e4f11c4 | 71 | require "bundler"
Bundler.require
require_all './lib/beer_o_clock_cli' | 17.75 | 36 | 0.816901 |
ff05d991456d673f42e956618092ef4b4242f0dd | 222 | # Load the Rails application.
require File.expand_path('../application', __FILE__)
# NOTE(review): requiring gems directly in environment.rb is unusual —
# Bundler.require in application.rb normally loads these. Verify they are
# not being loaded twice before removing.
require "jquery-rails"
require 'coffee-rails'
require 'bootstrap-sass'
# Initialize the Rails application.
Rails.application.initialize!
| 22.2 | 52 | 0.788288 |
4a1eb58134587781d3045d437c9062f70224bfe1 | 919 | # frozen_string_literal: true
require 'test_helper'
# Integration tests for the test-selector view helpers: each helper renders a
# `test-selector` attribute derived from the partial's virtual path, plus an
# optional `test-value` attribute.
class TestSelector::HTMLHelperTest < ActionDispatch::IntegrationTest
  test 'the test method should return the right input in html' do
    get '/some/path'

    assert_includes response.body, '<span test-selector="_app_views_some_path__partial_html_erb__dummy">1</span>'
    assert_includes response.body, '<span test-selector="_app_views_some_path__partial_html_erb__dummy" test-value="1">1</span>'
    assert_includes response.body, '<span test-selector="_app_views_some_path__partial_html_erb">1</span>'
  end

  test 'the test_selector method should return the right input in html' do
    get '/some/path'

    assert_includes response.body, '<span test-selector="_app_views_some_path__partial_html_erb__dummy">2</span>'
    assert_includes response.body, '<span test-selector="_app_views_some_path__partial_html_erb">2</span>'
  end
end
| 41.772727 | 128 | 0.78346 |
bf349e2b6f6258634fedd508b6ead34ad3051700 | 1,943 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
config.debug_exception_response_format = :api
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
# Run rails dev:cache to toggle caching.
if Rails.root.join('tmp', 'caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{2.days.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Store uploaded files on the local file system (see config/storage.yml for options)
config.active_storage.service = :local
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Highlight code that triggered database queries in logs.
config.active_record.verbose_query_logs = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
| 35.327273 | 86 | 0.763767 |
e28d858d71b3fdb12a8888eff0dbc939b8223c7e | 564 | class Flash < Formula
desc "Command-line script to flash SD card images of any kind"
homepage "https://github.com/hypriot/flash"
url "https://github.com/hypriot/flash/releases/download/2.7.1/flash"
sha256 "879057fea97c791a812e5c990d4ea07effd02406d3a267a9b24285c31ea6db3f"
bottle :unneeded
def install
bin.install "flash"
end
test do
system "hdiutil", "create", "-size", "128k", "test.dmg"
output = shell_output("echo foo | #{bin}/flash --device /dev/disk42 test.dmg", 1)
assert_match "Please answer yes or no.", output
end
end
| 29.684211 | 85 | 0.716312 |
7aa4829f58dba2fc3b79bfd9525e02e4c2a9cd00 | 1,033 | module Dice
# An ordered collection of rolls that can be (re)rolled and queried as a unit.
class RollList
  include Dice::HasValues

  attr_reader :list

  # Accepts any mix of single rolls and arrays of rolls; arrays are merged
  # (one level deep) into the internal list.
  def initialize *args
    @list = []
    args.each do |v|
      if v.is_a?(Array)
        @list += v
      else
        @list << v
      end
    end
  end

  # Re-roll every member in place.
  def roll!
    @list.each(&:roll!)
  end

  # Flattened results of all member rolls.
  def results
    @list.map(&:results).flatten
  end

  # Append a single roll to the list.
  def << roll
    @list << roll
  end

  # Comma-separated rendering of the member rolls.
  def to_s
    @list.map(&:to_s).join(', ')
  end

  # Parses either a parenthesised roll list or a comma-separated series of
  # vector rolls from +scanner+. Returns the inner parse result for a
  # parenthesised group, a RollList for a comma-separated series, or nil
  # when nothing matches.
  #
  # NOTE(review): the closing token here is RIGHT_PARAM while the opening
  # one is LEFT_PAREN — this looks like a typo for RIGHT_PAREN; confirm
  # against Dice::Parser::Token before changing.
  def self.parse scanner
    if scanner.scan(Dice::Parser::Token::LEFT_PAREN)
      list = parse(scanner)
      raise "Unmatched parenthesis" unless scanner.scan(Dice::Parser::Token::RIGHT_PARAM)
      list
    elsif roll = Dice::VectorRoll.parse(scanner)
      items = [ roll ]
      # Keep consuming ", <roll>" pairs; a trailing comma without a roll
      # simply ends the list.
      while scanner.scan(Dice::Parser::Token::COMMA)
        if roll = Dice::VectorRoll.parse(scanner)
          items << roll
        else
          break
        end
      end
      RollList.new(items)
    else
      nil
    end
  end
end
end
| 18.781818 | 91 | 0.522749 |
bb4460784aecf591737a6cfc3f5c8455a3f935a3 | 5,017 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Ensures that a master key has been made available in either ENV["RAILS_MASTER_KEY"]
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress CSS using a preprocessor.
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :digitalocean
# Mount Action Cable outside main process or domain.
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment).
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "lifestyle_production"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Inserts middleware to perform automatic connection switching.
# The `database_selector` hash is used to pass options to the DatabaseSelector
# middleware. The `delay` is used to determine how long to wait after a write
# to send a subsequent read to the primary.
#
# The `database_resolver` class is used by the middleware to determine which
# database is appropriate to use based on the time delay.
#
# The `database_resolver_context` class is used by the middleware to set
# timestamps for the last write to the primary. The resolver uses the context
# class timestamps to determine how long to wait before reading from the
# replica.
#
# By default Rails will store a last write timestamp in the session. The
# DatabaseSelector middleware is designed as such you can define your own
# strategy for connection switching and pass that into the middleware through
# these configuration options.
# config.active_record.database_selector = { delay: 2.seconds }
# config.active_record.database_resolver = ActiveRecord::Middleware::DatabaseSelector::Resolver
# config.active_record.database_resolver_context = ActiveRecord::Middleware::DatabaseSelector::Resolver::Session
end
| 44.39823 | 114 | 0.762607 |
9116fb157f52475d802d93bab886507dab4d44f3 | 495 | class PasswordsController < Devise::PasswordsController
protected
##
# Override Devise's default behaviour by sending the user to the home
# page after they have successfully reset their password.
def after_resetting_password_path_for(resource)
  root_path
end
##
# Override Devise default behaviour by sending user to the home page
# after the password reset email has been sent
#
# resource_name [Symbol] the Devise scope (e.g. :user) — not an email address
# ---------------------------------------------------------------------
def after_sending_reset_password_instructions_path_for(resource_name)
  root_path
end
end | 26.052632 | 73 | 0.660606 |
ffb17fd3259d3bbb4f0810d1619db131a3a5dead | 2,613 | # == Schema Information
#
# Table name: services
#
# id :integer not null, primary key
# type :string(255)
# title :string(255)
# project_id :integer
# created_at :datetime
# updated_at :datetime
# active :boolean default(FALSE), not null
# properties :text
# template :boolean default(FALSE)
# push_events :boolean default(TRUE)
# issues_events :boolean default(TRUE)
# merge_requests_events :boolean default(TRUE)
# tag_push_events :boolean default(TRUE)
# note_events :boolean default(TRUE), not null
#
require 'spec_helper'
require 'socket'
require 'json'
describe IrkerService, models: true do
  describe 'Associations' do
    it { is_expected.to belong_to :project }
    it { is_expected.to have_one :service_hook }
  end

  describe 'Validations' do
    context 'when service is active' do
      before { subject.active = true }

      it { is_expected.to validate_presence_of(:recipients) }
    end

    context 'when service is inactive' do
      before { subject.active = false }

      it { is_expected.not_to validate_presence_of(:recipients) }
    end
  end

  describe 'Execute' do
    let(:irker) { IrkerService.new }
    let(:user) { create(:user) }
    let(:project) { create(:project) }
    let(:sample_data) do
      Gitlab::DataBuilder::Push.build_sample(project, user)
    end

    # Mixed recipients: a bare channel, a full irc:// URI, and an invalid
    # ftp:// entry (presumably filtered out by the service — see expectations).
    let(:recipients) { '#commits irc://test.net/#test ftp://bad' }
    let(:colorize_messages) { '1' }

    before do
      # Port 0 asks the OS for an ephemeral port; the stubbed service is
      # pointed at whatever address/port the listener actually got.
      @irker_server = TCPServer.new 'localhost', 0
      allow(irker).to receive_messages(
        active: true,
        project: project,
        project_id: project.id,
        service_hook: true,
        server_host: @irker_server.addr[2],
        server_port: @irker_server.addr[1],
        default_irc_uri: 'irc://chat.freenode.net/',
        recipients: recipients,
        colorize_messages: colorize_messages)
      irker.valid?
    end

    after do
      @irker_server.close
    end

    it 'sends valid JSON messages to an Irker listener' do
      irker.execute(sample_data)

      # Each line the service writes must be a JSON object with exactly
      # the keys the irker daemon understands.
      conn = @irker_server.accept
      conn.readlines.each do |line|
        msg = JSON.load(line.chomp("\n"))
        expect(msg.keys).to match_array(['to', 'privmsg'])
        expect(msg['to']).to match_array(["irc://chat.freenode.net/#commits",
                                         "irc://test.net/#test"])
      end
      conn.close
    end
  end
end
| 28.714286 | 77 | 0.590126 |
f7cec0a6c6c516a6004420687fbfd152b3410ecc | 142 | class AddShopToShopType < ActiveRecord::Migration
# Adds an indexed shop_id reference column (with DB foreign key) to shop_types.
def change
  add_reference :shop_types, :shop, index: true, foreign_key: true
end
end
| 23.666667 | 68 | 0.767606 |
e82b722705595908b873a59dd00f16373fe499ba | 3,676 | #! /usr/bin/env ruby
#
# check-mesos-lost-tasks
#
# DESCRIPTION:
# This plugin checks that there are less or same number of lost tasks than provided on a Mesos cluster
#
# OUTPUT:
# plain text
#
# PLATFORMS:
# Linux
#
# DEPENDENCIES:
# gem: sensu-plugin
# gem: rest-client
# gem: json
#
# USAGE:
# #YELLOW
#
# NOTES:
#
# LICENSE:
# Copyright 2016, Oskar Flores ([email protected])
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
#
require 'sensu-plugin/check/cli'
require 'rest-client'
require 'json'
require 'daybreak'
# Sensu check that alerts when the number of LOST tasks reported by a Mesos
# master reaches the configured threshold. With --delta the check compares
# against the value recorded on the previous run instead of the raw counter.
class MesosLostTasksCheck < Sensu::Plugin::Check::CLI
  check_name 'CheckMesosLostTasks'
  @metrics_name = 'master/tasks_lost'.freeze

  class << self
    # Name of the Mesos metric this check inspects.
    attr_reader :metrics_name
  end

  option :server,
         description: 'Mesos server',
         short: '-s SERVER',
         long: '--server SERVER',
         default: 'localhost'

  option :port,
         description: 'port (default 5050)',
         short: '-p PORT',
         long: '--port PORT',
         default: 5050,
         required: false

  option :timeout,
         description: 'timeout in seconds',
         short: '-t TIMEOUT',
         long: '--timeout TIMEOUT',
         proc: proc(&:to_i),
         default: 5

  option :protocol,
         description: 'Mesos protocol [http/https]',
         short: '-P PROTOCOL',
         long: '--protocol PROTOCOL',
         required: false,
         default: 'http'

  option :uri,
         description: 'Endpoint URI',
         short: '-u URI',
         long: '--uri URI',
         default: '/metrics/snapshot'

  option :value,
         description: 'value to check against',
         short: '-v VALUE',
         long: '--value VALUE',
         default: 0,
         proc: proc(&:to_i),
         required: false

  option :delta,
         short: '-d',
         long: '--delta',
         description: 'Use this flag to compare the metric with the previously retrieved value',
         boolean: true

  # Entry point: resolves the leading master, fetches the metrics snapshot
  # and goes critical when the lost-task count reaches --value.
  def run
    if config[:value] < 0
      unknown 'Number of lost tasks cannot be negative, please set --value to a number greater or equal to 0'
    end

    server = config[:server]
    port = config[:port]
    uri = config[:uri]
    timeout = config[:timeout]
    value = config[:value]

    # Always query the leading master so the metric is authoritative.
    server = get_leader_url server, port
    r = RestClient::Resource.new("#{server}#{uri}", timeout).get
    tasks_lost = check_tasks(r)

    if config[:delta]
      # Persist the current counter and report only the change since the
      # previous run.
      db = Daybreak::DB.new '/tmp/mesos-metrics.db', default: 0
      prev_value = db["task_#{MesosLostTasksCheck.metrics_name}"]
      db.lock do
        db["task_#{MesosLostTasksCheck.metrics_name}"] = tasks_lost
      end
      tasks_lost -= prev_value
      db.flush
      db.compact
      db.close
    end

    if tasks_lost >= value
      critical "The number of LOST tasks [#{tasks_lost}] is bigger than provided [#{value}]!"
    end

    ok
  end

  # Follows the master's /redirect endpoint to discover the leader's URL.
  def get_leader_url(server, port)
    RestClient::Resource.new("#{config[:protocol]}://#{server}:#{port}/redirect").get.request.url
  end

  # Parses JSON data as returned from Mesos's metrics API
  # @param data [String] Server response
  # @return [Integer] Number of lost tasks in Mesos
  # @raise [RuntimeError] when the response is not JSON or lacks the metric
  def check_tasks(data)
    begin
      tasks_lost = JSON.parse(data)[MesosLostTasksCheck.metrics_name]
    rescue JSON::ParserError
      raise "Could not parse JSON response: #{data}"
    end
    if tasks_lost.nil?
      raise "No metrics for [#{MesosLostTasksCheck.metrics_name}] in server response: #{data}"
    end
    # The metric comes back as a float; normalise to an integer count.
    tasks_lost.round.to_i
  end
end
| 25.006803 | 109 | 0.61235 |
f7f9cfadbc59639090115c52c211dc4e8fea663d | 1,553 | #
# Cookbook:: dmca
# Recipe:: default
#
# Copyright:: 2018, OpenStreetMap Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe "apache"
include_recipe "php::fpm"

# mod_proxy + mod_proxy_fcgi hand PHP requests over to php-fpm.
apache_module "proxy"
apache_module "proxy_fcgi"

# Document root for the site.
directory "/srv/dmca.openstreetmap.org" do
  owner "root"
  group "root"
  mode "755"
end

# Deploy the static HTML content shipped with this cookbook.
remote_directory "/srv/dmca.openstreetmap.org/html" do
  source "html"
  owner "root"
  group "root"
  mode "755"
  files_owner "root"
  files_group "root"
  files_mode "644"
end

ssl_certificate "dmca.openstreetmap.org" do
  domains ["dmca.openstreetmap.org", "dmca.osm.org"]
  notifies :reload, "service[apache2]"
end

# Restrict PHP to the site directory and disable shell-execution functions.
php_fpm "dmca.openstreetmap.org" do
  php_admin_values "open_basedir" => "/srv/dmca.openstreetmap.org/html/:/usr/share/php/:/tmp/",
                   "disable_functions" => "exec,shell_exec,system,passthru,popen,proc_open"
  prometheus_port 11201
end

apache_site "dmca.openstreetmap.org" do
  template "apache.erb"
  directory "/srv/dmca.openstreetmap.org"
  variables :aliases => ["dmca.osm.org"]
end
| 26.775862 | 95 | 0.735995 |
e2dfc9cca2a6fed76e0d1e4384c78ac2fe376bc7 | 3,861 | class Link < ApplicationRecord
before_update :set_link_check_results_on_updated_link, if: :url_changed?
before_create :set_link_check_results_on_new_link
belongs_to :local_authority, touch: true
belongs_to :service_interaction, touch: true
has_one :service, through: :service_interaction
has_one :interaction, through: :service_interaction
validates :local_authority, :service_interaction, presence: true
validates :service_interaction_id, uniqueness: { scope: :local_authority_id }
validates :url, non_blank_url: true
scope :for_service,
lambda { |service|
includes(service_interaction: %i[service interaction])
.references(:service_interactions)
.where(service_interactions: { service_id: service })
}
scope :with_url, -> { where.not(url: nil) }
scope :without_url, -> { where(url: nil) }
scope :ok, -> { where(status: "ok") }
scope :broken, -> { where(status: "broken") }
scope :caution, -> { where(status: "caution") }
scope :missing, -> { where(status: "missing") }
scope :pending, -> { where(status: "pending") }
scope :broken_or_missing, -> { broken.or(missing) }
scope :last_checked_before,
lambda { |last_checked|
where("link_last_checked IS NULL OR link_last_checked < ?", last_checked)
}
validates :status, inclusion: { in: %w[ok broken caution missing pending] }, allow_nil: true
# Links whose service is enabled.
def self.enabled_links
  joins(:service).where(services: { enabled: true })
end

# Finds the link identified by the three slugs in +params+, or builds an
# unsaved one when no matching record exists.
def self.retrieve_or_build(params)
  joins(:local_authority, :service, :interaction).find_by(
    local_authorities: { slug: params[:local_authority_slug] },
    services: { slug: params[:service_slug] },
    interactions: { slug: params[:interaction_slug] },
  ) || build(params)
end

# A link (with a URL) for the given service/interaction pair, or nil.
def self.lookup_by_service_and_interaction(service, interaction)
  with_url.joins(:service, :interaction).find_by(
    services: { id: service.id },
    interactions: { id: interaction.id },
  )
end

# Resolves a base path of the form "/<govuk_slug>/<local_authority_slug>"
# into a link via the service interaction's govuk_slug.
def self.lookup_by_base_path(base_path)
  govuk_slug, local_authority_slug = base_path[1..].split("/")

  joins(:local_authority, :service_interaction)
    .find_by(local_authorities: { slug: local_authority_slug },
             service_interactions: { govuk_slug: govuk_slug })
end

# Builds (without saving) a link from slug params. Associations may come
# back nil when a slug does not match, which validations will catch.
def self.build(params)
  Link.new(
    local_authority: LocalAuthority.find_by(slug: params[:local_authority_slug]),
    service_interaction: ServiceInteraction.find_by(
      service: Service.find_by(slug: params[:service_slug]),
      interaction: Interaction.find_by(slug: params[:interaction_slug]),
    ),
  )
end

# Clears the URL; the before_update callback then marks the link as
# "missing" and resets its check results.
def make_missing
  self.url = nil
  save!
end
private
# Existing record (another link, or an authority homepage) already using
# this URL, from which previous link-check results can be copied.
def link_with_matching_url
  existing_link || existing_homepage
end

def existing_link
  @existing_link ||= Link.find_by(url: url)
end

def existing_homepage
  @existing_homepage ||= LocalAuthority.find_by(homepage_url: url)
end
# Refreshes link-check state when the URL changes (before_update callback):
# - URL removed       -> status "missing", stored results cleared
# - URL already known -> copy check results from the matching record
# - brand-new URL     -> status unknown (nil), stored results cleared
def set_link_check_results_on_updated_link
  if url.nil?
    reset_link_check_results(status: "missing")
  elsif link_with_matching_url
    set_link_check_results(link_with_matching_url)
  else
    reset_link_check_results(status: nil)
  end
end

# Clears all stored check data, leaving only the given status. Uses
# update_columns to skip validations/callbacks, as required inside a
# before_update callback.
def reset_link_check_results(status:)
  update_columns(
    status: status,
    link_last_checked: nil,
    link_errors: [],
    link_warnings: [],
    problem_summary: nil,
    suggested_fix: nil,
  )
end
# On create, seed check results from any existing record with the same URL.
def set_link_check_results_on_new_link
  set_link_check_results(link_with_matching_url) if link_with_matching_url
end

# Copies the check-result attributes from +link+ onto this record
# (in memory only; persisted by the surrounding save).
def set_link_check_results(link)
  self.status = link.status
  self.link_errors = link.link_errors
  self.link_warnings = link.link_warnings
  self.link_last_checked = link.link_last_checked
end
end
| 29.7 | 94 | 0.691013 |
610c68f2eee70f425c107781b0657af2acd56ede | 41 | class Activity < ActiveRecord::Base
end
| 10.25 | 35 | 0.780488 |
28e4a923e2cc7c5bb960e0a2afc25e8310a3052e | 126 | class RemoveUserToIdFromInvites < ActiveRecord::Migration[5.1]
# Drops the user_to_id column from invites.
# NOTE(review): without a column type argument this cannot be reversed
# (rollback raises IrreversibleMigration) — consider
# `remove_column :invites, :user_to_id, :integer` if rollback matters.
def change
  remove_column :invites, :user_to_id
end
end
| 21 | 62 | 0.777778 |
f8859a5a645083a7a9ef074c67f3703071ff3878 | 1,011 | cask 'postbox' do
version '5.0.5'
sha256 'b87f775fc40fc626980c468cfea9ef397529792e4b2e4d5eeac1a16aac637791'
# amazonaws.com/download.getpostbox.com was verified as official when first introduced to the cask
url "https://s3.amazonaws.com/download.getpostbox.com/installers/#{version}/1_348aedfb4d5afd22e43adecbddf15a86b9aedad3/postbox-#{version}-mac64.dmg"
name 'Postbox'
homepage 'https://www.postbox-inc.com/'
depends_on macos: '>= :mavericks'
depends_on arch: :x86_64
app 'Postbox.app'
zap delete: [
'~/Library/Application Support/Postbox',
'~/Library/Caches/com.crashlytics.data/com.postbox-inc.postbox',
'~/Library/Caches/com.postbox-inc.postbox',
'~/Library/Caches/Postbox',
'~/Library/PDF Services/Mail PDF with Postbox',
'~/Library/Preferences/com.postbox-inc.postbox.plist',
'~/Library/Saved Application State/com.postbox-inc.postbox.savedState',
]
end
| 40.44 | 150 | 0.673591 |
0100a480be607cd99e1900259aa19048737dc643 | 2,909 | require 'spec_helper'
describe Icalendar2::Parser do
  let(:test_file) { File.open(File.join(File.dirname(__FILE__), 'fixtures', 'single_event.ics')) }
  let(:test_file_2) { File.open(File.join(File.dirname(__FILE__), 'fixtures', 'multiple_events.ics')) }

  describe "initialization" do
    it "does not accept invalid input" do
      expect { Parser.new(nil) }.to raise_error(ArgumentError)
    end

    # NOTE(review): `to_not raise_error(SomeClass)` is deprecated/disallowed
    # in modern RSpec; these would need `to_not raise_error` on upgrade.
    it "accepts a file" do
      expect { Parser.new(test_file) }.to_not raise_error(ArgumentError)
    end

    it "accepts a string" do
      expect { Parser.new(test_file.read) }.to_not raise_error(ArgumentError)
    end
  end

  describe "parsing simple calendar" do
    # Parse once for the whole group; examples only read @calendars.
    before(:all) do
      @calendars = Parser.new(test_file).parse
    end

    it "returns the correct number of calendars" do
      @calendars.size.should eq(1)
      @calendars.first.class.should eq(Calendar)
    end

    it "parses the calendar properties" do
      @calendars.first.version.should eq("2.0")
      @calendars.first.prodid.should eq("bsprodidfortestabc123")
      @calendars.first.calscale.should eq("GREGORIAN")
      @calendars.first.method_property.should eq("REQUEST")
    end

    context "event" do
      let(:event) { @calendars.first.events.first }

      it "parses properties" do
        event.uid.value.to_s.should eq("bsuidfortestabc123")
        event.organizer.value.to_s.should eq("mailto:[email protected]")

        event.attachments.size.should eq(2)
        event.attachments.first.value.to_s.should eq("http://example.com/pub/audio-files/ssbanner.aud")
        event.attachments.first.parameters.should eq({ "FMTTYPE" => ["audio/basic"] })
        event.attachments.last.value.to_s.should eq("http://example.com/public/quarterly-report.doc")

        event.klass.value.to_s.should eq("PRIVATE")
        event.priority.value.to_s.should eq("2")
        event.geo.value.to_s.should eq("37.386013;-122.0829322")
        event.dtstart.value.to_s.should eq("20050120T170000")
        event.dtstart.parameters.should eq({ "TZID" => ["US-Mountain"] })
        event.dtend.value.to_s.should eq("20050120T184500")
        event.dtstamp.value.to_s.should eq("20050118T211523Z")

        event.categories_properties.size.should eq(1)
        categories = event.categories_properties.first
        categories.value.map(&:to_s).should eq(["APPLES", "ORANGES", "UNICORNS"])

        exdate = event.exdates.first
        exdate.value.map(&:to_s).should eq(["19960402T010000Z", "19960403T010000Z", "19960404T010000Z"])
      end
    end
  end

  describe "parsing multiple event calendar" do
    before(:all) do
      @calendars = Parser.new(test_file_2).parse
    end

    it "returns the correct number of calendars" do
      @calendars.size.should eq(1)
      @calendars.first.class.should eq(Calendar)
    end

    it "parses all events" do
      @calendars.first.events.size.should eq(3)
    end
  end
end
| 34.630952 | 104 | 0.680303 |
1d7229cbd1a1f073c86c1b73e373c24faebb541b | 1,262 | # Copyright 2019 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
require 'cloudconfig_test'
# System test: a host that is not part of the deployed application package
# must still be able to fetch config, falling back to the default tenant
# and application.
class SubscribeFromUnknownHost < CloudConfigTest

  def initialize(*args)
    super(*args)
    # Two hosts so one of them is guaranteed not to run the config server.
    @num_hosts = 2
  end

  def can_share_configservers?(method_name=nil)
    false
  end

  def setup
    set_owner("musum")
    set_description("Tests that getting config from another host than defined in an application package will get config from default tenant and application")
  end

  def test_subscribe_from_host_outside_vespa_instance
    deploy_app(CloudconfigApp.new)
    config_server = vespa.configservers["0"]
    # Pick whichever node proxy is NOT the config server host.
    if (config_server.hostname == @vespa.nodeproxies.values[0].hostname)
      external_node = @vespa.nodeproxies.values[1]
    else
      external_node = @vespa.nodeproxies.values[0]
    end
    assert_config(config_server.hostname, 19070, external_node)
  end

  # Fetches logd config from the external node and checks the logserver
  # host resolved to the config server's hostname.
  def assert_config(configserver_hostname, configserver_port, node_proxy)
    config = node_proxy.execute("vespa-get-config -n cloud.config.log.logd -i admin -s #{configserver_hostname} -p #{configserver_port} | grep host")
    assert_equal("logserver.host \"#{configserver_hostname}\"", config.chomp)
  end

  def teardown
    stop
  end
end
| 31.55 | 157 | 0.750396 |
62cec409cc67dea2f01ee433f8ea9a34a21419ce | 2,155 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elasticsearch
  module Model
    module Adapter

      # The default adapter for models which haven't one registered
      #
      module Default

        # Module for implementing methods and logic related to fetching records from the database
        #
        module Records

          # Return the collection of records fetched from the database
          #
          # By default uses `MyModel#find[1, 2, 3]`
          #
          def records
            klass.find(@ids)
          end
        end

        # Module for implementing methods and logic related to hooking into model lifecycle
        # (e.g. to perform automatic index updates)
        #
        # @see http://api.rubyonrails.org/classes/ActiveModel/Callbacks.html
        module Callbacks
          # noop
        end

        # Module for efficiently fetching records from the database to import them into the index
        #
        module Importing

          # @abstract Implement this method in your adapter
          #
          # @raise [NotImplementedError] always; a concrete adapter must override this
          #
          def __find_in_batches(options={}, &block)
            raise NotImplementedError, "Method not implemented for default adapter"
          end

          # @abstract Implement this method in your adapter
          #
          # @raise [NotImplementedError] always; a concrete adapter must override this
          #
          def __transform
            raise NotImplementedError, "Method not implemented for default adapter"
          end
        end
      end
    end
  end
end
| 31.691176 | 97 | 0.662181 |
01bdd55dea426e848fc54cfa4f25950756c32d41 | 4,393 | require 'test_helper'
# Tests for the Edition::Images concern: image creation, copying of images
# into new drafts, and cleanup on destroy.
class Edition::ImagesTest < ActiveSupport::TestCase
  # Minimal edition class with the concern under test mixed in.
  class EditionWithImages < Edition
    include ::Edition::Images
  end

  include ActionDispatch::TestProcess

  test "editions can be created with multiple images" do
    edition = EditionWithImages.create!(valid_edition_attributes.merge(
      images_attributes: [
        {alt_text: "Something about this image",
         caption: "Text to be visible along with the image",
         image_data_attributes: {file: fixture_file_upload('minister-of-funk.960x640.jpg')}},
        {alt_text: "alt-text-2",
         caption: "caption-2",
         image_data_attributes: {file: fixture_file_upload('minister-of-funk.960x640.jpg')}}
      ]))

    assert_equal 2, edition.images.count
    assert_equal "Something about this image", edition.images[0].alt_text
    assert_equal "Text to be visible along with the image", edition.images[0].caption
    assert_equal "alt-text-2", edition.images[1].alt_text
    assert_equal "caption-2", edition.images[1].caption
  end

  test "#create_draft should include copies of image attributes" do
    image = create(:image)
    published_edition = EditionWithImages.create!(valid_edition_attributes.merge(
      state: 'published',
      major_change_published_at: Time.zone.now,
      first_published_at: Time.zone.now,
      images: [image]
    ))
    VirusScanHelpers.simulate_virus_scan

    draft_edition = published_edition.create_draft(build(:user))
    draft_edition.change_note = 'change-note'
    assert draft_edition.valid?

    # The draft gets its own Image row with the same metadata.
    new_image = draft_edition.images.last
    refute_equal image, new_image
    assert_equal image.alt_text, new_image.alt_text
    assert_equal image.caption, new_image.caption
  end

  test "#create_draft should not duplicate the actual image data" do
    image = create(:image)
    published_edition = EditionWithImages.create!(valid_edition_attributes.merge(
      state: 'published',
      major_change_published_at: Time.zone.now,
      first_published_at: Time.zone.now,
      images: [image]
    ))
    VirusScanHelpers.simulate_virus_scan

    draft_edition = published_edition.create_draft(build(:user))
    # Both Image rows point at the same ImageData record.
    new_image = draft_edition.images.last
    assert_equal image.image_data_id, new_image.image_data_id
  end

  test "#create_draft should carry-over images even when there are validation errors in image data" do
    published_edition = EditionWithImages.new(valid_edition_attributes.merge(
      state: 'published',
      major_change_published_at: Time.zone.now,
      first_published_at: Time.zone.now,
      images_attributes: [{
        alt_text: "image smaller than 960x640",
        caption: "some-caption",
        image_data_attributes: {
          file: fixture_file_upload('horrible-image.64x96.jpg')
        }
      }]
    ))
    # Bypass validations so the invalid image data is persisted.
    published_edition.save(validate: false)
    VirusScanHelpers.simulate_virus_scan

    new_draft = published_edition.create_draft(build(:user))
    new_draft.reload
    assert_equal 1, new_draft.images.count
    assert_equal new_draft.images.first.image_data, published_edition.images.first.image_data
  end

  test "captions for images can be changed between versions" do
    published_edition = EditionWithImages.create!(valid_edition_attributes.merge(
      state: 'published',
      major_change_published_at: Time.zone.now,
      first_published_at: Time.zone.now,
      images_attributes: [{
        alt_text: "alt-text",
        caption: "original-caption",
        image_data_attributes: {
          file: fixture_file_upload('minister-of-funk.960x640.jpg')
        }
      }]
    ))
    VirusScanHelpers.simulate_virus_scan

    draft_edition = published_edition.create_draft(build(:user))
    draft_edition.images.first.update_attributes(caption: "new-caption")

    # Editing the draft's caption must not leak into the published edition.
    assert_equal "original-caption", published_edition.images.first.caption
  end

  test "#destroy should also remove the image" do
    image = create(:image)
    edition = EditionWithImages.create!(valid_edition_attributes.merge(images: [image]))
    edition.destroy
    refute Image.find_by_id(image.id)
  end

  test "should indicate that it allows image attachments" do
    assert EditionWithImages.new.allows_image_attachments?
  end

  private

  def valid_edition_attributes
    {
      title: 'edition-title',
      body: 'edition-body',
      summary: 'edition-summary',
      creator: build(:user)
    }
  end
end
| 33.534351 | 102 | 0.72183 |
2699d4460323f889fc576609b5f694a2ca86c24d | 2,961 | # frozen_string_literal: true
module Baw
module ActiveRecord
# Helper methods for incrementing counters
module Upsert
# We've added custom Arel nodes, extend the ToSql visitor class so that our
# nodes can be translated to SQL.
::Arel::Visitors::ToSql.prepend(Baw::Arel::Visitors::ToSqlExtensions)
# Increments the counters from attributes by inserting the values or
# updating existing values to be the sum of the new and old values.
# Primary keys are detected and omitted on update.
# Does not construct a model, does not return a value.
# @param [Hash] attributes a hash of attributes to upsert
# @return [void]
def upsert_counter(attributes)
update_attrs = attributes.keys.reject { |key| primary_keys.include?(key.to_s) }
upsert_query(
attributes,
returning: nil,
on_conflict: Baw::Arel::Helpers.upsert_on_conflict_sum(arel_table, *update_attrs)
).execute
end
# Creates an upsert query for one row of data.
# Rails has an upsert query, but it does not support custom sql on conflict. It
# won't until Rails 7, and it is not viable to upgrade just ActiveRecord.
# @param [Hash] attributes a hash of attributes to upsert
# @param [Symbol,Array<::Arel::Nodes::node>,::Arel::Nodes::Node] on_conflict:
# What to do when the insert fails.
# - :do_nothing simply cancels the insert.
# - :update generates a list of updates for columns that are not primary keys.
# You can pass an array of ::Arel::Nodes here (or Arel.Sql() literals) to
# do a custom update. Custom updates are useful for combining new and old values
# (e.g. incrementing counters with a SUM) or for inserting computed values.
# @param [Symbol,Array<Symbol>,String] conflict_target: used to constrain
# the conflict resolution to a subset of columns. If omitted defaults to the
# value of `primary_keys`. If a symbol or an array of symbols it assumed these
# are references to a tuple of columns that are unique. If a string, is used
# as the name of the unique constraint to reference for uniqueness.
# @param [Array<::Arel::Nodes::Node>] conflict_where: a predicate used to allow partial
# unique indexes to be resolved.
# @return [Array<Array<Object>,Object>] an array rows each including an array columns,
# if `returning` is not `nil`. If only one column is returned the columns array will be unwrapped.
def upsert_query(attributes, on_conflict:, returning: [], conflict_target: nil, conflict_where: nil)
Baw::Arel::UpsertManager.new(self).tap do |u|
u.insert(attributes)
u.on_conflict(on_conflict, conflict_target, conflict_where)
returning = primary_keys if !returning.nil? && returning.empty?
u.returning(returning)
end
end
end
end
end
| 50.186441 | 106 | 0.678487 |
ac891012c1325af8978b354de6eb51dc3e3808fd | 487 | require 'spec_helper'
describe NewsRelease, type: :model do
it 'returns the formatted date and title as a string' do
news_release = described_class.new(
released_on: '2013-07-31',
title: 'BigCo hires new CEO'
)
expect(news_release.title_with_date).to eq '2013-07-31: BigCo hires new CEO'
end
it { is_expected.to validate_presence_of :released_on }
it { is_expected.to validate_presence_of :title }
it { is_expected.to validate_presence_of :body }
end
| 30.4375 | 80 | 0.726899 |
ff734a50b5b38402b191dc26a5b5c73121e70b9a | 500 | module Spree
class OrderStockLocation < Spree::Base
belongs_to :variant, class_name: "Spree::Variant"
belongs_to :stock_location, class_name: "Spree::StockLocation"
belongs_to :order, class_name: "Spree::Order"
def self.fulfill_for_order_with_stock_location(order, stock_location)
where(order_id: order.id, stock_location_id: stock_location.id).each(&:fulfill_shipment!)
end
def fulfill_shipment!
update_attributes!(shipment_fulfilled: true)
end
end
end
| 31.25 | 95 | 0.75 |
79851f807607b5fcd5d753cec53f876d9f1f56dc | 331 | require 'rubygems'
require 'bundler/setup'
require 'devise_crowd_authenticatable'
::Devise.crowd_config = Proc.new() {{
'url' => 'http://localhost:4567/rest',
'username' => 'foo',
'password' => 'bar'
}}
conn = Devise::Crowd::Connection.new(username: 'user', password: 'xxx')
puts conn.authenticated?
| 20.6875 | 71 | 0.640483 |
7a2b63cf6b0c45bf257df1ed257aafd85f3a4a19 | 13,135 | require 'rails_helper'
describe UserGroupsController do
fixtures :all
def valid_attributes
FactoryBot.attributes_for(:user_group)
end
describe 'GET index' do
before(:each) do
FactoryBot.create(:user_group)
end
describe 'When logged in as Administrator' do
login_fixture_admin
it 'assigns all user_groups as @user_groups' do
get :index
assigns(:user_groups).should eq(UserGroup.order(:position))
end
end
describe 'When logged in as Librarian' do
login_fixture_librarian
it 'assigns all user_groups as @user_groups' do
get :index
assigns(:user_groups).should eq(UserGroup.order(:position))
end
end
describe 'When logged in as User' do
login_fixture_user
it 'assigns all user_groups as @user_groups' do
get :index
assigns(:user_groups).should eq(UserGroup.order(:position))
end
end
describe 'When not logged in' do
it 'assigns all user_groups as @user_groups' do
get :index
assigns(:user_groups).should eq(UserGroup.order(:position))
end
end
end
describe 'GET show' do
describe 'When logged in as Administrator' do
login_fixture_admin
it 'assigns the requested user_group as @user_group' do
user_group = FactoryBot.create(:user_group)
get :show, params: { id: user_group.id }
assigns(:user_group).should eq(user_group)
end
end
describe 'When logged in as Librarian' do
login_fixture_librarian
it 'assigns the requested user_group as @user_group' do
user_group = FactoryBot.create(:user_group)
get :show, params: { id: user_group.id }
assigns(:user_group).should eq(user_group)
end
end
describe 'When logged in as User' do
login_fixture_user
it 'assigns the requested user_group as @user_group' do
user_group = FactoryBot.create(:user_group)
get :show, params: { id: user_group.id }
assigns(:user_group).should eq(user_group)
end
end
describe 'When not logged in' do
it 'assigns the requested user_group as @user_group' do
user_group = FactoryBot.create(:user_group)
get :show, params: { id: user_group.id }
assigns(:user_group).should eq(user_group)
end
end
end
describe 'GET new' do
describe 'When logged in as Administrator' do
login_fixture_admin
it 'assigns the requested user_group as @user_group' do
get :new
assigns(:user_group).should_not be_valid
expect(response).to be_successful
end
end
describe 'When logged in as Librarian' do
login_fixture_librarian
it 'should not assign the requested user_group as @user_group' do
get :new
assigns(:user_group).should be_nil
expect(response).to be_forbidden
end
end
describe 'When logged in as User' do
login_fixture_user
it 'should not assign the requested user_group as @user_group' do
get :new
assigns(:user_group).should be_nil
expect(response).to be_forbidden
end
end
describe 'When not logged in' do
it 'should not assign the requested user_group as @user_group' do
get :new
assigns(:user_group).should be_nil
expect(response).to redirect_to(new_user_session_url)
end
end
end
describe 'GET edit' do
describe 'When logged in as Administrator' do
login_fixture_admin
it 'assigns the requested user_group as @user_group' do
user_group = FactoryBot.create(:user_group)
get :edit, params: { id: user_group.id }
assigns(:user_group).should eq(user_group)
end
end
describe 'When logged in as Librarian' do
login_fixture_librarian
it 'assigns the requested user_group as @user_group' do
user_group = FactoryBot.create(:user_group)
get :edit, params: { id: user_group.id }
expect(response).to be_forbidden
end
end
describe 'When logged in as User' do
login_fixture_user
it 'assigns the requested user_group as @user_group' do
user_group = FactoryBot.create(:user_group)
get :edit, params: { id: user_group.id }
expect(response).to be_forbidden
end
end
describe 'When not logged in' do
it 'should not assign the requested user_group as @user_group' do
user_group = FactoryBot.create(:user_group)
get :edit, params: { id: user_group.id }
expect(response).to redirect_to(new_user_session_url)
end
end
end
describe 'POST create' do
before(:each) do
@attrs = valid_attributes
@invalid_attrs = { name: '' }
end
describe 'When logged in as Administrator' do
login_fixture_admin
describe 'with valid params' do
it 'assigns a newly created user_group as @user_group' do
post :create, params: { user_group: @attrs }
assigns(:user_group).should be_valid
end
it 'redirects to the created patron' do
post :create, params: { user_group: @attrs }
expect(response).to redirect_to(assigns(:user_group))
end
end
describe 'with invalid params' do
it 'assigns a newly created but unsaved user_group as @user_group' do
post :create, params: { user_group: @invalid_attrs }
assigns(:user_group).should_not be_valid
end
it "re-renders the 'new' template" do
post :create, params: { user_group: @invalid_attrs }
expect(response).to render_template('new')
end
end
end
describe 'When logged in as Librarian' do
login_fixture_librarian
describe 'with valid params' do
it 'assigns a newly created user_group as @user_group' do
post :create, params: { user_group: @attrs }
assigns(:user_group).should be_nil
end
it 'should be forbidden' do
post :create, params: { user_group: @attrs }
expect(response).to be_forbidden
end
end
describe 'with invalid params' do
it 'assigns a newly created but unsaved user_group as @user_group' do
post :create, params: { user_group: @invalid_attrs }
assigns(:user_group).should be_nil
end
it 'should be forbidden' do
post :create, params: { user_group: @invalid_attrs }
expect(response).to be_forbidden
end
end
end
describe 'When logged in as User' do
login_fixture_user
describe 'with valid params' do
it 'assigns a newly created user_group as @user_group' do
post :create, params: { user_group: @attrs }
assigns(:user_group).should be_nil
end
it 'should be forbidden' do
post :create, params: { user_group: @attrs }
expect(response).to be_forbidden
end
end
describe 'with invalid params' do
it 'assigns a newly created but unsaved user_group as @user_group' do
post :create, params: { user_group: @invalid_attrs }
assigns(:user_group).should be_nil
end
it 'should be forbidden' do
post :create, params: { user_group: @invalid_attrs }
expect(response).to be_forbidden
end
end
end
describe 'When not logged in' do
describe 'with valid params' do
it 'assigns a newly created user_group as @user_group' do
post :create, params: { user_group: @attrs }
assigns(:user_group).should be_nil
end
it 'should be forbidden' do
post :create, params: { user_group: @attrs }
expect(response).to redirect_to(new_user_session_url)
end
end
describe 'with invalid params' do
it 'assigns a newly created but unsaved user_group as @user_group' do
post :create, params: { user_group: @invalid_attrs }
assigns(:user_group).should be_nil
end
it 'should be forbidden' do
post :create, params: { user_group: @invalid_attrs }
expect(response).to redirect_to(new_user_session_url)
end
end
end
end
describe 'PUT update' do
before(:each) do
@user_group = FactoryBot.create(:user_group)
@attrs = valid_attributes
@invalid_attrs = { name: '' }
end
describe 'When logged in as Administrator' do
login_fixture_admin
describe 'with valid params' do
it 'updates the requested user_group' do
put :update, params: { id: @user_group.id, user_group: @attrs }
end
it 'assigns the requested user_group as @user_group' do
put :update, params: { id: @user_group.id, user_group: @attrs }
assigns(:user_group).should eq(@user_group)
end
it 'moves its position when specified' do
put :update, params: { id: @user_group.id, user_group: @attrs, move: 'lower' }
expect(response).to redirect_to(user_groups_url)
end
end
describe 'with invalid params' do
it 'assigns the requested user_group as @user_group' do
put :update, params: { id: @user_group.id, user_group: @invalid_attrs }
expect(response).to render_template('edit')
end
end
end
describe 'When logged in as Librarian' do
login_fixture_librarian
describe 'with valid params' do
it 'updates the requested user_group' do
put :update, params: { id: @user_group.id, user_group: @attrs }
end
it 'assigns the requested user_group as @user_group' do
put :update, params: { id: @user_group.id, user_group: @attrs }
assigns(:user_group).should eq(@user_group)
expect(response).to be_forbidden
end
end
describe 'with invalid params' do
it 'assigns the requested user_group as @user_group' do
put :update, params: { id: @user_group.id, user_group: @invalid_attrs }
expect(response).to be_forbidden
end
end
end
describe 'When logged in as User' do
login_fixture_user
describe 'with valid params' do
it 'updates the requested user_group' do
put :update, params: { id: @user_group.id, user_group: @attrs }
end
it 'assigns the requested user_group as @user_group' do
put :update, params: { id: @user_group.id, user_group: @attrs }
assigns(:user_group).should eq(@user_group)
expect(response).to be_forbidden
end
end
describe 'with invalid params' do
it 'assigns the requested user_group as @user_group' do
put :update, params: { id: @user_group.id, user_group: @invalid_attrs }
expect(response).to be_forbidden
end
end
end
describe 'When not logged in' do
describe 'with valid params' do
it 'updates the requested user_group' do
put :update, params: { id: @user_group.id, user_group: @attrs }
end
it 'should be forbidden' do
put :update, params: { id: @user_group.id, user_group: @attrs }
expect(response).to redirect_to(new_user_session_url)
end
end
describe 'with invalid params' do
it 'assigns the requested user_group as @user_group' do
put :update, params: { id: @user_group.id, user_group: @invalid_attrs }
expect(response).to redirect_to(new_user_session_url)
end
end
end
end
describe 'DELETE destroy' do
before(:each) do
@user_group = FactoryBot.create(:user_group)
end
describe 'When logged in as Administrator' do
login_fixture_admin
it 'destroys the requested user_group' do
delete :destroy, params: { id: @user_group.id }
end
it 'redirects to the user_groups list' do
delete :destroy, params: { id: @user_group.id }
expect(response).to redirect_to(user_groups_url)
end
end
describe 'When logged in as Librarian' do
login_fixture_librarian
it 'destroys the requested user_group' do
delete :destroy, params: { id: @user_group.id }
end
it 'should be forbidden' do
delete :destroy, params: { id: @user_group.id }
expect(response).to be_forbidden
end
end
describe 'When logged in as User' do
login_fixture_user
it 'destroys the requested user_group' do
delete :destroy, params: { id: @user_group.id }
end
it 'should be forbidden' do
delete :destroy, params: { id: @user_group.id }
expect(response).to be_forbidden
end
end
describe 'When not logged in' do
it 'destroys the requested user_group' do
delete :destroy, params: { id: @user_group.id }
end
it 'should be forbidden' do
delete :destroy, params: { id: @user_group.id }
expect(response).to redirect_to(new_user_session_url)
end
end
end
end
| 29.583333 | 88 | 0.633346 |
b96d4d8dc85375542a1d442ca46edec0a1e776ff | 440 | $LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "bundler/setup"
require "numbers_to_kurdish_words"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 25.882353 | 66 | 0.754545 |
b9903e37f40640d9a1110e41a01818bc93b7b937 | 3,775 | # frozen_string_literal: true
module Gitlab
module Kubernetes
module Helm
class Api
def initialize(kubeclient)
@kubeclient = kubeclient
@namespace = Gitlab::Kubernetes::Namespace.new(Gitlab::Kubernetes::Helm::NAMESPACE, kubeclient)
end
def install(command)
namespace.ensure_exists!
create_service_account(command)
create_cluster_role_binding(command)
create_config_map(command)
delete_pod!(command.pod_name)
kubeclient.create_pod(command.pod_resource)
end
def update(command)
namespace.ensure_exists!
update_config_map(command)
delete_pod!(command.pod_name)
kubeclient.create_pod(command.pod_resource)
end
##
# Returns Pod phase
#
# https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#pod-phase
#
# values: "Pending", "Running", "Succeeded", "Failed", "Unknown"
#
def status(pod_name)
kubeclient.get_pod(pod_name, namespace.name).status.phase
end
def log(pod_name)
kubeclient.get_pod_log(pod_name, namespace.name).body
end
def delete_pod!(pod_name)
kubeclient.delete_pod(pod_name, namespace.name)
rescue ::Kubeclient::ResourceNotFoundError
# no-op
end
def get_config_map(config_map_name)
namespace.ensure_exists!
kubeclient.get_config_map(config_map_name, namespace.name)
end
private
attr_reader :kubeclient, :namespace
def create_config_map(command)
command.config_map_resource.tap do |config_map_resource|
if config_map_exists?(config_map_resource)
kubeclient.update_config_map(config_map_resource)
else
kubeclient.create_config_map(config_map_resource)
end
end
end
def update_config_map(command)
command.config_map_resource.tap do |config_map_resource|
kubeclient.update_config_map(config_map_resource)
end
end
def create_service_account(command)
command.service_account_resource.tap do |service_account_resource|
break unless service_account_resource
if service_account_exists?(service_account_resource)
kubeclient.update_service_account(service_account_resource)
else
kubeclient.create_service_account(service_account_resource)
end
end
end
def create_cluster_role_binding(command)
command.cluster_role_binding_resource.tap do |cluster_role_binding_resource|
break unless cluster_role_binding_resource
if cluster_role_binding_exists?(cluster_role_binding_resource)
kubeclient.update_cluster_role_binding(cluster_role_binding_resource)
else
kubeclient.create_cluster_role_binding(cluster_role_binding_resource)
end
end
end
def config_map_exists?(resource)
kubeclient.get_config_map(resource.metadata.name, resource.metadata.namespace)
rescue ::Kubeclient::ResourceNotFoundError
false
end
def service_account_exists?(resource)
kubeclient.get_service_account(resource.metadata.name, resource.metadata.namespace)
rescue ::Kubeclient::ResourceNotFoundError
false
end
def cluster_role_binding_exists?(resource)
kubeclient.get_cluster_role_binding(resource.metadata.name)
rescue ::Kubeclient::ResourceNotFoundError
false
end
end
end
end
end
| 30.443548 | 105 | 0.651656 |
1819f969bb5d55683b5d63f92a0262d961ef4bf3 | 1,013 | require 'happyco/inspect/inspection/v1/inspection_pb'
require 'happyco/inspect/inspection/v1/inspection_services_pb'
require 'happyco/inspect/report/v1/report_pb'
require 'happyco/inspect/report/v1/report_services_pb'
require 'happyco/inspect/template/v1/template_pb'
require 'happyco/inspect/template/v1/template_services_pb'
require 'happyco/inspect/type/v1/inspect_type_pb'
require 'happyco/manage/account/v1/account_pb'
require 'happyco/manage/account/v1/account_services_pb'
require 'happyco/manage/account_provisioning/v1/account_provisioning_pb'
require 'happyco/manage/account_provisioning/v1/account_provisioning_services_pb'
require 'happyco/manage/asset/v1/asset_pb'
require 'happyco/manage/asset/v1/asset_services_pb'
require 'happyco/manage/folder/v1/folder_pb'
require 'happyco/manage/folder/v1/folder_services_pb'
require 'happyco/type/v1/date_pb'
require 'happyco/type/v1/event_pb'
require 'happyco/type/v1/integration_id_pb'
require 'happyco/type/v1/paging_pb'
require 'happyco/type/v1/varia_pb'
| 48.238095 | 81 | 0.846989 |
39995a6050d231487211a0ad9ba9e57b8f02b969 | 325 | cask :v1 => 'today-scripts' do
version '1.1'
sha256 'e9e1136e787347f257e9554efdd546c4fd92a6b2a6ae28d23f0065b3d7b96bc4'
url 'https://github.com/SamRothCA/Today-Scripts/releases/download/v1.1/Today-Scripts.tar.gz'
homepage 'https://github.com/SamRothCA/Today-Scripts'
license :unknown
app 'Today Scripts.app'
end
| 29.545455 | 94 | 0.772308 |
d52e31a96d00e7511ded63df2265620d47472dee | 229 | include SendGrid
class ErrorNotifier < ApplicationMailer
def send_error_email
puts "send_error_email"
mail( :to => '[email protected]',
:subject => "You got an error!"
)
end
end
| 22.9 | 58 | 0.681223 |
e92d02f6b9a9a603d81dd370be3ebceecfb190a0 | 509 | # frozen_string_literal: true
module Sidekiq
module Statistic
class Middleware
def call(worker, message, queue)
class_name = message['wrapped'] || worker.class.to_s
metric = Metric.for(class_name: class_name, arguments: message['args'])
metric.queue = message['queue'] || queue
metric.start
yield
rescue => e
metric.fails!
raise e
ensure
metric.finish
Metrics::Store.call(metric)
end
end
end
end
| 19.576923 | 79 | 0.603143 |
ac6c645f81b5440c2c3dd5ede36edb311251b6b1 | 272 | module SpreeEcs
class Base
class << self
def cache(key, options = { :expires_in => 1.day })
Rails.cache.fetch("#{Digest::SHA1.hexdigest(key)}",options){ yield }
end
def log(msg)
Rails.logger.debug(msg)
end
end
end
end
| 18.133333 | 76 | 0.573529 |
f8a54946077c2b010413d665b55b0546f0a2770c | 232 | class CreateNotes < ActiveRecord::Migration
def change
create_table :notes do |t|
t.timestamps null: false
t.text :note, null: false
t.references :noteable, index: true, polymorphic: true
end
end
end
| 19.333333 | 60 | 0.672414 |
bf9d6d22ce5649387b05969606ab3a56e4bd5c77 | 12,543 | require 'fileutils'
require "cgi"
module Prometheus
module Client
module DataStores
# Stores data in binary files, one file per process and per metric.
# This is generally the recommended store to use to deal with pre-fork servers and
# other "multi-process" scenarios.
#
# Each process will get a file for a metric, and it will manage its contents by
# storing keys next to binary-encoded Floats, and keeping track of the offsets of
# those Floats, to be able to update them directly as they increase.
#
# When exporting metrics, the process that gets scraped by Prometheus will find
# all the files that apply to a metric, read their contents, and aggregate them
# (generally that means SUMming the values for each labelset).
#
# In order to do this, each Metric needs an `:aggregation` setting, specifying how
# to aggregate the multiple possible values we can get for each labelset. By default,
# Counters, Histograms and Summaries get `SUM`med, and Gauges will report `ALL`
# values, tagging each one with a `pid` label.
# For Gauges, it's also possible to set `SUM`, MAX` or `MIN` as aggregation, to get
# the highest / lowest value / or the sum of all the processes / threads.
#
# Before using this Store, please read the "`DirectFileStore` caveats and things to
# keep in mind" section of the main README in this repository. It includes a number
# of important things to keep in mind.
class DirectFileStore
class InvalidStoreSettingsError < StandardError; end
AGGREGATION_MODES = [MAX = :max, MIN = :min, SUM = :sum, ALL = :all, MOST_RECENT = :most_recent]
DEFAULT_METRIC_SETTINGS = { aggregation: SUM }
DEFAULT_GAUGE_SETTINGS = { aggregation: ALL }
def initialize(dir:)
@store_settings = { dir: dir }
FileUtils.mkdir_p(dir)
end
def for_metric(metric_name, metric_type:, metric_settings: {})
default_settings = DEFAULT_METRIC_SETTINGS
if metric_type == :gauge
default_settings = DEFAULT_GAUGE_SETTINGS
end
settings = default_settings.merge(metric_settings)
validate_metric_settings(metric_type, settings)
MetricStore.new(metric_name: metric_name,
store_settings: @store_settings,
metric_settings: settings)
end
private
def validate_metric_settings(metric_type, metric_settings)
unless metric_settings.has_key?(:aggregation) &&
AGGREGATION_MODES.include?(metric_settings[:aggregation])
raise InvalidStoreSettingsError,
"Metrics need a valid :aggregation key"
end
unless (metric_settings.keys - [:aggregation]).empty?
raise InvalidStoreSettingsError,
"Only :aggregation setting can be specified"
end
if metric_settings[:aggregation] == MOST_RECENT && metric_type != :gauge
raise InvalidStoreSettingsError,
"Only :gauge metrics support :most_recent aggregation"
end
end
class MetricStore
attr_reader :metric_name, :store_settings
def initialize(metric_name:, store_settings:, metric_settings:)
@metric_name = metric_name
@store_settings = store_settings
@values_aggregation_mode = metric_settings[:aggregation]
@store_opened_by_pid = nil
@lock = Monitor.new
end
# Synchronize is used to do a multi-process Mutex, when incrementing multiple
# values at once, so that the other process, reading the file for export, doesn't
# get incomplete increments.
#
# `in_process_sync`, instead, is just used so that two threads don't increment
# the same value and get a context switch between read and write leading to an
# inconsistency
def synchronize
in_process_sync do
internal_store.with_file_lock do
yield
end
end
end
def set(labels:, val:)
in_process_sync do
internal_store.write_value(store_key(labels), val.to_f)
end
end
def increment(labels:, by: 1)
if @values_aggregation_mode == DirectFileStore::MOST_RECENT
raise InvalidStoreSettingsError,
"The :most_recent aggregation does not support the use of increment"\
"/decrement"
end
key = store_key(labels)
in_process_sync do
value = internal_store.read_value(key)
internal_store.write_value(key, value + by.to_f)
end
end
def get(labels:)
in_process_sync do
internal_store.read_value(store_key(labels))
end
end
def all_values
stores_data = Hash.new{ |hash, key| hash[key] = [] }
# There's no need to call `synchronize` here. We're opening a second handle to
# the file, and `flock`ing it, which prevents inconsistent reads
stores_for_metric.each do |file_path|
begin
store = FileMappedDict.new(file_path, true)
store.all_values.each do |(labelset_qs, v, ts)|
# Labels come as a query string, and CGI::parse returns arrays for each key
# "foo=bar&x=y" => { "foo" => ["bar"], "x" => ["y"] }
# Turn the keys back into symbols, and remove the arrays
label_set = CGI::parse(labelset_qs).map do |k, vs|
[k.to_sym, vs.first]
end.to_h
stores_data[label_set] << [v, ts]
end
ensure
store.close if store
end
end
# Aggregate all the different values for each label_set
aggregate_hash = Hash.new { |hash, key| hash[key] = 0.0 }
stores_data.each_with_object(aggregate_hash) do |(label_set, values), acc|
acc[label_set] = aggregate_values(values)
end
end
private
def in_process_sync
@lock.synchronize { yield }
end
def store_key(labels)
if @values_aggregation_mode == ALL
labels[:pid] = process_id
end
labels.to_a.sort.map{|k,v| "#{CGI::escape(k.to_s)}=#{CGI::escape(v.to_s)}"}.join('&')
end
def internal_store
if @store_opened_by_pid != process_id
@store_opened_by_pid = process_id
@internal_store = FileMappedDict.new(filemap_filename)
else
@internal_store
end
end
# Filename for this metric's PStore (one per process)
def filemap_filename
filename = "metric_#{ metric_name }___#{ process_id }.bin"
File.join(@store_settings[:dir], filename)
end
def stores_for_metric
Dir.glob(File.join(@store_settings[:dir], "metric_#{ metric_name }___*"))
end
def process_id
Process.pid
end
def aggregate_values(values)
# Each entry in the `values` array is a tuple of `value` and `timestamp`,
# so for all aggregations except `MOST_RECENT`, we need to only take the
# first value in each entry and ignore the second.
if @values_aggregation_mode == MOST_RECENT
latest_tuple = values.max { |a,b| a[1] <=> b[1] }
latest_tuple.first # return the value without the timestamp
else
values = values.map(&:first) # Discard timestamps
if @values_aggregation_mode == SUM
values.inject { |sum, element| sum + element }
elsif @values_aggregation_mode == MAX
values.max
elsif @values_aggregation_mode == MIN
values.min
elsif @values_aggregation_mode == ALL
values.first
else
raise InvalidStoreSettingsError,
"Invalid Aggregation Mode: #{ @values_aggregation_mode }"
end
end
end
end
private_constant :MetricStore
# A dict of doubles, backed by an file we access directly as a byte array.
#
# The file starts with a 4 byte int, indicating how much of it is used.
# Then 4 bytes of padding.
# There's then a number of entries, consisting of a 4 byte int which is the
# size of the next field, a utf-8 encoded string key, padding to an 8 byte
# alignment, and then a 8 byte float which is the value, and then a 8 byte
# float which is the unix timestamp when the value was set.
class FileMappedDict
INITIAL_FILE_SIZE = 1024*1024
attr_reader :capacity, :used, :positions
def initialize(filename, readonly = false)
@positions = {}
@used = 0
open_file(filename, readonly)
@used = @f.read(4).unpack('l')[0] if @capacity > 0
if @used > 0
# File already has data. Read the existing values
with_file_lock { populate_positions }
else
# File is empty. Init the `used` counter, if we're in write mode
if !readonly
@used = 8
@f.seek(0)
@f.write([@used].pack('l'))
end
end
end
# Return a list of key-value pairs
def all_values
with_file_lock do
@positions.map do |key, pos|
@f.seek(pos)
value, timestamp = @f.read(16).unpack('dd')
[key, value, timestamp]
end
end
end
def read_value(key)
if [email protected]_key?(key)
init_value(key)
end
pos = @positions[key]
@f.seek(pos)
@f.read(8).unpack('d')[0]
end
def write_value(key, value)
if [email protected]_key?(key)
init_value(key)
end
now = Process.clock_gettime(Process::CLOCK_MONOTONIC)
pos = @positions[key]
@f.seek(pos)
@f.write([value, now].pack('dd'))
@f.flush
end
def close
@f.close
end
def with_file_lock
@f.flock(File::LOCK_EX)
yield
ensure
@f.flock(File::LOCK_UN)
end
private
def open_file(filename, readonly)
mode = if readonly
"r"
elsif File.exist?(filename)
"r+b"
else
"w+b"
end
@f = File.open(filename, mode)
if @f.size == 0 && !readonly
resize_file(INITIAL_FILE_SIZE)
end
@capacity = @f.size
end
def resize_file(new_capacity)
@f.truncate(new_capacity)
end
# Initialize a value. Lock must be held by caller.
def init_value(key)
# Pad to be 8-byte aligned.
padded = key + (' ' * (8 - (key.length + 4) % 8))
value = [padded.length, padded, 0.0, 0.0].pack("lA#{padded.length}dd")
while @used + value.length > @capacity
@capacity *= 2
resize_file(@capacity)
end
@f.seek(@used)
@f.write(value)
@used += value.length
@f.seek(0)
@f.write([@used].pack('l'))
@f.flush
@positions[key] = @used - 16
end
# Read position of all keys. No locking is performed.
def populate_positions
@f.seek(8)
while @f.pos < @used
padded_len = @f.read(4).unpack('l')[0]
key = @f.read(padded_len).unpack("A#{padded_len}")[0].strip
@positions[key] = @f.pos
@f.seek(16, :CUR)
end
end
end
end
end
end
end
| 35.332394 | 104 | 0.547317 |
d5f2d4dcb0c92d55245562dca4e9b852db5a6543 | 517 | # == Schema Information
#
# Table name: events
#
# id :integer not null, primary key
# type :string(255)
# org_handle :string(255)
# dxuser :string(255)
# param1 :string(255)
# param2 :string(255)
# param3 :string(255)
# created_at :datetime not null
# param4 :string(255)
#
class Event::UserAccessRequested < Event
alias_attribute :invitation_id, :param1
def self.create_for(invitation)
create(
invitation_id: invitation.id
)
end
end
| 20.68 | 53 | 0.622824 |
fffd9300c8c17747d7e8f6a77128c13fd4edeb8d | 11,136 | require_relative "chunk"
require_relative "scanner"
require_relative "opcode"
require_relative "token"
require_relative "object"
require_relative "local"
module Lr
  # Single-pass bytecode compiler (clox-style Pratt parser): pulls tokens
  # from Scanner and writes opcodes/constants into a Chunk.
  class Compiler
    # Operator precedence levels, lowest to highest.
    PREC_NONE = 0
    PREC_ASSIGNMENT = 1 # =
    PREC_OR = 2 # or
    PREC_AND = 3 # and
    PREC_EQUALITY = 4 # == !=
    PREC_COMPARISON = 5 # < > <= >=
    PREC_TERM = 6 # + -
    PREC_FACTOR = 7 # * /
    PREC_UNARY = 8 # ! -
    PREC_CALL = 9 # . ()
    PREC_PRIMARY = 10

    def initialize
      @current = nil  # token under examination
      @previous = nil # most recently consumed token
      @rules = define_rules
      @locals = []    # stack of Local entries for open scopes
      @local_count = 0
      @scope_depth = 0 # 0 == global scope
    end

    # Compiles +source+ and returns the resulting Chunk.
    # Pass debug = true to dump a disassembly listing to stdout.
    def compile(source, debug = false)
      @chunk = Chunk.new
      @scanner = Scanner.new(source)

      advance
      until match(Token::EOF)
        declaration
      end
      emit_return

      @chunk.disassemble("code") if debug
      @chunk
    end

    private

    def begin_scope
      @scope_depth += 1
    end

    # Closes the current scope, emitting pops for any locals declared in it.
    def end_scope
      @scope_depth -= 1

      while @local_count > 0 && @locals[@local_count - 1].depth > @scope_depth
        emit_byte(Opcode::OP_POP)
        @locals.pop
        @local_count -= 1
      end
    end

    def expression
      parse_precedence(PREC_ASSIGNMENT)
    end

    def block
      loop do
        break if check(Token::RIGHT_BRACE) || check(Token::EOF)

        declaration
      end

      consume(Token::RIGHT_BRACE, "Expect '}' after block.")
    end

    # var NAME ( = initializer )? ;
    def var_declaration
      global = parse_variable("Expect variable name.")

      if match(Token::EQUAL)
        expression
      else
        # A variable without an initializer defaults to nil, so push nil
        # via the OP_NIL opcode. (Bug fix: this previously emitted
        # Token::NIL, writing a token-type constant into the bytecode;
        # compare #literal, which emits Opcode::OP_NIL for Token::NIL.)
        emit_byte(Opcode::OP_NIL)
      end
      consume(Token::SEMICOLON, "Expect ';' after variable declaration.")

      define_variable(global)
    end

    def print_statement
      expression
      consume(Token::SEMICOLON, "Expect ';' after value.")
      emit_byte(Opcode::OP_PRINT)
    end

    def expression_statement
      expression
      consume(Token::SEMICOLON, "Expect ';' after expression.")
      emit_byte(Opcode::OP_POP)
    end

    # if (cond) then-stmt ( else else-stmt )?
    # Compiled with backpatched jumps; each branch pops the condition value.
    def if_statement
      consume(Token::LEFT_PAREN, "Expect '(' after 'if'.")
      expression
      consume(Token::RIGHT_PAREN, "Expect ')' after condition.")

      then_jump = emit_jump(Opcode::OP_JUMP_IF_FALSE)
      emit_byte(Opcode::OP_POP)
      statement

      else_jump = emit_jump(Opcode::OP_JUMP)
      patch_jump(then_jump)
      emit_byte(Opcode::OP_POP)

      statement if match(Token::ELSE)
      patch_jump(else_jump)
    end

    def declaration
      if match(Token::VAR)
        var_declaration
      else
        statement
      end
      # TODO: synchronize
    end

    def statement
      if match(Token::PRINT)
        print_statement
      elsif match(Token::IF)
        if_statement
      elsif match(Token::LEFT_BRACE)
        begin_scope
        block
        end_scope
      else
        expression_statement
      end
    end

    def grouping(assign)
      expression
      consume(Token::RIGHT_PAREN, "Expect ')' after expression.")
    end

    def unary(assign)
      type = @previous.type

      # Compile the operand.
      parse_precedence(PREC_UNARY)

      # Emit the operator instruction.
      case type
      when Token::BANG
        emit_byte(Opcode::OP_NOT)
      when Token::MINUS
        emit_byte(Opcode::OP_NEGATE)
      else
        return
      end
    end

    # Infix operators. !=, >= and <= are composed from their complements
    # followed by OP_NOT.
    def binary(assign)
      type = @previous.type
      rule = @rules[type]
      # One level higher makes binary operators left-associative.
      parse_precedence(rule.precedence + 1)

      case type
      when Token::BANG_EQUAL
        emit_bytes(Opcode::OP_EQUAL, Opcode::OP_NOT)
      when Token::EQUAL_EQUAL
        emit_byte(Opcode::OP_EQUAL)
      when Token::GREATER
        emit_byte(Opcode::OP_GREATER)
      when Token::GREATER_EQUAL
        emit_bytes(Opcode::OP_LESS, Opcode::OP_NOT)
      when Token::LESS
        emit_byte(Opcode::OP_LESS)
      when Token::LESS_EQUAL
        emit_bytes(Opcode::OP_GREATER, Opcode::OP_NOT)
      when Token::PLUS
        emit_byte(Opcode::OP_ADD)
      when Token::MINUS
        emit_byte(Opcode::OP_SUBTRACT)
      when Token::STAR
        emit_byte(Opcode::OP_MULTIPLY)
      when Token::SLASH
        emit_byte(Opcode::OP_DIVIDE)
      end
    end

    def literal(assign)
      case @previous.type
      when Token::FALSE
        emit_byte(Opcode::OP_FALSE)
      when Token::NIL
        emit_byte(Opcode::OP_NIL)
      when Token::TRUE
        emit_byte(Opcode::OP_TRUE)
      end
    end

    def number(assign)
      value = @previous.lexeme.to_f
      emit_constant(Value.number_val(value))
    end

    def string(assign)
      value = @previous.lexeme[1..-2] # trim quotation marks
      emit_constant(Value.obj_val(Lr::Object.string_obj(value)))
    end

    def variable(assign)
      named_variable(@previous, assign)
    end

    # Emits a get or set for +name+, preferring a local stack slot and
    # falling back to a global addressed by a name constant.
    def named_variable(name, assign)
      get_op = Opcode::OP_GET_LOCAL
      set_op = Opcode::OP_SET_LOCAL

      arg = resolve_local(name)
      if arg == -1
        arg = identifier_constant(name)
        get_op = Opcode::OP_GET_GLOBAL
        set_op = Opcode::OP_SET_GLOBAL
      end

      if assign && match(Token::EQUAL)
        expression
        emit_bytes(set_op, arg)
      else
        emit_bytes(get_op, arg)
      end
    end

    # Advances to the next token, reporting and skipping error tokens.
    def advance
      @previous = @current

      loop do
        @current = @scanner.scan_token()
        break if @current.type != Token::ERROR

        error_at_current(@current.lexeme)
      end
    end

    def consume(type, message)
      @current.type == type ? advance : error_at_current(message)
    end

    def match(type)
      return false unless check(type)

      advance
      true
    end

    def check(type)
      @current.type == type
    end

    def error_at_current(message)
      # TODO: write this.
    end

    def error(message)
      # TODO: write this.
    end

    def error_at(token, message)
      $stderr.print "[line #{token.line}] Error"
      if token.type == Token::EOF
        $stderr.print " at end"
      else
        $stderr.print " at '#{token.lexeme}'"
      end
      $stderr.puts ": #{message}"
    end

    def emit_byte(byte)
      @chunk.write(byte, @previous.line)
    end

    def emit_bytes(byte1, byte2)
      emit_byte(byte1)
      emit_byte(byte2)
    end

    # Emits a jump instruction with a placeholder operand; returns the
    # operand's offset so patch_jump can backpatch it later.
    def emit_jump(instruction)
      emit_byte(instruction)
      emit_byte(0xff)
      @chunk.count - 1
    end

    def emit_return
      emit_byte(Opcode::OP_RETURN)
    end

    def emit_constant(value)
      emit_bytes(Opcode::OP_CONSTANT, make_constant(value))
    end

    # Backpatches the placeholder written by emit_jump with the distance
    # from the operand to the current end of the chunk.
    def patch_jump(offset)
      jump = @chunk.count - offset - 1
      @chunk.code[offset] = jump
    end

    def make_constant(value)
      @chunk.add_constant(value)
    end

    # Pratt parser core: parses any expression at or above +precedence+.
    def parse_precedence(precedence)
      advance
      prefix = @rules[@previous.type].prefix
      unless prefix
        # TODO
        # error("Expect expression.")
        return
      end

      # Assignment is only legal when parsing at assignment precedence,
      # so the prefix/infix handlers receive this flag.
      assign = precedence <= PREC_ASSIGNMENT
      self.send(prefix, assign)

      while precedence <= @rules[@current.type].precedence
        advance
        infix = @rules[@previous.type].infix
        self.send(infix, assign)
      end

      if assign && match(Token::EQUAL)
        # TODO: error("Invalid assignment target.")
      end
    end

    # Consumes an identifier. Returns the name's constant index for
    # globals, or 0 for locals (locals are addressed by stack slot).
    def parse_variable(error_message)
      consume(Token::IDENTIFIER, error_message)

      declare_variable
      return 0 if @scope_depth > 0

      identifier_constant(@previous)
    end

    def mark_initialized
      @locals[@local_count - 1].depth = @scope_depth
    end

    def define_variable(global)
      if @scope_depth > 0
        mark_initialized
        return
      end

      emit_bytes(Opcode::OP_DEFINE_GLOBAL, global)
    end

    def identifier_constant(name)
      make_constant(Lr::Value.obj_val(name.lexeme))
    end

    def identifier_equal(a, b)
      a.lexeme == b.lexeme
    end

    # Returns the slot index of the innermost local named +name+, or -1
    # when no such local exists (i.e. the name must be a global).
    def resolve_local(name)
      index = @locals.rindex { |local| identifier_equal(local.name, name) }
      if index && @locals[index].depth == -1
        error("Can't read local variable in its own initializer.")
      end
      index ? index : -1
    end

    # Depth -1 marks the local as declared but not yet initialized.
    def add_local(name)
      @locals << Local.new(name, -1)
      @local_count += 1
    end

    def declare_variable
      return if @scope_depth == 0

      name = @previous
      # NOTE(review): clox walks locals innermost-first when checking for
      # redeclaration; this iterates from the bottom of the stack, so the
      # break condition may fire early for nested scopes — confirm intent.
      @locals.each do |local|
        break if local.depth != -1 && local.depth < @scope_depth

        if identifier_equal(name, local.name)
          error("Already a variable with this name in this scope.")
        end
      end

      add_local(name)
    end

    # Pratt parse table: token type -> (prefix handler, infix handler,
    # infix precedence).
    def define_rules
      rule = Struct.new(:prefix, :infix, :precedence)
      rules = {
        Token::LEFT_PAREN => rule.new(:grouping, nil, PREC_NONE),
        Token::RIGHT_PAREN => rule.new(nil, nil, PREC_NONE),
        Token::LEFT_BRACE => rule.new(nil, nil, PREC_NONE),
        Token::RIGHT_BRACE => rule.new(nil, nil, PREC_NONE),
        Token::COMMA => rule.new(nil, nil, PREC_NONE),
        Token::DOT => rule.new(nil, nil, PREC_NONE),
        Token::MINUS => rule.new(:unary, :binary, PREC_TERM),
        Token::PLUS => rule.new(nil, :binary, PREC_TERM),
        Token::SEMICOLON => rule.new(nil, nil, PREC_NONE),
        Token::SLASH => rule.new(nil, :binary, PREC_FACTOR),
        Token::STAR => rule.new(nil, :binary, PREC_FACTOR),
        Token::BANG => rule.new(:unary, nil, PREC_NONE),
        Token::BANG_EQUAL => rule.new(nil, :binary, PREC_EQUALITY),
        Token::EQUAL => rule.new(nil, nil, PREC_NONE),
        Token::EQUAL_EQUAL => rule.new(nil, :binary, PREC_EQUALITY),
        Token::GREATER => rule.new(nil, :binary, PREC_COMPARISON),
        Token::GREATER_EQUAL => rule.new(nil, :binary, PREC_COMPARISON),
        Token::LESS => rule.new(nil, :binary, PREC_COMPARISON),
        Token::LESS_EQUAL => rule.new(nil, :binary, PREC_COMPARISON),
        Token::IDENTIFIER => rule.new(:variable, nil, PREC_NONE),
        Token::STRING => rule.new(:string, nil, PREC_NONE),
        Token::NUMBER => rule.new(:number, nil, PREC_NONE),
        Token::AND => rule.new(nil, nil, PREC_NONE),
        Token::CLASS => rule.new(nil, nil, PREC_NONE),
        Token::ELSE => rule.new(nil, nil, PREC_NONE),
        Token::FALSE => rule.new(:literal, nil, PREC_NONE),
        Token::FOR => rule.new(nil, nil, PREC_NONE),
        Token::FUN => rule.new(nil, nil, PREC_NONE),
        Token::IF => rule.new(nil, nil, PREC_NONE),
        Token::NIL => rule.new(:literal, nil, PREC_NONE),
        Token::OR => rule.new(nil, nil, PREC_NONE),
        Token::PRINT => rule.new(nil, nil, PREC_NONE),
        Token::RETURN => rule.new(nil, nil, PREC_NONE),
        Token::SUPER => rule.new(nil, nil, PREC_NONE),
        Token::THIS => rule.new(nil, nil, PREC_NONE),
        Token::TRUE => rule.new(:literal, nil, PREC_NONE),
        Token::VAR => rule.new(nil, nil, PREC_NONE),
        Token::WHILE => rule.new(nil, nil, PREC_NONE),
        Token::ERROR => rule.new(nil, nil, PREC_NONE),
        Token::EOF => rule.new(nil, nil, PREC_NONE),
      }
      rules.freeze
    end
  end
end
| 25.309091 | 78 | 0.596713 |
7a7ba8ecbfe3f0d48e7776f4e26adc1dd3d6a89e | 596 | module CanTango
class PermissionEngine < Engine
  module Parser
    # Parses a permission rule whose target is a /regexp/ literal: the
    # regexp is matched against configured model names, and a statement
    # is generated for every matching model.
    class Regex < Rule
      # Compiled regexp built from the rule's target (set by #parse).
      attr_reader :regex

      # Strips the surrounding slashes from the target (in place),
      # compiles it, and returns the generated permission statement(s).
      def parse
        target.gsub!(/\/(.*)\//, '\1')
        @regex = /#{target}/
        build_statement
      end

      private

      # Models whose names match the rule's regexp.
      def targets
        config_models.by_reg_exp(regex)
      end

      # One "can/cannot :action, Model" style line per matching model,
      # joined with newlines. (method/action come from the Rule parent.)
      def build_statement
        targets.map do |target|
          "#{method} :#{action}, #{target.name}"
        end.join("\n")
      end

      def config_models
        CanTango.config.models
      end
    end
  end
end
end
| 16.555556 | 50 | 0.501678 |
f8a06486cec7e90fd6141ac1819a4b5f9bc1d703 | 747 | Pod::Spec.new do |s|
s.name = 'SteviaLayout'
s.version = "4.5.0"
s.summary = "Elegant view layout for iOS"
s.homepage = "https://github.com/s4cha/Stevia"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = 'S4cha'
s.source = { :git => "https://github.com/s4cha/Stevia.git",
:tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/sachadso'
s.source_files = "Source/*.swift"
s.requires_arc = true
s.ios.deployment_target = "8"
s.tvos.deployment_target = "10.2"
s.description = "Elegant view layout for iOS :leaves: - Auto layout code finally readable by a human being"
s.module_name = 'Stevia'
end
| 41.5 | 110 | 0.574297 |
03f3702054ad9de811ea41aeb4895816fad49fd6 | 95 | require 'rails_helper'
# Controller spec for Front::SubscribersController.
# TODO(review): no examples yet — add coverage for the controller's actions.
RSpec.describe Front::SubscribersController, type: :controller do
end
| 15.833333 | 65 | 0.810526 |
e842cb0ee7f83e443ff8eb855e626361eeba6e0a | 899 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'dvm/version'
# Gem specification for dvm (deploy version manager).
Gem::Specification.new do |spec|
  spec.name          = 'dvm'
  spec.version       = Dvm::VERSION
  spec.authors       = ['Xingjian Xu']
  spec.email         = ['[email protected]']
  spec.summary       = %q{Deploy version manager}
  spec.description   = %q{Deploy version manager}
  spec.homepage      = 'https://github.com/dotswing/dvm'
  spec.license       = 'MIT'

  # File lists come straight from git.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']

  spec.add_development_dependency 'bundler', '~> 1.6'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_runtime_dependency 'colorize', '~> 0.7'
end
| 34.576923 | 74 | 0.638487 |
26b91dd964337d33bad60b5daf1fb523b6695ad7 | 130 | class AddEncryptedPinToUsers < ActiveRecord::Migration[4.2]
def change
add_column :users, :encrypted_pin, :string
end
end
| 21.666667 | 59 | 0.761538 |
d573148413a05bb4b38de75312c0c5e00713de33 | 968 | require 'spec_helper'
require 'support/test_application_helper'
require 'rack/test'
# Rack-level spec: the middleware should disconnect the Redshift client
# after a request — except under rack.test, where connections are reused.
describe Redshift::Rails::Middleware do
  include Rack::Test::Methods

  let(:test_app) { TestApplicationHelper::TestApplication.new }
  let(:app) { Redshift::Rails::Middleware.new(test_app) }

  describe "GET '/'" do
    it 'should return 200 OK' do
      get '/'
      expect(last_response.status).to eq 200
    end

    # rack-test sets rack.test=true by default, so no disconnect expected.
    context 'when rack.test is true' do
      before do
        allow(Redshift::Client).to receive :disconnect
      end

      it 'should not call disconnect' do
        get '/'
        expect(Redshift::Client).to_not have_received :disconnect
      end
    end

    # Simulates a real (non-test) request environment.
    context 'when rack.test is false' do
      before do
        allow(Redshift::Client).to receive :disconnect
      end

      it 'should call disconnect' do
        get '/', {}, {'rack.test' => false}
        expect(Redshift::Client).to have_received(:disconnect).once
      end
    end
  end
end
| 24.2 | 67 | 0.646694 |
bfece53b4bdfa2c46e885ac5b71ce64b4f51b213 | 8,738 | # Provides full CRUD+ for Distributions, which are the primary way for inventory to leave a Diaperbank. Most
# Distributions are given out through community partners (either via Partnerbase, or to Partners-on-record). It's
# technically possible to also do Direct Services by having a Partner called "Direct Services" and then issuing
# Distributions to them, though it would lack some of the additional featuers and failsafes that a Diaperbank
# might want if they were doing direct services.
class DistributionsController < ApplicationController
  include DateRangeHelper
  include DistributionHelper

  # Renders a single distribution as an inline PDF named after the
  # partner and creation date.
  def print
    @distribution = Distribution.find(params[:id])
    respond_to do |format|
      format.any do
        pdf = DistributionPdf.new(current_organization, @distribution)
        send_data pdf.render,
          filename: format("%s %s.pdf", @distribution.partner.name, sortable_date(@distribution.created_at)),
          type: "application/pdf",
          disposition: "inline"
      end
    end
  end

  # Destroys a distribution, returning its inventory (delegated to
  # DistributionDestroyService).
  def destroy
    result = DistributionDestroyService.new(params[:id]).call

    if result.success?
      flash[:notice] = "Distribution #{params[:id]} has been reclaimed!"
    else
      flash[:error] = "Could not destroy distribution #{params[:id]}. Please contact technical support."
    end

    redirect_to distributions_path
  end

  # Filterable, paginated listing with aggregate totals; CSV export
  # covers the full (unpaginated) filtered set.
  def index
    setup_date_range_picker

    # Highlight the distribution created by the immediately preceding
    # request, if any (set in #create).
    @highlight_id = session.delete(:created_distribution_id)

    @distributions = current_organization
                     .distributions
                     .apply_filters(filter_params, helpers.selected_range)
    @paginated_distributions = @distributions.page(params[:page])
    @total_value_all_distributions = total_value(@distributions)
    @total_value_paginated_distributions = total_value(@paginated_distributions)
    @total_items_all_distributions = total_items(@distributions)
    @total_items_paginated_distributions = total_items(@paginated_distributions)

    @items = current_organization.items.alphabetized
    @storage_locations = current_organization.storage_locations.alphabetized
    @partners = @distributions.collect(&:partner).uniq.sort_by(&:name)
    @selected_item = filter_params[:by_item_id]
    @selected_partner = filter_params[:by_partner]
    @selected_status = filter_params[:by_state]
    @selected_location = filter_params[:by_location]
    # FIXME: one of these needs to be removed but it's unclear which at this point
    @statuses = Distribution.states.transform_keys(&:humanize)

    respond_to do |format|
      format.html
      format.csv do
        send_data Exports::ExportDistributionsCSVService.new(distribution_ids: @distributions.map(&:id)).generate_csv, filename: "Distributions-#{Time.zone.today}.csv"
      end
    end
  end

  # Creates a distribution via DistributionCreateService; on failure the
  # form is re-rendered with the validation/inventory error.
  def create
    result = DistributionCreateService.new(distribution_params.merge(organization: current_organization), request_id).call

    if result.success?
      session[:created_distribution_id] = result.distribution.id
      @distribution = result.distribution
      flash[:notice] = "Distribution created!"
      perform_inventory_check
      redirect_to(distribution_path(result.distribution)) && return
    else
      @distribution = result.distribution
      flash[:error] = insufficient_error_message(result.error.message)
      @distribution.line_items.build if @distribution.line_items.size.zero?
      @items = current_organization.items.alphabetized
      @storage_locations = current_organization.storage_locations.alphabetized
      render :new
    end
  end

  # New-distribution form; can be pre-filled from a partner request or a
  # donation.
  def new
    @distribution = Distribution.new
    if params[:request_id]
      @distribution.copy_from_request(params[:request_id])
    else
      @distribution.line_items.build
      @distribution.copy_from_donation(params[:donation_id], params[:storage_location_id])
    end
    @items = current_organization.items.alphabetized
    @storage_locations = current_organization.storage_locations.has_inventory_items.alphabetized
  end

  def show
    @distribution = Distribution.includes(:line_items).includes(:storage_location).find(params[:id])
    @line_items = @distribution.line_items
    @total_quantity = @distribution.total_quantity
    @total_package_count = @line_items.sum { |item| item.has_packages || 0 }

    # nil (rather than 0) suppresses the package count in the view.
    if @total_package_count.zero?
      @total_package_count = nil
    end
  end

  # Editing is restricted: only future distributions, or any distribution
  # when the user is an organization admin.
  def edit
    @distribution = Distribution.includes(:line_items).includes(:storage_location).find(params[:id])
    if ([email protected]? && @distribution.future?) || current_user.organization_admin?
      @distribution.line_items.build if @distribution.line_items.size.zero?
      @items = current_organization.items.alphabetized
      @storage_locations = current_organization.storage_locations.has_inventory_items.alphabetized
    else
      redirect_to distributions_path, error: 'To edit a distribution,
      you must be an organization admin or the current date must be later than today.'
    end
  end

  # Updates a distribution; notifies the partner of changes when they
  # opted into reminders.
  def update
    @distribution = Distribution.includes(:line_items).includes(:storage_location).find(params[:id])
    result = DistributionUpdateService.new(@distribution, distribution_params).call

    if result.success?
      if result.resend_notification? && @distribution.partner&.send_reminders
        send_notification(current_organization.id, @distribution.id, subject: "Your Distribution Has Changed", distribution_changes: result.distribution_content.changes)
      end
      schedule_reminder_email(@distribution)
      perform_inventory_check
      redirect_to @distribution, notice: "Distribution updated!"
    else
      flash[:error] = insufficient_error_message(result.error.message)
      @distribution.line_items.build if @distribution.line_items.size.zero?
      @items = current_organization.items.alphabetized
      @storage_locations = current_organization.storage_locations.alphabetized
      render :edit
    end
  end

  # TODO: This needs a little more context. Is it JSON only? HTML?
  def schedule
    @pick_ups = current_organization.distributions
  end

  # Marks a distribution complete (idempotent-safe: re-completing fails).
  def picked_up
    distribution = current_organization.distributions.find(params[:id])

    if !distribution.complete? && distribution.complete!
      flash[:notice] = 'This distribution has been marked as being completed!'
    else
      flash[:error] = 'Sorry, we encountered an error when trying to mark this distribution as being completed'
    end

    redirect_back(fallback_location: distribution_path)
  end

  # Pick-up schedule for a single day, with per-item totals.
  # pickup_date / pickup_day_params are presumably provided by
  # DistributionHelper — TODO confirm.
  def pickup_day
    @pick_ups = current_organization.distributions.during(pickup_date).order(issued_at: :asc)
    @daily_items = daily_items(@pick_ups)
    @selected_date = pickup_day_params[:during]&.to_date || Time.zone.now.to_date
  end

  private

  def insufficient_error_message(details)
    "Sorry, we weren't able to save the distribution. \n #{@distribution.errors.full_messages.join(', ')} #{details}"
  end

  def send_notification(org, dist, subject: 'Your Distribution', distribution_changes: {})
    PartnerMailerJob.perform_now(org, dist, subject, distribution_changes)
  end

  # Queues a reminder for the day before pickup, unless the distribution
  # is in the past or the partner opted out.
  def schedule_reminder_email(distribution)
    return if distribution.past? || !distribution.partner.send_reminders

    DistributionMailer.delay_until(distribution.issued_at - 1.day).reminder_email(distribution.id)
  end

  def distribution_params
    params.require(:distribution).permit(:comment, :agency_rep, :issued_at, :partner_id, :storage_location_id, :reminder_email_enabled, :delivery_method, line_items_attributes: %i(item_id quantity _destroy))
  end

  def request_id
    params.dig(:distribution, :request_attributes, :id)
  end

  def total_items(distributions)
    LineItem.where(itemizable_type: "Distribution", itemizable_id: distributions.pluck(:id)).sum('quantity')
  end

  def total_value(distributions)
    distributions.sum(&:value_per_itemizable)
  end

  # Aggregates line items across the day's pick-ups into per-item
  # name/quantity/package-count rows.
  def daily_items(pick_ups)
    item_groups = LineItem.where(itemizable_type: "Distribution", itemizable_id: pick_ups.pluck(:id)).group_by(&:item_id)
    item_groups.map do |_id, items|
      {
        name: items.first.item.name,
        quantity: items.sum(&:quantity),
        package_count: items.sum { |item| item.package_count.to_i }
      }
    end
  end

  helper_method \
    def filter_params
      return {} unless params.key?(:filters)

      params.require(:filters).permit(:by_item_id, :by_partner, :by_state, :by_location)
    end

  # Surfaces inventory warnings/errors (e.g. minimum quantities) after a
  # create/update.
  def perform_inventory_check
    inventory_check_result = InventoryCheckService.new(@distribution).call

    if inventory_check_result.error.present?
      flash[:error] = inventory_check_result.error
    end
    if inventory_check_result.alert.present?
      flash[:alert] = inventory_check_result.alert
    end
  end
end
| 38.493392 | 207 | 0.742733 |
d5de14f6890fb85b8c0b66ffef5f4ccda2515cdf | 1,318 | # frozen_string_literal: true
module GraphQL
  module Relay
    # Wraps a connection field's resolve function so its returned nodes
    # are wrapped in the appropriate Relay connection object.
    class ConnectionResolve
      def initialize(field, underlying_resolve)
        @field = field
        @underlying_resolve = underlying_resolve
        @max_page_size = field.connection_max_page_size
      end

      def call(obj, args, ctx)
        # in a lazy resolve hook, obj is the promise,
        # get the object that the promise was
        # originally derived from
        parent = ctx.object

        nodes = @underlying_resolve.call(obj, args, ctx)

        # Pass through nil, lazy values and skips untouched; also avoid
        # double-wrapping when a connection was already built upstream.
        if nodes.nil? || ctx.schema.lazy?(nodes) || nodes.is_a?(GraphQL::Execution::Execute::Skip) || ctx.wrapped_connection
          nodes
        else
          ctx.wrapped_connection = true
          build_connection(nodes, args, parent, ctx)
        end
      end

      private

      # Builds the connection wrapper for +nodes+. An ExecutionError is
      # registered on the context and resolves to nil instead.
      def build_connection(nodes, args, parent, ctx)
        if nodes.is_a? GraphQL::ExecutionError
          ctx.add_error(nodes)
          nil
        else
          # Unwrap class-based schema objects so the connection sees the
          # underlying application object as its parent.
          if parent.is_a?(GraphQL::Schema::Object)
            parent = parent.object
          end
          connection_class = GraphQL::Relay::BaseConnection.connection_for_nodes(nodes)
          connection_class.new(nodes, args, field: @field, max_page_size: @max_page_size, parent: parent, context: ctx)
        end
      end
    end
  end
end
| 29.954545 | 124 | 0.628983 |
6a1dc0bf6403f7d841b8339019a213e7345c1e07 | 2,975 | require_relative "spec_helper"
# Specs for Sequel's pg_inet extension, run against a mock postgres
# connection (no real database required).
describe "pg_inet extension" do
  # Some Ruby builds lack IPv6 support; guard the IPv6 example on that.
  ipv6_broken = (IPAddr.new('::1'); false) rescue true

  before do
    @db = Sequel.connect('mock://postgres')
    @db.extend_datasets{def quote_identifiers?; false end}
    @db.extension(:pg_array, :pg_inet)
  end

  it "should literalize IPAddr v4 instances to strings correctly" do
    @db.literal(IPAddr.new('127.0.0.1')).must_equal "'127.0.0.1/32'"
    @db.literal(IPAddr.new('127.0.0.0/8')).must_equal "'127.0.0.0/8'"
  end

  it "should literalize IPAddr v6 instances to strings correctly" do
    @db.literal(IPAddr.new('2001:4f8:3:ba::/64')).must_equal "'2001:4f8:3:ba::/64'"
    @db.literal(IPAddr.new('2001:4f8:3:ba:2e0:81ff:fe22:d1f1')).must_equal "'2001:4f8:3:ba:2e0:81ff:fe22:d1f1/128'"
  end unless ipv6_broken

  # 869/650 are the pg OIDs for inet/cidr.
  it "should set up conversion procs correctly" do
    cp = @db.conversion_procs
    cp[869].call("127.0.0.1").must_equal IPAddr.new('127.0.0.1')
    cp[650].call("127.0.0.1").must_equal IPAddr.new('127.0.0.1')
  end

  # 1041/651/1040 are the array OIDs for inet[]/cidr[]/macaddr[].
  it "should set up conversion procs for arrays correctly" do
    cp = @db.conversion_procs
    cp[1041].call("{127.0.0.1}").must_equal [IPAddr.new('127.0.0.1')]
    cp[651].call("{127.0.0.1}").must_equal [IPAddr.new('127.0.0.1')]
    cp[1040].call("{127.0.0.1}").must_equal ['127.0.0.1']
  end

  it "should not affect literalization of custom objects" do
    o = Object.new
    def o.sql_literal(ds) 'v' end
    @db.literal(o).must_equal 'v'
  end

  it "should support using IPAddr as bound variables" do
    @db.bound_variable_arg(1, nil).must_equal 1
    @db.bound_variable_arg(IPAddr.new('127.0.0.1'), nil).must_equal '127.0.0.1/32'
  end

  it "should support using IPAddr instances in array types in bound variables" do
    @db.bound_variable_arg(Sequel.pg_array([IPAddr.new('127.0.0.1')]), nil).must_equal '{"127.0.0.1/32"}'
  end

  it "should parse inet/cidr type from the schema correctly" do
    @db.fetch = [{:name=>'id', :db_type=>'integer'}, {:name=>'i', :db_type=>'inet'}, {:name=>'c', :db_type=>'cidr'}]
    @db.schema(:items).map{|e| e[1][:type]}.must_equal [:integer, :ipaddr, :ipaddr]
  end

  it "should set :ruby_default schema entries if default value is recognized" do
    @db.fetch = [{:name=>'id', :db_type=>'integer', :default=>'1'}, {:name=>'t', :db_type=>'inet', :default=>"'127.0.0.1'::inet"}]
    s = @db.schema(:items)
    s[1][1][:ruby_default].must_equal IPAddr.new('127.0.0.1')
  end

  it "should support typecasting for the ipaddr type" do
    ip = IPAddr.new('127.0.0.1')
    @db.typecast_value(:ipaddr, ip).must_be_same_as(ip)
    @db.typecast_value(:ipaddr, ip.to_s).must_equal ip
    proc{@db.typecast_value(:ipaddr, '')}.must_raise(Sequel::InvalidValue)
    proc{@db.typecast_value(:ipaddr, 1)}.must_raise(Sequel::InvalidValue)
  end

  it "should return correct results for Database#schema_type_class" do
    @db.schema_type_class(:ipaddr).must_equal IPAddr
    @db.schema_type_class(:integer).must_equal Integer
  end
end
| 40.753425 | 130 | 0.674622 |
ac2cd7e5f4d1a287aefb0eece92c81143f8a85e5 | 2,706 | # seed data를 생성하기 위해 존재하는 service입니다.
module Sellers
class OrderCreateService
attr_accessor :order_info, :ship_info, :payment, :cart, :seller
attr_reader :errors
def initialize(seller, order_info_source, ship_info_source, payment_source)
@seller = seller
@order_info = OrderInfo.new(order_info_source)
@ship_info = ShipInfo.new(ship_info_source)
@payment = Payment.new(payment_source)
@cart = order_info.cart
@errors = nil
end
def save
return true if cart.cannot_create_order
begin
ApplicationRecord.transaction do
# 0. Associations
payment.order_info = order_info
ship_info.order_info = order_info
# 1. save ship_info
valid_save ship_info
# 2. save order_info
order_info.enc_id ||= OrderInfo.gen_enc_id
order_info.ordered_at ||= DateTime.now
valid_save order_info
# 3. save payment
payment.write_self
valid_save payment
# 4. update cart status
checkout_cart
valid_save cart
# 5. capture price fields into cart items
cart.items.each(&:capture_price_fields!)
# 6. update seller's info
item_sold_papers = cart.items.map(&:item_sold_paper).compact
item_sold_papers.each(&:order!)
# 7. update seller info cache
item_sold_papers.each do |paper|
paper.seller_info.update_counter_cache paper
end
raise ActiveRecord::Rollback if errors
end
rescue ActiveRecord::RecordNotUnique => e
ap e.message
errors = e
dup_order_defence
return true
end
OrderInfo.where(id: order_info.id).any?
end
def rollback!
ship_info.destroy if ship_info.persisted?
payment.destroy if payment.persisted?
recall_cart if cart
order_info.destroy
end
def self.checkout_cart(cart)
cart.update(
order_status: 'pay',
current: false
)
end
private
def valid_save(record)
record.save! if valid_record? record
end
def valid_record?(record)
if record.invalid?
@errors = record.errors
false
else
true
end
end
def checkout_cart
cart.order_status = payment.paid ? 'paid' : 'pay'
cart.current = false
end
def recall_cart
cart.update(
order_status: 0,
current: true
)
end
def dup_order_defence
@cart = cart.reload
@order_info = cart.order_info
@ship_info = order_info.ship_info
@payment = order_info.payment
end
end
end | 23.946903 | 79 | 0.616408 |
ed31f2b142bff1704e993e02e4b3e7bc2b91aae5 | 83 | require "active_job_resque_solo/version"
require 'active_job/plugins/resque/solo'
| 20.75 | 40 | 0.843373 |
3322b30eab35357ecccace14b8d8891f308c38c5 | 338 | class LandingController < Catherine::ApplicationController
def index
if @current_user
if @current_user.super_admin? || @current_user.has_role?("Site Admin")
redirect_to catherine.admin_dashboard_url
else
redirect_to catherine.user_dashboard_url
end
else
redirect_to usman.sign_in_url
end
end
end
| 19.882353 | 74 | 0.745562 |
38a29b3a10fba455d06dea1fd5ad3526685c8f6e | 1,404 | # encoding: utf-8
# Gem specification for twitter-text (Twitter's text handling library).
Gem::Specification.new do |s|
  s.name = "twitter-text"
  s.version = "1.14.3"
  s.authors = ["Matt Sanford", "Patrick Ewing", "Ben Cherry", "Britt Selvitelle",
               "Raffi Krikorian", "J.P. Cummins", "Yoshimasa Niwa", "Keita Fujii", "James Koval"]
  s.email = ["[email protected]", "[email protected]", "[email protected]", "[email protected]",
             "[email protected]", "[email protected]", "[email protected]", "[email protected]", "[email protected]"]
  s.homepage = "http://twitter.com"
  s.description = s.summary = "A gem that provides text handling for Twitter"
  s.license = "Apache 2.0"

  s.platform = Gem::Platform::RUBY
  # NOTE(review): `has_rdoc` is deprecated and ignored by modern RubyGems.
  s.has_rdoc = true
  s.summary = "Twitter text handling library"

  s.add_development_dependency "multi_json", "~> 1.3"
  s.add_development_dependency "nokogiri", "~> 1.5.10"
  s.add_development_dependency "rake", "~> 11.1" # 12 removes method named `last_comment`
  s.add_development_dependency "rdoc"
  s.add_development_dependency "rspec", "~> 2.14.0"
  s.add_development_dependency "simplecov", "~> 0.8.0"
  s.add_runtime_dependency "unf", "~> 0.1.0"

  # tld_lib.yml is generated, so it is appended to the git file list.
  s.files = `git ls-files`.split("\n") + ['lib/assets/tld_lib.yml']
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.require_paths = ["lib"]
end
| 45.290323 | 114 | 0.654558 |
911d41a1d6b28789578c1253a7de9eafab1f8216 | 1,092 | # coding: us-ascii
require File.expand_path('../lib/struct/alias_member/version', __FILE__)
# Gem specification for struct-alias_member.
Gem::Specification.new do |gem|
  # specific
  gem.description   = %q{Struct will be able to alias the members name.}
  gem.summary       = gem.description.dup
  gem.homepage      = 'http://kachick.github.com/struct-alias_member'
  gem.license       = 'MIT'
  gem.name          = 'struct-alias_member'
  gem.version       = Struct::Alias_Member::VERSION.dup

  gem.add_development_dependency 'test-declare', '~> 0.0.3'
  gem.add_development_dependency 'yard', '>= 0.9.20', '< 2'
  gem.add_development_dependency 'rake', '>= 10', '< 20'
  gem.add_development_dependency 'bundler', '>= 2', '< 3'

  # Rubinius needs the rubysl stdlib shim.
  if RUBY_ENGINE == 'rbx'
    gem.add_dependency 'rubysl', '~> 2.0'
  end

  # common
  gem.authors       = ['Kenichi Kamiya']
  gem.email         = ['[email protected]']
  gem.files         = `git ls-files`.split($\)
  gem.executables   = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ['lib']
end
| 31.2 | 75 | 0.639194 |
b988a92718251f21105a2e1d57942bcaacd0bb22 | 415 | module Nurego
class Offering < APIResource
def self.retrieve(id, api_key = nil)
raise NotImplementedError.new("Offering cannot be retrieved with ID. Retrieve an offering using Offering.current")
end
def self.current(params = {}, api_key = nil)
response, api_key = Nurego.request(:get, self.url, api_key, params)
Util.convert_to_nurego_object(response, api_key)
end
end
end
| 25.9375 | 120 | 0.710843 |
61b7b3dbaae1b05b9fb61a4e14a02f3b48ce83fb | 1,692 | ['../dispatchresponsepayload', '../dispatcherrorpayload'].each do |f|
path = File.absolute_path(File.dirname(__FILE__) + '/' + f)
puts "Requiring: #{path}."
require path
end
# Crash the process if any background thread dies, rather than failing silently.
Thread.abort_on_exception = true
# Loads plugin files at startup and routes incoming payloads to the
# plugin/action they name.
class Dispatcher
  # Loads every bundled plugin from ../plugins, plus any found in
  # +extra_plugin_dir+. A plugin that fails to load is reported and
  # skipped so one bad file cannot take down the dispatcher.
  def initialize(extra_plugin_dir = nil)
    puts "Loading jar-file plugins."
    directory = File.dirname(__FILE__)
    plugin_directory = File.absolute_path(directory + "/../plugins")
    Dir[plugin_directory + '/*.rb'].each do |plugin|
      begin
        puts "Loading plugin: #{plugin}."; require plugin
      rescue ScriptError, StandardError => e
        # ScriptError covers LoadError/SyntaxError from a broken plugin
        # file; StandardError covers errors raised while it runs.
        # (Was `rescue Exception`, which also swallowed signals/exit.)
        puts "ERROR: Couldn't load plugin #{plugin}: #{e}."
      end
    end
    puts "Finished loading jar-file plugins."
    if extra_plugin_dir
      puts "Loading non-jar-file plugins."
      Dir[extra_plugin_dir + "/*.rb"].each do |plugin|
        begin
          puts "Loading external plugin: #{plugin}."; require plugin
        rescue ScriptError, StandardError => e
          puts "ERROR: Couldn't load external plugin #{plugin}: #{e}."
        end
      end
      puts "Finished loading non-jar-file plugins."
    end
  end

  # Dispatches +payload+ to its plugin/action. Returns a ResponsePayload
  # on success, or an ErrorResponse for unknown plugins/actions or when
  # the plugin raises.
  def dispatch(payload)
    unless (plugin = Plugins[payload.plugin])
      return ErrorResponse.new(:error_message => "#{payload.plugin} does not exist.")
    end
    unless (plugin.action_exists?(action = payload.action))
      return ErrorResponse.new(:error_message => "#{payload.plugin} does not support #{action}.")
    end
    begin
      puts "Getting response: plugin = #{payload.plugin}, action = #{action}."
      ResponsePayload.new(:plugin_response => plugin.act(action, payload.arguments))
    rescue StandardError => e
      # Was `rescue Exception`: a plugin bug should surface as an error
      # response, but signals and system exit must not be swallowed.
      ErrorResponse.new(:error_message => e.message)
    end
  end
end
| 32.538462 | 97 | 0.651891 |
618faa9fbdac90543ac69c2e05b788bfc34eef33 | 1,567 | #
# Be sure to run `pod lib lint MyTools.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name             = 'MyTools'
  s.version          = '0.1.0'
  # TODO(review): summary/description are still `pod lib create` template
  # placeholders — fill in before publishing.
  s.summary          = 'A short description of MyTools.'

# This description is used to generate tags and improve search results.
#   * Think: What does it do? Why did you write it? What is the focus?
#   * Try to keep it short, snappy and to the point.
#   * Write the description between the DESC delimiters below.
#   * Finally, don't worry about the indent, CocoaPods strips it!

  s.description      = <<-DESC
TODO: Add long description of the pod here.
                       DESC

  s.homepage         = 'https://github.com/SONG808/testcocoapods'
  # s.screenshots     = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'SONG' => '[email protected]' }
  s.source           = { :git => 'https://github.com/SONG808/testcocoapods.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'

  s.ios.deployment_target = '8.0'

  s.source_files = 'MyTools/Classes/**/*'

  # s.resource_bundles = {
  #   'MyTools' => ['MyTools/Assets/*.png']
  # }

  s.public_header_files = 'MyTools/Classes/MyTools.h'
  s.frameworks = 'UIKit'
  # s.dependency 'AFNetworking', '~> 2.3'
end
| 36.44186 | 105 | 0.6388 |
032283736e96e67f37dde63f3b6a3aa4cfbc88fc | 2,200 | # Copyright © 2020 MUSC Foundation for Research Development~
# All rights reserved.~
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:~
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.~
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following~
# disclaimer in the documentation and/or other materials provided with the distribution.~
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products~
# derived from this software without specific prior written permission.~
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,~
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT~
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL~
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS~
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR~
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.~
# Pull protocol updates from SPARC every hour.
every 1.hour, :roles => [:app] do
  rake 'update_from_sparc'
end

# eIRB sync at 25 minutes past every hour. NOTE(review): this 24-entry
# daily list is effectively "every hour at :25" — consider simplifying.
every 1.day, at: ["1:25 am", "2:25 am", "3:25 am", "4:25 am", "5:25 am", "6:25 am", "7:25 am", "8:25 am", "9:25 am", "10:25 am", "11:25 am", "12:25 pm", "1:25 pm", "2:25 pm", "3:25 pm", "4:25 pm", "5:25 pm", "6:25 pm", "7:25 pm", "8:25 pm", "9:25 pm", "10:25 pm", "11:25 pm", "12:25 am"], :roles => [:app] do
  rake 'update_from_eirb_db'
end

# Nightly Coeus sync.
every 1.day, at: ['10:40 pm'], :roles => [:app] do
  rake 'update_from_coeus_db'
end

# Nightly Cayuse sync.
every 1.day, at: ['10:50 pm'], :roles => [:app] do
  rake 'update_from_cayuse_db'
end

# Watchdog for the delayed_job workers.
every 1.hour, :roles => [:app] do
  rake 'delayed_job_monitor'
end
| 55 | 308 | 0.730909 |
6a3f0990070b9816791916f9c6d77a162bd57a88 | 1,636 | class ZonesController < ApplicationController
# Load the requested zone whenever an :id param is present, and highlight
# the zones tab in the navigation.
before_action do
  @zone = Zone.find(params[:id]) if params[:id]
end
before_action do
  @active_nav = :zones
end

# GET /zones — most recently updated first, with pending changes preloaded.
def index
  @zones = Zone.includes(:pending_changes).order(updated_at: :desc)
end

# GET /zones/:id — the zone's records in display order.
def show
  @records = @zone.ordered_records.to_a
end

# GET /zones/:id/zone_file — the generated zone file as plain text.
def zone_file
  render plain: @zone.generate_zone_file
end
# GET /zones/new — blank zone for the form.
def new
  @zone = Zone.new
end

# POST /zones — create a zone from the whitelisted params.
def create
  @zone = Zone.new(safe_params)
  unless @zone.save
    render 'new'
    return
  end
  redirect_to @zone, notice: "#{@zone.name} has been created successfully"
end

# PATCH /zones/:id — update a zone from the whitelisted params.
def update
  unless @zone.update(safe_params)
    render 'edit'
    return
  end
  redirect_to @zone, notice: "#{@zone.name} has been updated successfully"
end
# DELETE /zones/:id — remove the zone and return to the root page.
def destroy
  @zone.destroy
  redirect_to root_path, notice: "#{@zone.name} has been removed successfully"
end

# GET shows the pending changes; POST publishes them (a full publish when
# there are no pending changes) and renders the result.
def publish
  unless request.post?
    @changes = Change.pending.order(:created_at)
    return
  end
  publisher = Bound::Publisher.new(all: Change.pending.empty?)
  publisher.publish
  @result = publisher.result
  render 'publish_results'
end

# Preview pasted records; when the :import param is set, actually import
# them and report the per-record outcome.
def import
  @import = Bound::Import.new(@zone, params[:records])
  return if params[:import].blank?
  stats = @import.import
  redirect_to @zone, notice: "Imported #{stats[:imported]} of #{stats[:total]} (#{stats[:duplicates]} duplicates, #{stats[:errored]} errored)"
end
private
# Strong-parameters whitelist shared by #create and #update.
def safe_params
params.require(:zone).permit(:name, :primary_ns, :email_address, :refresh_time, :retry_time, :expiration_time, :max_cache, :ttl)
end
end
| 23.371429 | 149 | 0.655868 |
38cec53916bc6948a502fc2bf88606a856619fda | 1,340 | require 'json'
module Sprue
  # Converts entity attribute hashes to and from the flat string lists used
  # for storage. Stateless; every method doubles as a module function via
  # `extend self`.
  #
  # Fixes vs. the previous revision:
  # * nested module definition instead of compact `module Sprue::Serializer`,
  #   which raises NameError when Sprue has not been loaded yet;
  # * `&&`/`||` instead of `and`/`or` (low-precedence operator hazard);
  # * `each_slice(2)` instead of manual index/modulo pairing.
  module Serializer
    # == Module Methods =======================================================

    # Flattens +hash+ into an alternating [key, value, ...] list of strings.
    # NOTE: Array#flatten is deep, so array-valued entries are flattened too,
    # preserving the historical behavior.
    def hash_to_list(hash)
      hash.to_a.flatten.map(&:to_s)
    end

    # Rebuilds a hash from an alternating [key, value, ...] list. A trailing
    # unpaired key maps to nil.
    def list_to_hash(list)
      list.each_slice(2).each_with_object({ }) do |(key, value), hash|
        hash[key] = value
      end
    end

    # Serializes +attributes+ according to +attribute_options+ (a hash of
    # name => { :serialize => proc, :allow_nil => bool, ... }).
    #
    # Returns [ident, values] where values alternates attribute name and
    # serialized value; nil values become '' unless :allow_nil is set.
    def serialize(ident, attributes, attribute_options)
      values = attribute_options.flat_map do |name, options|
        value = attributes[name]

        serialized =
          if (!value.nil? || options[:allow_nil])
            options[:serialize].call(value)
          else
            ''
          end

        [ name.to_s, serialized ]
      end

      [ ident, values ]
    end

    # Deserializes +values+ (a name/value list or hash) into an attributes
    # hash that always carries :ident. '' becomes nil unless :allow_blank;
    # nil stays nil unless :allow_nil permits deserializing it.
    def deserialize(ident, values, attribute_options)
      attributes = {
        :ident => ident.respond_to?(:ident) ? ident.ident : ident
      }

      values = list_to_hash(values) if values.is_a?(Array)

      attribute_options.each do |name, options|
        next if (name == :ident)

        value = values[name.to_s]

        attributes[name] =
          if (value == '' && !options[:allow_blank])
            nil
          elsif (!value.nil? || (value.nil? && options[:allow_nil]))
            options[:deserialize].call(value)
          else
            nil
          end
      end

      attributes
    end

    extend self
  end
end
| 18.873239 | 77 | 0.55 |
1c8093fcfa9cd1dd1c05be94ca163c5464e9ab7e | 932 | module Datadog
module Contrib
  # Patchable supplies the shared scaffolding every integration gets when it
  # includes this module: version discovery, a compatibility gate, and a safe
  # `patch` entry point that warns instead of crashing when patching is not
  # possible.
  module Patchable
    def self.included(base)
      base.send(:extend, ClassMethods)
      base.send(:include, InstanceMethods)
    end

    # Class-level hooks an integration is expected to override.
    module ClassMethods
      # Version of the target library, or nil when it is not installed.
      def version
        nil
      end

      # True when the target library is installed.
      def present?
        !version.nil?
      end

      # True when the library is installed and the Ruby runtime meets the
      # minimum supported version.
      def compatible?
        ruby_new_enough = Gem::Version.new(RUBY_VERSION) >= Gem::Version.new(VERSION::MINIMUM_RUBY_VERSION)
        ruby_new_enough && present?
      end
    end

    # Instance-level hooks an integration is expected to override.
    module InstanceMethods
      # The patcher object that performs the actual monkey-patching, or nil.
      def patcher
        nil
      end

      # Applies the patch when possible; otherwise logs a warning and bails.
      def patch
        unless self.class.compatible? && !patcher.nil?
          Datadog::Logger.log.warn("Unable to patch #{self.class.name}")
          return
        end
        patcher.patch
      end
    end
  end
end
end
| 21.674419 | 103 | 0.575107 |
08bffc0a7094564d137d893bed1b91adbf168417 | 1,321 | module Spree
# Spree user account authenticated via Devise (Authlogic-compatible SHA512
# encryptor for passwords migrated from legacy stores). Soft-deleted through
# acts_as_paranoid; destroyed accounts have their credentials scrambled so
# the email/login can be reused.
#
# Fix vs. previous revision: removed the unused local `users_table_name`
# (assigned from User.table_name but never referenced).
class User < Spree::Base
  include UserAddress
  include UserMethods
  include UserPaymentSource

  devise :database_authenticatable, :registerable, :recoverable,
         :rememberable, :trackable, :validatable, :encryptable, encryptor: 'authlogic_sha512'
  devise :confirmable if Spree::Auth::Config[:confirmable]

  acts_as_paranoid

  after_destroy :scramble_email_and_password
  before_validation :set_login

  # Captured in a local so the scope's lambda closes over the table name at
  # class-definition time.
  roles_table_name = Role.table_name

  scope :admin, -> { includes(:spree_roles).where("#{roles_table_name}.name" => "admin") }

  # True when at least one admin account exists.
  def self.admin_created?
    User.admin.count > 0
  end

  def admin?
    has_spree_role?('admin')
  end

  protected

  # Require a password only for new records or when one is being changed.
  def password_required?
    !persisted? || password.present? || password_confirmation.present?
  end

  private

  # for now force login to be same as email, eventually we will make this configurable, etc.
  def set_login
    self.login ||= self.email if self.email
  end

  # Frees the email/login for reuse after a soft delete and invalidates the
  # old credentials with a random password.
  def scramble_email_and_password
    self.email = SecureRandom.uuid + "@example.net"
    self.login = self.email
    self.password = SecureRandom.hex(8)
    self.password_confirmation = self.password
    self.save
  end
end
end
| 26.42 | 98 | 0.67676 |
7947f29295efa76cbc905d749d6f6b6acb3a8358 | 1,043 | Pod::Spec.new do |s|
# Pod identity and release metadata.
s.name = 'GoogleTagManager'
s.version = '3.02'
s.summary = 'Google Tag Manager SDK.'
s.description = 'Google Tag Manager enables developers to change configuration values in their mobile applications using the Google Tag Manager interface without having to rebuild and resubmit application binaries to app marketplaces.'
s.homepage = 'http://developers.google.com/tag-manager/ios'
# Proprietary license embedded inline (no LICENSE file ships with the zip).
s.license = {
:type => 'Copyright',
:text => <<-LICENSE
Copyright 2013 Google, Inc. All rights reserved.
LICENSE
}
s.author = 'Google Inc.'
# Binary distribution: headers + static library pulled from Google's CDN.
s.source = { :http => 'http://dl.google.com/googleanalyticsservices/GoogleAnalyticsServicesiOS_3.02.zip', :flatten => true }
s.platform = :ios
# System frameworks the static library links against.
s.frameworks = 'CFNetwork', 'CoreData', 'SystemConfiguration', 'AdSupport'
s.source_files = 'GoogleTagManager/Library/*.h'
# Keep the prebuilt archive in place and link it via -lGoogleAnalyticsServices.
s.preserve_path = 'libGoogleAnalyticsServices.a'
s.library = 'GoogleAnalyticsServices'
# -ObjC forces loading of category methods from the static library.
s.xcconfig = { 'OTHER_LDFLAGS' => '-ObjC' , 'LIBRARY_SEARCH_PATHS' => '"$(PODS_ROOT)/GoogleTagManager"'}
end
| 45.347826 | 237 | 0.722915 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.