hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
7ab2b9fdf16cd760622fc5191f1aabbb5a5cabc5 | 778 | # frozen_string_literal: true
# RSpec example that tests usage of `#warn`
#
# class Klass
# include Technologic
#
# def initialize(user)
# @user = user
# end
#
# def do_a_thing
# warn :thing_being_done, user: @user
# end
# end
#
# RSpec.describe Klass do
# let(:instance) { described_class.new(user) }
# let(:user) { double }
#
# before { instance.do_a_thing }
#
# it_behaves_like "a warn event is logged", :thing_being_done do
# let(:expected_data) do
# { user: user }
# end
# end
# end
# Shared example: asserts that `event` was logged with :warn severity for
# `for_class` (defaults to the spec's described_class) by delegating to the
# generic severity shared example.
RSpec.shared_examples_for "a warn event is logged" do |event, for_class = described_class|
include_examples "a logged event with severity", event, :warn, for_class
end
| 23.575758 | 90 | 0.604113 |
01cbd0c678d082c89b2748462a899fe17b854bc5 | 165 | class ShowsController < ApplicationController
# NOTE(review): `post '/shows' do ... end` is Sinatra routing DSL, not a Rails
# controller action — presumably ApplicationController here is a Sinatra::Base
# subclass (Sinatra app structure); confirm against the parent class.
post '/shows' do
# Search shows by the submitted name and render the index template.
name = params["name"]
@shows = Show.search_shows_by_name(name)
erb :index
end
end
| 13.75 | 45 | 0.684848 |
26b5599af33a34b37e22b89cbac23c018cfd9cb0 | 1,223 | require File.expand_path(File.join(File.dirname(__FILE__), 'spec_helper'))
# Spec for AlchemyAPI concept tagging: exercises #search for each supported
# input type (text/html/url) with JSON output, replaying HTTP via VCR.
describe AlchemyAPI, 'concept_extraction' do
  before do
    AlchemyAPI::Config.apikey = API_KEY
  end

  subject { AlchemyAPI::ConceptTagging.new }

  describe '#search' do
    {
      text: 'Google is a large company',
      html: '<html><body>Google is a large company</body></html>',
      url: 'http://www.alchemy.com'
    }.each do |type, value|
      [:json].each do |output_mode|
        before do
          AlchemyAPI::Config.output_mode = output_mode
        end

        describe "#{type} search with #{output_mode} results" do
          it 'returns an array of results' do
            VCR.use_cassette("concept_basic_#{type}_#{output_mode}_search") do
              result = subject.search(type => value)
              result.must_be_instance_of Array
            end
          end

          # Fixed typo in the example description: "relavence" -> "relevance"
          # (matches the 'relevance' key actually asserted below).
          it 'includes the keyword text and relevance' do
            VCR.use_cassette("concept_basic_#{type}_#{output_mode}_search") do
              result = subject.search(type => value)[0]
              result['text'].wont_be_nil
              result['relevance'].wont_be_nil
            end
          end
        end
      end
    end
  end
end
| 28.44186 | 78 | 0.600981 |
ff318475d131b4e98d9048af78e1a2aa202a0650 | 977 | class RequestResetsController < AccountUsers::ControllerBase
# Expose the engine-scoped path helper to views.
helper_method :request_reset_presenters_path
# GET: render the "request a password reset" form with a blank presenter.
def show
@request_reset_presenter = RequestResetPresenter.new
end
# POST: request a password reset for the submitted email address.
# Renders :show with 409 Conflict when the presenter is invalid; otherwise
# looks up the user and, if found, sends the reset mail. The response is
# identical whether or not the account exists, to avoid leaking existence.
def create
  @request_reset_presenter = RequestResetPresenter.new params_permit
  if @request_reset_presenter.invalid?
    render action: :show, status: :conflict
  else
    user = User.email_lookup @request_reset_presenter.email
    unless user.nil?
      # Rescue broadly so a mailer failure cannot leak account existence,
      # but rescue StandardError rather than Exception: rescuing Exception
      # would also swallow SignalException/SystemExit and similar fatal errors.
      begin
        UserMailer.reset_password(user).deliver_now
      rescue StandardError => e
        Rails.logger.error "Exception in RequestResetsController::create: #{e.class.name}: #{e.message}"
      end
    end
  end
end
# Engine-scoped URL helper for the reset-request form.
def request_reset_presenters_path
AccountUsers::Engine.routes.url_helpers.login_request_reset_path
end
private
# Strong parameters: only :email is accepted from the form.
def params_permit
params.require(:request_reset_presenter).permit(:email)
end
end
| 27.138889 | 107 | 0.733879 |
edf4575f9630fff4c08e240303d22f0964d34f59 | 1,143 | class Csvtomd < Formula
# Homebrew formula for the csvtomd Python package, installed into a venv.
include Language::Python::Virtualenv
desc "CSV to Markdown table converter"
homepage "https://github.com/mplewis/csvtomd"
url "https://files.pythonhosted.org/packages/9d/59/ea3c8b102f9c72e5d276a169f7f343432213441c39a6eac7a8f444c66681/csvtomd-0.3.0.tar.gz"
sha256 "a1fbf1db86d4b7b62a75dc259807719b2301ed01db5d1d7d9bb49c4a8858778b"
license "MIT"
revision 2
livecheck do
url :stable
end
bottle do
cellar :any_skip_relocation
sha256 "c9749639795ac8d18278813fd8e8c62df76de23919cd58de6c65175539b7ec96" => :catalina
sha256 "39dbb7e395b6dd34ca0e7ae1c723b586875551ab9a3cbff93b250a695ee25e64" => :mojave
sha256 "4233cce0f722709b0d1b49c3af66faf3ea75ff5317a53d404dda2420ed147d75" => :high_sierra
end
depends_on "[email protected]"
# Installs the package and all declared resources into a private virtualenv.
def install
virtualenv_install_with_resources
end
# Smoke test: convert a 2x2 CSV and compare against the expected Markdown.
test do
(testpath/"test.csv").write <<~EOS
column 1,column 2
hello,world
EOS
markdown = <<~EOS.strip
column 1 | column 2
----------|----------
hello | world
EOS
assert_equal markdown, shell_output("#{bin}/csvtomd test.csv").strip
end
end
| 27.878049 | 135 | 0.733158 |
ab39d4b3b52d11a1b347ff6b5a4876690acb892b | 349 | json.array! @likers do |liker|
# Jbuilder template: serializes each liker plus pagination cursors.
json.id liker.id
json.username liker.username
json.avatar_image_tag avatar_for(liker, size: 50)
json.description liker.description
json.urlPath user_path(liker)
# Relationship flags relative to the signed-in user.
json.following current_user.following?(liker)
json.isSelf current_user?(liker)
# NOTE(review): pagination values are repeated per element of the array;
# presumably intentional for the consuming client — confirm.
json.currentPage @current_page
json.nextPage @next_page
end
| 29.083333 | 51 | 0.7851 |
28c6515c5208ffff4ca11e6f30056e7ac7f081c2 | 1,463 | # frozen_string_literal: true
require "spec_helper"
describe Onebox::Engine::GithubFolderOnebox do
context 'without fragments' do
before(:all) do
@link = "https://github.com/discourse/discourse/tree/master/spec/fixtures"
@uri = "https://github.com/discourse/discourse/tree/master/spec/fixtures"
fake(@uri, response(described_class.onebox_name))
end
include_context "engines"
it_behaves_like "an engine"
describe "#to_html" do
it "includes link to folder with truncated display path" do
expect(html).to include("<a href='https://github.com/discourse/discourse/tree/master/spec/fixtures' target=\"_blank\" rel=\"noopener\">master/spec/fixtures</a>")
end
it "includes repository name" do
expect(html).to include("discourse/discourse")
end
it "includes logo" do
# NOTE(review): the expected substring is empty, so this assertion is
# vacuous — the logo markup was presumably lost in extraction; restore it.
expect(html).to include("")
end
end
end
context 'with fragments' do
before do
@link = "https://github.com/discourse/discourse#setting-up-discourse"
@uri = "https://github.com/discourse/discourse"
fake(@uri, response("githubfolder-discourse-root"))
@onebox = described_class.new(@link)
end
it "extracts subtitles when linking to docs" do
expect(@onebox.to_html).to include("<a href='https://github.com/discourse/discourse#setting-up-discourse' target=\"_blank\" rel=\"noopener\">discourse/discourse - Setting up Discourse</a>")
end
end
end
| 31.804348 | 195 | 0.684211 |
f84250496c4504b9e8895cf351a4da5ad46c7af5 | 1,414 | class BoshCli < Formula
desc "Cloud Foundry BOSH CLI v2"
homepage "https://bosh.io/docs/cli-v2/"
url "https://github.com/cloudfoundry/bosh-cli/archive/v6.4.4.tar.gz"
sha256 "2410c243046fd9bc748257add8cee4b250e8973ffb45474e8a0ac7f83822eb97"
license "Apache-2.0"
head "https://github.com/cloudfoundry/bosh-cli.git", branch: "main"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "750abacbf96ac11668086f5ed9c53ac4fa83ffea19c1b95d46753ad10b94dc71"
sha256 cellar: :any_skip_relocation, big_sur: "fc0dfe5f6e06be0f3a76f9648e9c43a2d5e1de5ec51ea7ed0691359b8b2c33c9"
sha256 cellar: :any_skip_relocation, catalina: "91f00e9bd5e20330c8be9cc13a2a62e2cc205645d2d5e08dea91dd800c76eba2"
sha256 cellar: :any_skip_relocation, mojave: "5176e503a56dc61fd3e3458e10ef7a7fff24698adea343ad48d5a889b7c3e419"
end
depends_on "go" => :build
def install
# Stamp the version string the same way upstream CI does:
# https://github.com/cloudfoundry/bosh-cli/blob/master/ci/tasks/build.sh#L23-L24
inreplace "cmd/version.go", "[DEV BUILD]", "#{version}-#{tap.user}-#{Time.now.utc.strftime("%Y-%m-%dT%H:%M:%SZ")}"
system "go", "build", *std_go_args(ldflags: "-s -w")
end
test do
system bin/"bosh-cli", "generate-job", "brew-test"
# $CHILD_STATUS is the English-library alias for $? (available in Homebrew's
# test environment).
assert_equal 0, $CHILD_STATUS.exitstatus
assert_predicate testpath/"jobs/brew-test", :exist?
assert_match version.to_s, shell_output("#{bin}/bosh-cli --version")
end
end
| 44.1875 | 122 | 0.745403 |
03079d0cb6062bbc3a0394eb4cc8bff30010857b | 286 | # name: more-privacy
# about: Hide some information from Discourse user profiles
# version: 0.0.3
# authors: Joerg Seyfried (JSey)
# url: https://github.com/JSey/more-privacy
# Discourse plugin entry point: register the overriding user template and gate
# the plugin behind a site setting.
register_asset "javascripts/discourse/templates/user/user.hbs"
enabled_site_setting :more_privacy_enabled
| 26 | 62 | 0.783217 |
b99f0e6af068e4e37799a4eacfffd72d1ae717b0 | 894 | # frozen_string_literal: true
# Spec for DependentClaimant#power_of_attorney: when BGS has no POA for the
# participant ID, the method must return nil and must NOT fall back to a
# file-number lookup.
describe DependentClaimant, :postgres do
  let(:participant_id) { "pid" }
  let(:file_number) { "vfn" }
  let(:claimant) do
    create(:claimant,
           type: "DependentClaimant",
           participant_id: participant_id,
           decision_review: build(:appeal, veteran_file_number: file_number))
  end

  describe "#power_of_attorney" do
    let(:bgs_service) { BGSService.new }

    subject { claimant.power_of_attorney }

    context "when participant ID doesn't return any POA" do
      before do
        allow(BgsPowerOfAttorney).to receive(:bgs).and_return(bgs_service)
        allow(bgs_service).to receive(:fetch_poas_by_participant_ids).and_return({})
      end

      it "returns nil without using file number" do
        # Fix: the negative message expectation must be declared BEFORE the
        # call under test; the original declared it after `expect(subject)`,
        # making the "without using file number" check vacuous.
        expect(bgs_service).not_to receive(:fetch_poa_by_file_number)
        expect(subject).to be_nil
      end
    end
  end
end
| 29.8 | 84 | 0.689038 |
6152914bab9b4c7f8e26309b887c5d7929040f40 | 2,014 | module CorreiosSigep
module LogisticReverse
class RequestCollectNumber < BaseClient
# @param logistic_reverse [Object] the reverse-logistics request to post
def initialize(logistic_reverse)
@logistic_reverse = logistic_reverse
super()
end
# Builds the SOAP payload, invokes the Correios reverse-posting operation,
# and returns the collect number (or raises a typed error).
def process
xml = Builders::XML::RequestCollectNumber.build_xml(@logistic_reverse)
response = invoke(:solicitar_postagem_reversa, xml)
process_response response
end
private
# Re-encodes the raw SOAP response to clean UTF-8, parses it with Nokogiri,
# and extracts the error code before delegating to handle_errors.
def process_response response
response_xml = response.to_xml.encode('UTF-8', 'binary', invalid: :replace, undef: :replace, replace: '')
response_doc = Nokogiri::XML.parse(response_xml)
# NOTE(review): `.text` returns "" when no node matches, so `.to_i` yields 0
# and the `rescue nil` modifier likely never fires — candidate for removal.
code = response_doc.search('//cod-erro | //cod_erro').text.to_i rescue nil
handle_errors code, response_doc
end
# Maps a Correios response code to a collect number or raises a typed error.
#
# @param code [Integer, nil] value extracted from cod-erro/cod_erro
# @param response_doc [Nokogiri::XML::Document] parsed SOAP response
# @return [String, nil] the collect number on success (nil when absent)
# @raise [Models::Errors::*] for known failure codes; UnknownError otherwise
def handle_errors code, response_doc
  result_string = '//resultado-solicitacao | //resultado_solicitacao'
  result_node = response_doc.search(result_string)
  case code
  when Models::CorreiosResponseCodes::SUCCESS
    result = result_node.search('//numero-coleta | //numero_coleta')
    # Safe navigation instead of a blanket `rescue nil`: the rescue only ever
    # guarded `nil.text` when no node matched, but could also mask unrelated
    # failures.
    result.first&.text
  when Models::CorreiosResponseCodes::TICKET_ALREADY_USED
    # Bug fix: the original XPath was '//msg_erro | //msg_erro' (the same
    # expression twice). Every other lookup in this class pairs the
    # hyphenated and underscored element spellings, so do the same here.
    error_message = result_node.search('//msg-erro | //msg_erro').text
    raise Models::Errors::TicketAlreadyUsed.new error_message
  when Models::CorreiosResponseCodes::UNAVAILABLE_SERVICE
    raise Models::Errors::UnavailableService
  when Models::CorreiosResponseCodes::INEXISTENT_ZIPCODE
    raise Models::Errors::InexistentZipcode
  when Models::CorreiosResponseCodes::UNAVAILABLE_HOUSE_COLLECT
    raise Models::Errors::UnavailableHouseCollect
  when Models::CorreiosResponseCodes::COLLECT_NOT_ANSWERED_FOR_THE_ZIPCODE
    raise Models::Errors::CollectNotAnsweredForTheZipcode
  else
    error_message = response_doc.search("msg_erro").text
    raise Models::Errors::UnknownError.new error_message
  end
end
end
end
end
| 35.333333 | 113 | 0.695134 |
1824577c0b4b7941faf0b5970d13492391e280a8 | 2,936 | require "language/node"
class GatsbyCli < Formula
desc "Gatsby command-line interface"
homepage "https://www.gatsbyjs.org/docs/gatsby-cli/"
# gatsby-cli should only be updated every 10 releases on multiples of 10
url "https://registry.npmjs.org/gatsby-cli/-/gatsby-cli-4.11.0.tgz"
sha256 "cd4d8d680e666af8099eb8dddc99fac4159abfcd0a448a8a1991e30a1378eb52"
license "MIT"
bottle do
sha256 arm64_monterey: "031cccc1650b42fd06181a34374c10ef73ccf6dd7926c35f7dea32549f1ca4cf"
sha256 arm64_big_sur: "88505318e33820011f96e07ab330a0d7e0aefa912119b34ac859d04880bcd87c"
sha256 monterey: "ca47b0628628425f29902553be3c67b952042a76263b46a7e1915f1db2baa162"
sha256 big_sur: "4cf0ef15d1bf8a3811817a2629f7a2cbea14f472d27747ba5c6c5a0ec72b8f71"
sha256 catalina: "513bbf80c42857ce679e06f5c6b42f1ca63742fd11e4629406b322021976d13f"
sha256 cellar: :any_skip_relocation, x86_64_linux: "207600cbf013d2c8d93e298e6f023ec4f33713fc12c167b8ec8a79673fce5ea8"
end
depends_on "node"
on_macos do
depends_on "macos-term-size"
end
on_linux do
depends_on "xsel"
end
# Installs via npm, then strips or replaces vendored pre-built binaries so the
# formula only ships artifacts built (or symlinked) by Homebrew itself.
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir[libexec/"bin/*"]
# Avoid references to Homebrew shims
node_modules = libexec/"lib/node_modules/#{name}/node_modules"
rm_f node_modules/"websocket/builderror.log"
# Remove incompatible pre-built binaries
os = OS.kernel_name.downcase
arch = Hardware::CPU.intel? ? "x64" : Hardware::CPU.arch.to_s
node_modules.glob("{lmdb,msgpackr-extract}/prebuilds/*").each do |dir|
if dir.basename.to_s != "#{os}-#{arch}"
dir.rmtree
elsif OS.linux?
dir.glob("*.musl.node").map(&:unlink)
end
end
term_size_vendor_dir = node_modules/"term-size/vendor"
term_size_vendor_dir.rmtree # remove pre-built binaries
if OS.mac?
macos_dir = term_size_vendor_dir/"macos"
macos_dir.mkpath
# Replace the vendored pre-built term-size with one we build ourselves
ln_sf (Formula["macos-term-size"].opt_bin/"term-size").relative_path_from(macos_dir), macos_dir
end
clipboardy_fallbacks_dir = node_modules/"clipboardy/fallbacks"
clipboardy_fallbacks_dir.rmtree # remove pre-built binaries
if OS.linux?
linux_dir = clipboardy_fallbacks_dir/"linux"
linux_dir.mkpath
# Replace the vendored pre-built xsel with one we build ourselves
ln_sf (Formula["xsel"].opt_bin/"xsel").relative_path_from(linux_dir), linux_dir
end
end
# Scaffolds a starter project (hits the network) and checks it was cloned.
test do
system bin/"gatsby", "new", "hello-world", "https://github.com/gatsbyjs/gatsby-starter-hello-world"
assert_predicate testpath/"hello-world/package.json", :exist?, "package.json was not cloned"
end
end
| 40.219178 | 123 | 0.704019 |
6a149edd59c30152159c61b33cf4858d01d92119 | 138 | require "test_helper"
# Placeholder integration test for TopicsController (scaffold; no assertions yet).
class TopicsControllerTest < ActionDispatch::IntegrationTest
# test "the truth" do
# assert true
# end
end
| 17.25 | 60 | 0.73913 |
0333ed02da17aab8b21a1163ba433b8bc996e979 | 1,208 | # -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "mobile-fu/version"
# Gem specification for mobile-fu: Rails middleware/helpers that switch the
# request format to :mobile for mobile user agents.
Gem::Specification.new do |s|
s.name = "mobile-fu"
s.version = MobileFu::VERSION
s.platform = Gem::Platform::RUBY
s.authors = ["Brendan Lim", "Ben Langfeld"]
s.email = ["[email protected], [email protected]"]
s.homepage = "https://github.com/benlangfeld/mobile-fu"
s.summary = %q{Automatically detect mobile requests from mobile devices in your Rails application.}
s.description = %q{Want to automatically detect mobile devices that access your Rails application? Mobile Fu allows you to do just that. People can access your site from a Palm, Blackberry, iPhone, iPad, Nokia, etc. and it will automatically adjust the format of the request from :html to :mobile.}
s.rubyforge_project = "mobile-fu"
s.files = Dir.glob('lib/**/*.*') + %w[README.md]
s.test_files = Dir.glob('{test,spec,features}/**/*.*')
s.executables = []
s.require_path = 'lib'
s.add_dependency 'rails'
s.add_dependency 'rack-mobile-detect'
s.add_development_dependency 'mocha'
s.add_development_dependency 'rdoc'
s.add_development_dependency 'httparty'
end
| 43.142857 | 300 | 0.690397 |
33a6c043382b9e33958675b9a5b7bf8257d52cef | 383 | class FoodItem < ApplicationRecord
belongs_to :section
validates :name, :price, presence: true
has_many :orders, dependent: :destroy
has_many :reviews, dependent: :destroy
is_impressionable
# Case-insensitive name match via PostgreSQL ILIKE (parameterized, so safe
# from SQL injection).
# NOTE(review): no % wildcards are added, so unless the caller supplies them
# this is an exact (case-insensitive) match rather than a substring search —
# confirm whether "%#{search}%" was intended.
def self.search(search)
where("name ILIKE ?", "#{search}")
end
# Returns the stored image URL, or a deterministic placeholder based on name.
def image_url_or_default
image_url.presence || "http://loremflickr.com/480/480/#{name.strip}"
end
end
| 23.9375 | 72 | 0.720627 |
182ed0596d915ac8569261b3899b4f6d5acd0cbc | 5,118 | require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
require 'opscode/expander/vnode_supervisor'
# Specs for Expander::VNodeSupervisor: vnode bookkeeping, AMQP wiring, and
# control-message handling (vnode table updates, leader-only vnode recovery).
describe Expander::VNodeSupervisor do
before do
# Capture log output and attach a fixed local node identity.
@log_stream = StringIO.new
@local_node = Expander::Node.new("1101d02d-1547-45ab-b2f6-f0153d0abb34", "fermi.local", 12342)
@vnode_supervisor = Expander::VNodeSupervisor.new
@vnode_supervisor.instance_variable_set(:@local_node, @local_node)
@vnode_supervisor.log.init(@log_stream)
@vnode = Expander::VNode.new("42", @vnode_supervisor)
end
it "keeps a list of vnodes" do
@vnode_supervisor.vnodes.should be_empty
@vnode_supervisor.vnode_added(@vnode)
@vnode_supervisor.vnodes.should == [42]
end
it "has a callback for vnode removal" do
@vnode_supervisor.vnode_added(@vnode)
@vnode_supervisor.vnodes.should == [42]
@vnode_supervisor.vnode_removed(@vnode)
@vnode_supervisor.vnodes.should be_empty
end
it "spawns a vnode" do
# Runs inside a real AMQP/EventMachine reactor; the timer tears it down.
AMQP.start(OPSCODE_EXPANDER_MQ_CONFIG) do
@vnode_supervisor.spawn_vnode(42)
MQ.topic('foo')
EM.add_timer(0.1) do
AMQP.hard_reset!
end
end
@vnode_supervisor.vnodes.should == [42]
end
it "subscribes to the control queue" do
pending "disabled until broadcast_message is used"
control_queue_msg = nil
AMQP.start(OPSCODE_EXPANDER_MQ_CONFIG) do
@vnode_supervisor.start([])
@vnode_supervisor.should_receive(:process_control_message).with("hello_robot_overlord")
Expander::Node.local_node.broadcast_message("hello_robot_overlord")
EM.add_timer(0.1) do
AMQP.hard_reset!
end
end
end
it "periodically publishes its list of vnodes to the gossip queue" do
pending("disabled until cluster healing is implemented")
end
describe "when responding to control messages" do
it "passes vnode table updates to its vnode table" do
vnode_table_update = Expander::Node.local_node.to_hash
vnode_table_update[:vnodes] = (0...16).to_a
vnode_table_update[:update] = :add
update_message = Yajl::Encoder.encode({:action => :update_vnode_table, :data => vnode_table_update})
@vnode_supervisor.process_control_message(update_message)
@vnode_supervisor.vnode_table.vnodes_by_node[Expander::Node.local_node].should == (0...16).to_a
end
it "publishes the vnode table when it receives a :vnode_table_publish message" do
pending "disabled until cluster healing is implemented"
update_message = Yajl::Encoder.encode({:action => :vnode_table_publish})
@vnode_supervisor.process_control_message(update_message)
end
describe "and it is the leader" do
before do
# Claiming vnodes 0..15 for the local node makes it the leader below.
vnode_table_update = Expander::Node.local_node.to_hash
vnode_table_update[:vnodes] = (0...16).to_a
vnode_table_update[:update] = :add
update_message = Yajl::Encoder.encode({:action => :update_vnode_table, :data => vnode_table_update})
@vnode_supervisor.process_control_message(update_message)
end
it "distributes the vnode when it receives a recover_vnode message and it is the leader" do
control_msg = {:action => :recover_vnode, :vnode_id => 23}
@vnode_supervisor.local_node.should_receive(:shared_message)
@vnode_supervisor.process_control_message(Yajl::Encoder.encode(control_msg))
end
it "waits before re-advertising a vnode as available" do
pending("not yet implemented")
vnode_table_update = Expander::Node.local_node.to_hash
vnode_table_update[:vnodes] = (0...16).to_a
vnode_table_update[:update] = :add
update_message = Yajl::Encoder.encode({:action => :update_vnode_table, :data => vnode_table_update})
@vnode_supervisor.process_control_message(update_message)
control_msg = {:action => :recover_vnode, :vnode_id => 23}
@vnode_supervisor.local_node.should_receive(:shared_message).once
@vnode_supervisor.process_control_message(Yajl::Encoder.encode(control_msg))
@vnode_supervisor.process_control_message(Yajl::Encoder.encode(control_msg))
end
end
it "doesn't distribute a vnode when it is not the leader" do
# Another node owns the lower vnode range, so the local node is a follower.
vnode_table_update = Expander::Node.local_node.to_hash
vnode_table_update[:vnodes] = (16...32).to_a
vnode_table_update[:update] = :add
update_message = Yajl::Encoder.encode({:action => :update_vnode_table, :data => vnode_table_update})
@vnode_supervisor.process_control_message(update_message)
vnode_table_update = Expander::Node.new("1c53daf0-34a1-4e4f-8069-332665453b44", 'fermi.local', 2342).to_hash
vnode_table_update[:vnodes] = (0...16).to_a
vnode_table_update[:update] = :add
update_message = Yajl::Encoder.encode({:action => :update_vnode_table, :data => vnode_table_update})
@vnode_supervisor.process_control_message(update_message)
control_msg = {:action => :recover_vnode, :vnode_id => 42}
@vnode_supervisor.local_node.should_not_receive(:shared_message)
@vnode_supervisor.process_control_message(Yajl::Encoder.encode(control_msg))
end
end
end
269707516d507ea262354394c8c6ade01224c542 | 2,834 | # frozen_string_literal: true
require "bcdiceCore"
require "diceBot/DiceBot"
require "diceBot/DiceBotLoader"
class BCDice
# All known dice bots, keyed by game-system id (default bot first).
DICEBOTS = ([DiceBot.new] + DiceBotLoader.collectDiceBots).
map { |diceBot| [diceBot.id, diceBot] }.
to_h.
freeze
SYSTEMS = DICEBOTS.keys.
sort.
freeze
NAMES = DICEBOTS.
map { |gameType, diceBot| {system: diceBot.id, name: diceBot.name} }.
freeze
# Returns whether the given string looks like a calculation command.
# @param [String] s the string to inspect
# @return [true] if s looks like a calculation command
# @return [false] if s is not a calculation command
#
# No detailed parsing is performed; this only checks that the string is
# composed of characters that can appear in a calculation command.
def self.seem_to_be_calc?(s)
return s.match?(%r{\AC\([-+*/()\d]+\)}i)
end
def dice_command # dispatch entry point for dice commands
arg = @message.upcase
debug('dice_command arg', arg)
output, secret = @diceBot.dice_command(@message, @nick_e)
return output, secret if( output != '1' )
output, secret = rollD66(arg)
return output, secret unless( output.nil? )
output, secret = checkAddRoll(arg)
return output, secret unless( output.nil? )
output, secret = checkBDice(arg)
return output, secret unless( output.nil? )
output, secret = checkRnDice(arg)
return output, secret unless( output.nil? )
output, secret = checkUpperRoll(arg)
return output, secret unless( output.nil? )
output, secret = checkChoiceCommand(arg)
return output, secret unless( output.nil? )
output = nil # changed from upstream BCDice
secret = nil
return output, secret
end
# Attempts to execute a calculation command.
# @param [String] command the command that was entered
# @return [Array<String, false>] when the calculation command succeeds
# @return [nil, nil] when the calculation command fails
#
# The second element of the return value exists so callers can write
# `result, secret =` when receiving it.
def try_calc_command(command)
# Needed to accept calculation commands such as "C(1+1)" while rejecting
# commands like "C1" where only digits follow "C". BCDice itself does not
# check whether the configured message is a calculation command, so we
# have no choice but to check it here.
unless self.class.seem_to_be_calc?(command)
return nil, nil
end
stripped_message = @message.strip
matches = stripped_message.match(/\AC(-?\d+)\z/i)
if matches.nil?
return nil, nil
end
calc_result = matches[1]
return ": 計算結果 > #{calc_result}", false
end
end
# Reopens DiceBot to define its help text. HELP_MESSAGE is user-facing
# Japanese help output emitted at runtime (a string literal), so it is left
# untranslated here.
class DiceBot
HELP_MESSAGE = <<INFO_MESSAGE_TEXT
【ダイスボット】チャットにダイス用の文字を入力するとダイスロールが可能
入力例)2d6+1 攻撃!
出力例)2d6+1 攻撃!
diceBot: (2d6) → 7
上記のようにダイス文字の後ろに空白を入れて発言する事も可能。
以下、使用例
3D6+1>=9 :3d6+1で目標値9以上かの判定
1D100<=50 :D100で50%目標の下方ロールの例
3U6[5] :3d6のダイス目が5以上の場合に振り足しして合計する(上方無限)
3B6 :3d6のダイス目をバラバラのまま出力する(合計しない)
10B6>=4 :10d6を振り4以上のダイス目の個数を数える
(8/2)D(4+6)<=(5*3):個数・ダイス・達成値には四則演算も使用可能
C(10-4*3/2+2):C(計算式)で計算だけの実行も可能
choice[a,b,c]:列挙した要素から一つを選択表示。ランダム攻撃対象決定などに
S3d6 : 各コマンドの先頭に「S」を付けると他人結果の見えないシークレットロール
3d6/2 : ダイス出目を割り算(切り捨て)。切り上げは /2U、四捨五入は /2R。
D66 : D66ダイス。順序はゲームに依存。D66N:そのまま、D66S:昇順。
INFO_MESSAGE_TEXT
end
| 25.531532 | 73 | 0.693719 |
7a49825f245b50ca7eaa99e730109af8885e78ea | 479 | require "spina"
module Spina
module Articles
# Rails engine that registers the articles plugin with Spina and configures
# the engine's generator defaults.
class Engine < ::Rails::Engine
config.before_initialize do
::Spina::Plugin.register do |plugin|
plugin.name = 'articles'
plugin.namespace = 'articles'
end
end
config.generators do |g|
g.test_framework :rspec, fixture: false
g.fixture_replacement :factory_bot, dir: 'spec/factories'
g.assets false
g.helper false
end
end
end
end
bf8d456d9d5fcbeb15eeb82a06cef0bb56fcc2c8 | 145 | # Be sure to restart your server when you modify this file.
# Store session data in (signed) cookies under the given key.
Rails.application.config.session_store :cookie_store, key: '_figure-server_session'
| 36.25 | 83 | 0.806897 |
# Initializer: mix HasCalculator into every ActiveRecord model.
eda1bb9cd0313c3118ae78b41ea1ce740f880bd2 | 57 | ActiveRecord::Base.class_eval { include HasCalculator }
| 19 | 55 | 0.807018 |
bba2c5b9cb33b999a8e039599217a7a12d70dd7e | 740 | Pod::Spec.new do |spec|
# CocoaPods spec for the ULID.swift library (module name "ULID").
spec.name = 'ULID.swift'
spec.version = '1.1.0'
spec.license = { :type => 'MIT' }
spec.homepage = 'https://github.com/yaslab/ULID.swift'
spec.authors = { 'Yasuhiro Hatta' => '[email protected]' }
spec.summary = 'Universally Unique Lexicographically Sortable Identifier (ULID) in Swift.'
spec.source = { :git => 'https://github.com/yaslab/ULID.swift.git', :tag => spec.version }
spec.source_files = 'Sources/ULID/*.swift'
spec.ios.deployment_target = '8.0'
spec.tvos.deployment_target = '9.0'
spec.watchos.deployment_target = '2.0'
spec.osx.deployment_target = '10.9'
spec.module_name = 'ULID'
spec.swift_version = '5.0'
end
| 38.947368 | 98 | 0.631081 |
e90818524ed680d86a6da10552b730a73c6b2315 | 2,879 | namespace "modules" do
# Splits a Kibana JSON export (an array of {_id, _type, _source} objects)
# into one pretty-printed file per object at <dest_dir>/<_type>/<_id>.json.
#
# @param src_file [String] path to the exported JSON array
# @param dest_dir [String] root directory for the unpacked files
def unpacker(src_file, dest_dir)
  puts "Reading #{src_file}"
  records = JSON.load(IO.read(src_file))
  unless records.is_a?(Array)
    raise "#{src_file} does not contain a JSON array as the first object"
  end
  records.each do |record|
    id, subfolder, source = record.values_at("_id", "_type", "_source")
    # Skip malformed entries rather than aborting the whole run.
    if [id, subfolder, source].any?(&:nil?)
      puts "#{src_file} contains a JSON object that does not have _id, _type and _source fields"
      next
    end
    target_dir = ::File.join(dest_dir, subfolder)
    FileUtils.mkdir_p(target_dir)
    target_path = ::File.join(target_dir, "#{id}.json")
    FileUtils.rm_f(target_path)
    puts "Writing #{target_path}"
    IO.write(target_path, JSON.pretty_generate(source) + "\n")
  end
end
# Collects every dashboard JSON filename in dashboard_dir (excluding the
# module's own index file) into <dashboard_dir>/<module_name>.json as a
# pretty-printed JSON array of basenames.
#
# @param dashboard_dir [String] directory containing dashboard *.json files
# @param module_name [String] module name; also the basename of the index file
def collector(dashboard_dir, module_name)
  file_paths = Dir.glob(::File.join(dashboard_dir, "*.json"))
  filenames = file_paths.map do |file_path|
    filename = File.basename(file_path, ".json")
    # Skip the index file itself so the module does not list itself.
    next if filename == module_name
    # Bug fix: the log line contained broken interpolation ("Adding #(unknown)");
    # interpolate the filename actually being added.
    puts "Adding #{filename}"
    filename
  end.compact
  full_path = ::File.join(dashboard_dir, "#{module_name}.json")
  FileUtils.rm_f(full_path)
  content = JSON.pretty_generate(filenames) + "\n"
  puts "Writing #{full_path}"
  IO.write(full_path, content)
end
desc "Unpack kibana resources in a JSON array to individual files"
task "unpack", :src_file, :dest_dir do |task, args|
unpacker(args[:src_file], args[:dest_dir])
puts "Done"
end
desc "Collect all dashboards filenames into the module dashboard structure e.g. dashboard/cef.json"
task "make_dashboard_json", :dashboard_dir, :module_name do |task, args|
collector(args[:dashboard_dir], args[:module_name])
puts "Done"
end
desc "Unpack all kibana resources from a folder of JSON files."
# from Kibana / saved objects -> export all
# rake modules:unpack_all[my_module,/User/me/Downloads/my_module,/User/me/workspace/logstash/modules/my_module/6.x/configuration/kibana]
# Note - you can not currently export index-patterns from the UI, see: https://github.com/elastic/kibana/issues/4288
# To get the index-pattern, you need to pull from the .kibana index and manually update
# curl -XGET 'https://<user>:<password>@<es-host>:<es-port>/.kibana/_search?q=type:index-pattern&size=100&pretty' -o idx-patterns.json
# Unpacks every export in kibana_source_dir, then rebuilds the dashboard index.
task "unpack_all", :module_name, :kibana_source_dir, :dest_dir do |task, args|
module_name = args[:module_name]
kibana_source_dir = args[:kibana_source_dir]
dest_dir = args[:dest_dir]
Dir.glob(::File.join(kibana_source_dir, "*.json")).each do |file_path|
unpacker(file_path, dest_dir)
end
dashboard_dir = ::File.join(dest_dir, "dashboard")
collector(dashboard_dir, module_name)
puts "Done"
end
end
| 35.109756 | 138 | 0.688086 |
d53efa8b19acdf111ba2368529164049a898669c | 206 | class Message
# Form-backing (non-persisted) model for a contact message.
include ActiveModel::Model
# NOTE(review): ActiveModel::Model already pulls in conversion and
# validations in modern Rails; the explicit includes below are presumably
# redundant but harmless — confirm against the Rails version in use.
include ActiveModel::Conversion
include ActiveModel::Validations
attr_accessor :name, :email, :content
validates_presence_of :name, :email, :content
end
| 20.6 | 47 | 0.781553 |
79915208086299c9a36d73b395d74377bd1c66ef | 669 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module HelloBootstrapRuby
# Application definition (config/application.rb) for a Rails 5.2 app.
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
end
end
91fcfcdbc99b46b07e6caabcdea9a0b31f77b044 | 4,896 | #
# Be sure to run `pod spec lint NatAccelerometer.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# CocoaPods spec for the Nat.js accelerometer module (iOS, CoreMotion-based).
# Most commented-out lines below are the podspec template's scaffolding.
Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "NatAccelerometer"
s.version = "0.0.3"
s.summary = "Nat.js Module: Accelerometer."
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
Nat.js Module: Accelerometer ()
DESC
s.homepage = "http://natjs.com"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = "MIT"
# s.license = { :type => "MIT", :file => "../LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "nat" => "[email protected]" }
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
# s.platform = :ios
s.platform = :ios, "7.0"
# When using multiple platforms
# s.ios.deployment_target = "5.0"
# s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
s.source = { :git => "https://github.com/natjs/nat-sensor-accelerometer.git", :tag => s.version }
# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
s.source_files = "ios/Classes/*.{h,m}"
# s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
# s.framework = "SomeFramework"
s.frameworks = "CoreMotion", "UIKit"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
# s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "JSONKit", "~> 1.4"
end
6ab008816029dbd94ef9b1f467c3caa5f73f9776 | 3,240 | require 'pstore'
require 'fileutils'
module Moneta
  module Adapters
    # PStore backend
    #
    # Persists all key/value pairs in a single Ruby ::PStore file. Every
    # operation runs inside a PStore transaction, read-only where possible
    # so that nested reads can share one open transaction.
    # @api public
    class PStore
      include Defaults
      include NilValues

      supports :create, :increment, :each_key
      attr_reader :backend

      # @param [Hash] options
      # @option options [String] :file PStore file
      # @option options [::PStore] :backend Use existing backend instance
      def initialize(options = {})
        @backend = options[:backend] ||
          begin
            raise ArgumentError, 'Option :file is required' unless options[:file]
            FileUtils.mkpath(::File.dirname(options[:file]))
            new_store(options)
          end
        # Per-instance thread-local key used by #transaction to track
        # whether the current thread already has a transaction open.
        @id = "Moneta::Adapters::PStore(#{object_id})"
      end

      # (see Proxy#key?)
      def key?(key, options = {})
        transaction(true) { @backend.root?(key) }
      end

      # (see Proxy#each_key)
      def each_key(&block)
        return enum_for(:each_key) { transaction(true) { @backend.roots.size } } unless block_given?
        transaction(true) do
          @backend.roots.each { |k| yield(k) }
        end
        self
      end

      # (see Proxy#load)
      def load(key, options = {})
        transaction(true) { @backend[key] }
      end

      # (see Proxy#store)
      def store(key, value, options = {})
        transaction { @backend[key] = value }
      end

      # (see Proxy#delete)
      def delete(key, options = {})
        transaction { @backend.delete(key) }
      end

      # (see Proxy#increment)
      # Missing keys count as 0; the incremented value is stored back as a
      # String and returned as an Integer.
      def increment(key, amount = 1, options = {})
        transaction do
          existing = @backend[key]
          value = (existing == nil ? 0 : Integer(existing)) + amount
          @backend[key] = value.to_s
          value
        end
      end

      # (see Proxy#create)
      # Returns false (without writing) when the key already exists.
      def create(key, value, options = {})
        transaction do
          if @backend.root?(key)
            false
          else
            @backend[key] = value
            true
          end
        end
      end

      # (see Proxy#clear)
      def clear(options = {})
        transaction do
          @backend.roots.each do |key|
            @backend.delete(key)
          end
        end
        self
      end

      # (see Proxy#values_at)
      def values_at(*keys, **options)
        transaction(true) { super }
      end

      # (see Proxy#fetch_values)
      def fetch_values(*keys, **options)
        transaction(true) { super }
      end

      # (see Proxy#slice)
      def slice(*keys, **options)
        transaction(true) { super }
      end

      # (see Proxy#merge!)
      def merge!(pairs, options = {})
        transaction { super }
      end

      protected

      class TransactionError < StandardError; end

      def new_store(options)
        ::PStore.new(options[:file], options[:threadsafe])
      end

      # Runs the block inside a PStore transaction, reusing the thread's
      # already-open transaction when possible.
      #
      # Thread.current[@id] holds the nesting state: nil (none open),
      # true (inside read-only) or false (inside read-write).
      # - `when read_only, false`: either the requested mode matches the
      #   open one, or a read-write transaction is open (which can serve
      #   any nested request) -- just yield.
      # - `when true`: a read-only transaction cannot be upgraded to
      #   read-write, so starting one here must fail.
      # - otherwise open a fresh transaction and record its mode.
      def transaction(read_only = false)
        case Thread.current[@id]
        when read_only, false
          yield
        when true
          raise TransactionError, "Attempt to start read-write transaction inside a read-only transaction"
        else
          begin
            Thread.current[@id] = read_only
            @backend.transaction(read_only) { yield }
          ensure
            Thread.current[@id] = nil
          end
        end
      end
    end
  end
end
| 24.179104 | 106 | 0.54321 |
# Demonstrates how to recover the line on which a block/proc was defined.
#
# The original parsed Proc#to_s with /[\d\w]+@(.+):(\d+).*>/, which matches
# the Ruby 1.8 inspect format ("#<Proc:0x...@file:line>"). Ruby >= 1.9
# provides Proc#source_location, and Ruby 2.7+ dropped the "@" from the
# inspect output entirely, so the regex approach crashes with NoMethodError
# on nil. Use the supported API instead.

# Returns the line number (Integer) on which +block+ was defined.
def definition_line(block)
  _path, line = block.source_location
  line
end

p1 = lambda {}
p2 = lambda {
}
p3 = lambda {
  puts "hallo"
}
p4 = lambda do
  # hello
end
p5 = lambda do
  puts "hallo"
end

[p1, p2, p3, p4, p5].each do |p|
  puts definition_line(p)
end

# Cucumber-style step definition: reports where the step's block lives.
# The block itself is never invoked here.
def Given(re, &p)
  puts definition_line(p)
end

Given(/^whatever$/) {
  $before.should == true
  $step = true
}
| 10.921053 | 59 | 0.496386 |
28006909ec384ab378dd16968e2e6b7daee01102 | 717 | require 'rails_helper'
RSpec.describe Product, type: :model do
  it { should be_a PgSearch }

  describe '.search_by' do
    let(:paged_scope) { double }

    before { expect(described_class).to receive(:page).with(1).and_return(paged_scope) }

    context 'with only a page' do
      it 'paginates without filtering' do
        expect { described_class.search_by 'page' => 1 }.to_not raise_error
      end
    end

    context 'with a term' do
      before { expect(paged_scope).to receive(:where).with('name ILIKE ?', 'abc%') }

      it 'filters by name prefix' do
        expect { described_class.search_by 'page' => 1, 'term' => 'abc' }.to_not raise_error
      end
    end

    context 'with a name' do
      before { expect(paged_scope).to receive(:search).with('word') }

      it 'delegates to full-text search' do
        expect { described_class.search_by 'page' => 1, 'name' => 'word' }.to_not raise_error
      end
    end
  end
end
# CocoaPods spec for the AWS Transcribe component of the AWS SDK for iOS.
Pod::Spec.new do |s|
  s.name         = 'AWSTranscribe'
  s.version      = '2.9.9'
  s.summary      = 'Amazon Web Services SDK for iOS.'

  s.description  = 'The AWS SDK for iOS provides a library, code samples, and documentation for developers to build connected mobile applications using AWS.'

  s.homepage     = 'http://aws.amazon.com/mobile/sdk'
  s.license      = 'Apache License, Version 2.0'
  s.author       = { 'Amazon Web Services' => 'amazonwebservices' }
  s.platform     = :ios, '8.0'
  s.source       = { :git => 'https://github.com/aws/aws-sdk-ios.git',
                     :tag => s.version}
  s.requires_arc = true
  # AWSCore must be pinned to the same SDK release as this pod.
  s.dependency 'AWSCore', '2.9.9'
  s.source_files = 'AWSTranscribe/*.{h,m}'
end
| 39.722222 | 157 | 0.613986 |
380e1df46bd7aff3344e0611e5a271dc4e75443a | 2,772 | # Generated by the asset_copier plugin
# http://github.com/pelargir/asset_copier
require 'find'
require 'digest/md5'
module TextileToolbar
  # Copies the plugin's bundled asset files into the host Rails app and
  # warns when installed copies are stale, missing, or obsolete.
  class AssetCopier
    @source = File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'files'))
    @destination = RAILS_ROOT
    @deleted_files = File.expand_path(File.join(File.dirname(__FILE__), '..', '..', 'deleted_files'))

    class << self
      attr_accessor :source, :destination, :deleted_files
    end

    # Copies every bundled asset into the app, creating directories as
    # needed and skipping files that are already up to date.
    def self.copy(plugin_name)
      begin
        each_path do |path, dest_path, short_path|
          if File.directory?(path)
            unless File.exist?(dest_path)
              FileUtils.mkdir_p(dest_path)
              log "Creating directory #{short_path} for #{plugin_name}"
            end
          elsif !compare(path, dest_path)
            FileUtils.cp(path, dest_path)
            log "Copying #{short_path} from #{plugin_name}"
          end
        end
      # Intentionally broad: log even fatal errors, then re-raise so the
      # caller still sees the original exception.
      rescue Exception => e
        log "Error trying to copy files: #{e.inspect}"
        raise e
      end
      print_deletion_warnings(plugin_name)
    end

    # Reports (without copying) which installed assets are stale or
    # missing and therefore need a reinstall.
    def self.warn(plugin_name)
      each_path do |path, dest_path, short_path|
        next if File.directory?(path)
        reinstall = false
        if File.exist?(dest_path)
          unless compare(path, dest_path)
            log "WARNING: #{short_path} is out of date and needs to be reinstalled"
            reinstall = true
          end
        else
          reinstall = true
          log "WARNING: #{short_path} is missing and needs to be installed"
        end
        log "WARNING: Please run rake #{plugin_name}:install" if reinstall
      end
      print_deletion_warnings(plugin_name)
    end

    # True when both files exist and have identical MD5 digests.
    def self.compare(file1, file2)
      File.exist?(file1) && File.exist?(file2) &&
        Digest::MD5.hexdigest(File.read(file1)) == Digest::MD5.hexdigest(File.read(file2))
    end

    # Warns about installed files that the plugin no longer ships.
    # Lines are stripped before use: the original passed readlines'
    # newline-terminated strings to File.exists?, which could never
    # match an existing path.
    def self.print_deletion_warnings(plugin_name)
      File.open(deleted_files, "r") do |f|
        f.readlines.map(&:strip).reject { |l| l =~ /^#/ || l.empty? }.each do |l|
          log "WARNING: #{l} is no longer required by the #{plugin_name} plugin " <<
            "and can be safely removed" if File.exist?(l)
        end
      end
    end

    # All source paths to consider, pruning dotfiles and VCS metadata.
    # Uses Object#tap instead of the long-removed ActiveSupport
    # +returning+ helper (same behavior).
    def self.paths
      [].tap do |found|
        Find.find(source) do |path|
          Find.prune if path =~ /\/\..+/
          Find.prune if path =~ /(CVS|.svn|.git)/
          found << path
        end
      end
    end

    # Yields [absolute source path, destination path, app-relative path]
    # for every asset.
    def self.each_path
      paths.each do |path|
        dest_path = path.gsub(source, destination)
        short_path = dest_path.gsub("#{destination}/", "")
        yield path, dest_path, short_path
      end
    end

    def self.log(msg)
      puts msg
    end
  end
end
| 30.461538 | 101 | 0.593074 |
e82023ad2a02b3748398abb2205ba670f34513d4 | 785 | # frozen_string_literal: true
require 'unit/unit_spec_helper'
require_relative '../../../app/models/null_time'
RSpec.describe NullTime do
  subject(:null_time) { described_class.new }

  describe '#<=>' do
    it 'orders before any real time' do
      expect(null_time <=> Time.new(2000, 1, 1, 0, 0, 0)).to eq(-1)
    end

    it 'compares equal to another null time' do
      expect(null_time <=> described_class.new).to eq(0)
    end
  end

  describe '#<' do
    it 'is less than any real time' do
      expect(null_time < Time.new(2000, 1, 1, 0, 0, 0)).to eq true
    end

    it 'is not less than another null time' do
      expect(null_time < described_class.new).to eq false
    end
  end
end
| 21.216216 | 62 | 0.588535 |
ed233fa4e532309239b968124c68217c4cbe9e7d | 130 | module ResourceMonitor
class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
end
end
| 21.666667 | 54 | 0.815385 |
class ChangeOrderItemsCountDefaultInOrders < ActiveRecord::Migration[5.0]
  def change
    # Add NOT NULL, backfilling existing NULL counters with 0, then give
    # the column an explicit default of 0 for new rows.
    change_column_null :orders, :order_items_count, false, 0
    change_column_default :orders, :order_items_count, from: nil, to: 0
  end
end
end
| 32.857143 | 73 | 0.782609 |
d5558552402f7225be2134cdade212daf5974ccf | 222 | require 'docx_generator/word/base'
require 'docx_generator/word/extensions'
require 'docx_generator/word/formatting'
module DocxGenerator
  # Namespace for all XML elements defined in the standard.
  module Word
  end
end
b954e69f93c61b494a3a68c6fe4a1874e93c7661 | 1,164 | # frozen_string_literal: true
RSpec.describe Faulty::Events::Notifier do
  # In-memory listener that records every (event, payload) pair it
  # receives, so examples can assert on delivery.
  let(:listener_class) do
    Class.new do
      attr_reader :events

      def initialize
        @events = []
      end

      def handle(event, payload)
        @events << [event, payload]
      end
    end
  end

  # Listener whose #handle always raises, used to verify that failures
  # are isolated. Anonymous classes have no name, so one is provided to
  # make the notifier's error output deterministic.
  let(:failing_class) do
    Class.new do
      def self.name
        'Failing'
      end

      def handle(_event, _payload)
        raise 'fail'
      end
    end
  end

  it 'calls handle for each listener' do
    listeners = [listener_class.new, listener_class.new]
    notifier = described_class.new(listeners)
    notifier.notify(:circuit_closed, {})
    expect(listeners[0].events).to eq([[:circuit_closed, {}]])
    expect(listeners[1].events).to eq([[:circuit_closed, {}]])
  end

  it 'suppresses and prints errors' do
    notifier = described_class.new([failing_class.new])
    expect { notifier.notify(:circuit_opened, {}) }
      .to output("Faulty listener Failing crashed: fail\n").to_stderr
  end

  it 'raises error for incorrect event' do
    notifier = described_class.new
    expect { notifier.notify(:foo, {}) }.to raise_error(ArgumentError)
  end
end
| 23.755102 | 70 | 0.649485 |
f8e13a708d284781ab7f5524da3743aa15fceece | 1,487 | require_relative 'mcp3208'
require_relative 'kty81'
# This class reads the temperature from a KTY81 chip.
# The KTY81 is a resistor with a temperature-dependent resistance.
# The circuit consists of a constant resistor R in series with the temperature sensor T.
# By measuring the voltage over T (Ut), the resistance of T (Rt) is calculated.
# From the resistance of T we obtain the temperature.
#
# ___ _______ + Vtotal
# | |
# Ur | [ ] R
# | |
# --- |______ AD converter
# --- |
# | |
# Ut | [ ] T (KTY81)
# | |
# --- --------- -
#
# NOTE: class name kept as Temperature_Sensor (not CamelCase) for backward
# compatibility with existing callers.
class Temperature_Sensor
  # calibration::  resistance offset (Ohm) added to the computed sensor
  #                resistance; nil is treated as 0.
  # u_total::      supply voltage over the divider (V).
  # resistance_r:: value of the fixed series resistor R (Ohm).
  def initialize(calibration = 0, u_total=3.3, resistance_r = 1200)
    @u_total = u_total
    @r_r = resistance_r
    # The original `if calibration.nil? do ... end else ... end` is a
    # SyntaxError in Ruby; `||` expresses the intended nil fallback.
    @calibration = calibration || 0
    @ad_converter = MCP3208.new(@u_total)
    @temperature_sensor = KTY81_220.new
  end

  # Samples the given ADC channel 50 times, averages the measured voltage
  # and converts it to a temperature via the KTY81 curve.
  def read_sensor(channel = 0)
    u_sum = 0
    50.times { u_sum += @ad_converter.read_value(channel) }
    u_t = u_sum / 50.0
    r_t = calculate_resistance(u_t)
    @temperature_sensor.to_temp(r_t)
  end

  private

  # Derives the sensor resistance Rt from the measured voltage Ut:
  #   Ur = Utotal - Ut,  I = Ur / R,  Rt = Ut / I + calibration
  def calculate_resistance(u_t)
    u_r = @u_total - u_t
    i = u_r / @r_r.to_f
    r_t = u_t / i + @calibration
    # $LOG may be unset when the class is used outside the main program.
    $LOG.debug "Voltage sensor: %f, Resistance sensor: %f " % [u_r, r_t] if $LOG
    r_t
  end
end
62208cbefc8a84d9a7529b5b50afe7ef0248dc9b | 2,415 | # Encoding: utf-8
# Cloud Foundry Java Buildpack
# Copyright 2013 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
require 'component_helper'
require 'java_buildpack/component/mutable_java_home'
require 'java_buildpack/jre/open_jdk_jre'
require 'java_buildpack/jre/memory/weight_balancing_memory_heuristic'
describe JavaBuildpack::Jre::OpenJdkJRE do
  include_context 'component_helper'

  let(:java_home) { JavaBuildpack::Component::MutableJavaHome.new }
  # Stubbed memory heuristic so the expected JVM options are deterministic.
  let(:memory_heuristic) { double('MemoryHeuristic', resolve: %w(opt-1 opt-2)) }

  before do
    allow(JavaBuildpack::Jre::WeightBalancingMemoryHeuristic).to receive(:new).and_return(memory_heuristic)
  end

  it 'detects with id of openjdk_jre-<version>' do
    expect(component.detect).to eq("open-jdk-jre=#{version}")
  end

  it 'extracts Java from a GZipped TAR',
     cache_fixture: 'stub-java.tar.gz' do

    component.detect
    component.compile

    expect(sandbox + 'bin/java').to exist
  end

  it 'adds the JAVA_HOME to java_home' do
    component

    expect(java_home.root).to eq(sandbox)
  end

  it 'adds memory options to java_opts' do
    component.detect
    component.release

    expect(java_opts).to include('opt-1')
    expect(java_opts).to include('opt-2')
  end

  it 'adds OnOutOfMemoryError to java_opts' do
    component.detect
    component.release

    expect(java_opts).to include('-XX:OnOutOfMemoryError=$PWD/.java-buildpack/open_jdk_jre/bin/killjava.sh')
  end

  it 'places the killjava script (with appropriately substituted content) in the diagnostics directory',
     cache_fixture: 'stub-java.tar.gz' do

    component.detect
    component.compile

    expect(sandbox + 'bin/killjava.sh').to exist
  end

  it 'adds java.io.tmpdir to java_opts' do
    component.detect
    component.release

    expect(java_opts).to include('-Djava.io.tmpdir=$TMPDIR')
  end
end
| 28.411765 | 108 | 0.743685 |
f841f809f872512ef6d726051f19f330093a6361 | 651 | require 'test_helper'
require 'fixtures/rails_mail_plugin'
# Verifies that SampleMail conforms to the ActiveModel API contract
# (via ActiveModel::Lint::Tests) and that its model_name behaves.
class ComplianceTest < ActiveSupport::TestCase
  include ActiveModel::Lint::Tests

  def setup
    @model = SampleMail.new
  end

  test "model_name exposes singular and human name" do
    assert_equal "sample_mail", @model.class.model_name.singular
    assert_equal "Sample mail", @model.class.model_name.human
  end

  test "model_name.human uses I18n" do
    begin
      I18n.backend.store_translations :en,
        activemodel: { models: { sample_mail: "My sample Mail"} }
      assert_equal "My sample Mail", @model.class.model_name.human
    ensure
      # Restore the default translations so other tests are unaffected.
      I18n.reload!
    end
  end
end
# Homebrew formula for libical (shared libraries only).
class Libical < Formula
  desc "Implementation of iCalendar protocols and data formats"
  homepage "https://libical.github.io/libical/"
  url "https://github.com/libical/libical/releases/download/v2.0.0/libical-2.0.0.tar.gz"
  sha256 "654c11f759c19237be39f6ad401d917e5a05f36f1736385ed958e60cf21456da"

  bottle do
    sha256 "4b8b3165661fca6ae137559f3b9d0436bee37284ce84c75e9e81677512bacd43" => :sierra
    sha256 "80cd45eebc20492169a98e26c2ac384d9e7d42c60c97dfb31cf15fa3c978ea27" => :el_capitan
    sha256 "f4cbcfb04208a01f1589f119e785c656b74713d033949e8a6a367a759ea142eb" => :yosemite
  end

  depends_on "cmake" => :build

  def install
    # Fix libical-glib build failure due to undefined symbol
    # Upstream issue https://github.com/libical/libical/issues/225
    inreplace "src/libical/icallangbind.h", "*callangbind_quote_as_ical_r(",
              "*icallangbind_quote_as_ical_r("

    # Out-of-source CMake build.
    mkdir "build" do
      system "cmake", "..", "-DSHARED_ONLY=true", *std_cmake_args
      system "make", "install"
    end
  end
end
| 39.222222 | 92 | 0.7356 |
333fb742e484b37173d9b8a9185317b2bdf25d56 | 22,656 | require 'stringio'
require 'uri'
module ActionController
module Integration #:nodoc:
# An integration Session instance represents a set of requests and responses
    # performed sequentially by some virtual user. Because you can instantiate
# multiple sessions and run them side-by-side, you can also mimic (to some
# limited extent) multiple simultaneous users interacting with your system.
#
# Typically, you will instantiate a new session using IntegrationTest#open_session,
# rather than instantiating Integration::Session directly.
class Session
include Test::Unit::Assertions
include ActionController::TestCase::Assertions
include ActionController::TestProcess
# The integer HTTP status code of the last request.
attr_reader :status
# The status message that accompanied the status code of the last request.
attr_reader :status_message
# The URI of the last request.
attr_reader :path
# The hostname used in the last request.
attr_accessor :host
# The remote_addr used in the last request.
attr_accessor :remote_addr
# The Accept header to send.
attr_accessor :accept
# A map of the cookies returned by the last response, and which will be
# sent with the next request.
attr_reader :cookies
# A map of the headers returned by the last response.
attr_reader :headers
# A reference to the controller instance used by the last request.
attr_reader :controller
# A reference to the request instance used by the last request.
attr_reader :request
# A reference to the response instance used by the last request.
attr_reader :response
# A running counter of the number of requests processed.
attr_accessor :request_count
class MultiPartNeededException < Exception
end
# Create and initialize a new Session instance.
def initialize
reset!
end
# Resets the instance. This can be used to reset the state information
# in an existing session instance, so it can be used from a clean-slate
# condition.
#
# session.reset!
def reset!
@status = @path = @headers = nil
@result = @status_message = nil
@https = false
@cookies = {}
@controller = @request = @response = nil
@request_count = 0
self.host = "www.example.com"
self.remote_addr = "127.0.0.1"
self.accept = "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5"
unless defined? @named_routes_configured
# install the named routes in this session instance.
klass = class<<self; self; end
Routing::Routes.install_helpers(klass)
# the helpers are made protected by default--we make them public for
# easier access during testing and troubleshooting.
klass.module_eval { public *Routing::Routes.named_routes.helpers }
@named_routes_configured = true
end
end
# Specify whether or not the session should mimic a secure HTTPS request.
#
# session.https!
# session.https!(false)
def https!(flag=true)
@https = flag
end
# Return +true+ if the session is mimicking a secure HTTPS request.
#
# if session.https?
# ...
# end
def https?
@https
end
# Set the host name to use in the next request.
#
# session.host! "www.example.com"
def host!(name)
@host = name
end
# Follow a single redirect response. If the last response was not a
# redirect, an exception will be raised. Otherwise, the redirect is
# performed on the location header.
def follow_redirect!
raise "not a redirect! #{@status} #{@status_message}" unless redirect?
get(interpret_uri(headers['location'].first))
status
end
# Performs a request using the specified method, following any subsequent
# redirect. Note that the redirects are followed until the response is
# not a redirect--this means you may run into an infinite loop if your
# redirect loops back to itself.
def request_via_redirect(http_method, path, parameters = nil, headers = nil)
send(http_method, path, parameters, headers)
follow_redirect! while redirect?
status
end
# Performs a GET request, following any subsequent redirect.
# See +request_via_redirect+ for more information.
def get_via_redirect(path, parameters = nil, headers = nil)
request_via_redirect(:get, path, parameters, headers)
end
# Performs a POST request, following any subsequent redirect.
# See +request_via_redirect+ for more information.
def post_via_redirect(path, parameters = nil, headers = nil)
request_via_redirect(:post, path, parameters, headers)
end
# Performs a PUT request, following any subsequent redirect.
# See +request_via_redirect+ for more information.
def put_via_redirect(path, parameters = nil, headers = nil)
request_via_redirect(:put, path, parameters, headers)
end
# Performs a DELETE request, following any subsequent redirect.
# See +request_via_redirect+ for more information.
def delete_via_redirect(path, parameters = nil, headers = nil)
request_via_redirect(:delete, path, parameters, headers)
end
# Returns +true+ if the last response was a redirect.
def redirect?
status/100 == 3
end
# Performs a GET request with the given parameters.
#
# - +path+: The URI (as a String) on which you want to perform a GET request.
# - +parameters+: The HTTP parameters that you want to pass. This may be +nil+,
# a Hash, or a String that is appropriately encoded
# (<tt>application/x-www-form-urlencoded</tt> or <tt>multipart/form-data</tt>).
# - +headers+: Additional HTTP headers to pass, as a Hash. The keys will
# automatically be upcased, with the prefix 'HTTP_' added if needed.
#
# This method returns an AbstractResponse object, which one can use to inspect
# the details of the response. Furthermore, if this method was called from an
# ActionController::IntegrationTest object, then that object's <tt>@response</tt>
# instance variable will point to the same response object.
#
# You can also perform POST, PUT, DELETE, and HEAD requests with +post+,
# +put+, +delete+, and +head+.
def get(path, parameters = nil, headers = nil)
process :get, path, parameters, headers
end
# Performs a POST request with the given parameters. See get() for more details.
def post(path, parameters = nil, headers = nil)
process :post, path, parameters, headers
end
# Performs a PUT request with the given parameters. See get() for more details.
def put(path, parameters = nil, headers = nil)
process :put, path, parameters, headers
end
# Performs a DELETE request with the given parameters. See get() for more details.
def delete(path, parameters = nil, headers = nil)
process :delete, path, parameters, headers
end
# Performs a HEAD request with the given parameters. See get() for more details.
def head(path, parameters = nil, headers = nil)
process :head, path, parameters, headers
end
# Performs an XMLHttpRequest request with the given parameters, mirroring
# a request from the Prototype library.
#
# The request_method is :get, :post, :put, :delete or :head; the
# parameters are +nil+, a hash, or a url-encoded or multipart string;
# the headers are a hash. Keys are automatically upcased and prefixed
# with 'HTTP_' if not already.
def xml_http_request(request_method, path, parameters = nil, headers = nil)
headers ||= {}
headers['X-Requested-With'] = 'XMLHttpRequest'
headers['Accept'] ||= 'text/javascript, text/html, application/xml, text/xml, */*'
process(request_method, path, parameters, headers)
end
alias xhr :xml_http_request
# Returns the URL for the given options, according to the rules specified
# in the application's routes.
def url_for(options)
controller ? controller.url_for(options) : generic_url_rewriter.rewrite(options)
end
private
# Tailors the session based on the given URI, setting the HTTPS value
# and the hostname.
def interpret_uri(path)
location = URI.parse(path)
https! URI::HTTPS === location if location.scheme
host! location.host if location.host
location.query ? "#{location.path}?#{location.query}" : location.path
end
# Performs the actual request.
def process(method, path, parameters = nil, headers = nil)
data = requestify(parameters)
path = interpret_uri(path) if path =~ %r{://}
path = "/#{path}" unless path[0] == ?/
@path = path
env = {}
if method == :get
env["QUERY_STRING"] = data
data = nil
end
env.update(
"REQUEST_METHOD" => method.to_s.upcase,
"REQUEST_URI" => path,
"HTTP_HOST" => host,
"REMOTE_ADDR" => remote_addr,
"SERVER_PORT" => (https? ? "443" : "80"),
"CONTENT_TYPE" => "application/x-www-form-urlencoded",
"CONTENT_LENGTH" => data ? data.length.to_s : nil,
"HTTP_COOKIE" => encode_cookies,
"HTTPS" => https? ? "on" : "off",
"HTTP_ACCEPT" => accept
)
(headers || {}).each do |key, value|
key = key.to_s.upcase.gsub(/-/, "_")
key = "HTTP_#{key}" unless env.has_key?(key) || key =~ /^HTTP_/
env[key] = value
end
unless ActionController::Base.respond_to?(:clear_last_instantiation!)
ActionController::Base.module_eval { include ControllerCapture }
end
ActionController::Base.clear_last_instantiation!
env['rack.input'] = data.is_a?(IO) ? data : StringIO.new(data || '')
@status, @headers, result_body = ActionController::Dispatcher.new.mark_as_test_request!.call(env)
@request_count += 1
@controller = ActionController::Base.last_instantiation
@request = @controller.request
@response = @controller.response
# Decorate the response with the standard behavior of the TestResponse
# so that things like assert_response can be used in integration
# tests.
@response.extend(TestResponseBehavior)
@html_document = nil
# Inject status back in for backwords compatibility with CGI
@headers['Status'] = @status
@status, @status_message = @status.split(/ /)
@status = @status.to_i
cgi_headers = Hash.new { |h,k| h[k] = [] }
@headers.each do |key, value|
cgi_headers[key.downcase] << value
end
cgi_headers['set-cookie'] = cgi_headers['set-cookie'].first
@headers = cgi_headers
@response.headers['cookie'] ||= []
(@headers['set-cookie'] || []).each do |cookie|
name, value = cookie.match(/^([^=]*)=([^;]*);/)[1,2]
@cookies[name] = value
# Fake CGI cookie header
# DEPRECATE: Use response.headers["Set-Cookie"] instead
@response.headers['cookie'] << CGI::Cookie::new("name" => name, "value" => value)
end
return status
rescue MultiPartNeededException
boundary = "----------XnJLe9ZIbbGUYtzPQJ16u1"
status = process(method, path, multipart_body(parameters, boundary), (headers || {}).merge({"CONTENT_TYPE" => "multipart/form-data; boundary=#{boundary}"}))
return status
end
# Encode the cookies hash in a format suitable for passing to a
# request.
def encode_cookies
cookies.inject("") do |string, (name, value)|
string << "#{name}=#{value}; "
end
end
# Get a temporary URL writer object
def generic_url_rewriter
env = {
'REQUEST_METHOD' => "GET",
'QUERY_STRING' => "",
"REQUEST_URI" => "/",
"HTTP_HOST" => host,
"SERVER_PORT" => https? ? "443" : "80",
"HTTPS" => https? ? "on" : "off"
}
ActionController::UrlRewriter.new(ActionController::RackRequest.new(env), {})
end
def name_with_prefix(prefix, name)
prefix ? "#{prefix}[#{name}]" : name.to_s
end
# Convert the given parameters to a request string. The parameters may
# be a string, +nil+, or a Hash.
def requestify(parameters, prefix=nil)
if TestUploadedFile === parameters
raise MultiPartNeededException
elsif Hash === parameters
return nil if parameters.empty?
parameters.map { |k,v| requestify(v, name_with_prefix(prefix, k)) }.join("&")
elsif Array === parameters
parameters.map { |v| requestify(v, name_with_prefix(prefix, "")) }.join("&")
elsif prefix.nil?
parameters
else
"#{CGI.escape(prefix)}=#{CGI.escape(parameters.to_s)}"
end
end
def multipart_requestify(params, first=true)
returning Hash.new do |p|
params.each do |key, value|
k = first ? CGI.escape(key.to_s) : "[#{CGI.escape(key.to_s)}]"
if Hash === value
multipart_requestify(value, false).each do |subkey, subvalue|
p[k + subkey] = subvalue
end
else
p[k] = value
end
end
end
end
def multipart_body(params, boundary)
multipart_requestify(params).map do |key, value|
if value.respond_to?(:original_filename)
File.open(value.path) do |f|
f.set_encoding(Encoding::BINARY) if f.respond_to?(:set_encoding)
<<-EOF
--#{boundary}\r
Content-Disposition: form-data; name="#{key}"; filename="#{CGI.escape(value.original_filename)}"\r
Content-Type: #{value.content_type}\r
Content-Length: #{File.stat(value.path).size}\r
\r
#{f.read}\r
EOF
end
else
<<-EOF
--#{boundary}\r
Content-Disposition: form-data; name="#{key}"\r
\r
#{value}\r
EOF
end
end.join("")+"--#{boundary}--\r"
end
end
# A module used to extend ActionController::Base, so that integration tests
# can capture the controller used to satisfy a request.
module ControllerCapture #:nodoc:
def self.included(base)
base.extend(ClassMethods)
base.class_eval do
class << self
alias_method_chain :new, :capture
end
end
end
module ClassMethods #:nodoc:
mattr_accessor :last_instantiation
def clear_last_instantiation!
self.last_instantiation = nil
end
def new_with_capture(*args)
controller = new_without_capture(*args)
self.last_instantiation ||= controller
controller
end
end
end
module Runner
# Reset the current session. This is useful for testing multiple sessions
# in a single test case.
def reset!
@integration_session = open_session
end
%w(get post put head delete cookies assigns
xml_http_request xhr get_via_redirect post_via_redirect).each do |method|
define_method(method) do |*args|
reset! unless @integration_session
# reset the html_document variable, but only for new get/post calls
@html_document = nil unless %w(cookies assigns).include?(method)
returning @integration_session.__send__(method, *args) do
copy_session_variables!
end
end
end
# Open a new session instance. If a block is given, the new session is
# yielded to the block before being returned.
#
# session = open_session do |sess|
# sess.extend(CustomAssertions)
# end
#
# By default, a single session is automatically created for you, but you
# can use this method to open multiple sessions that ought to be tested
# simultaneously.
def open_session
session = Integration::Session.new
# delegate the fixture accessors back to the test instance
extras = Module.new { attr_accessor :delegate, :test_result }
if self.class.respond_to?(:fixture_table_names)
self.class.fixture_table_names.each do |table_name|
name = table_name.tr(".", "_")
next unless respond_to?(name)
extras.__send__(:define_method, name) { |*args| delegate.send(name, *args) }
end
end
# delegate add_assertion to the test case
extras.__send__(:define_method, :add_assertion) { test_result.add_assertion }
session.extend(extras)
session.delegate = self
session.test_result = @_result
yield session if block_given?
session
end
# Copy the instance variables from the current session instance into the
# test instance.
def copy_session_variables! #:nodoc:
return unless @integration_session
%w(controller response request).each do |var|
instance_variable_set("@#{var}", @integration_session.__send__(var))
end
end
# Delegate unhandled messages to the current session instance.
def method_missing(sym, *args, &block)
reset! unless @integration_session
returning @integration_session.__send__(sym, *args, &block) do
copy_session_variables!
end
end
end
end
# An IntegrationTest is one that spans multiple controllers and actions,
# tying them all together to ensure they work together as expected. It tests
# more completely than either unit or functional tests do, exercising the
# entire stack, from the dispatcher to the database.
#
# At its simplest, you simply extend IntegrationTest and write your tests
# using the get/post methods:
#
# require "#{File.dirname(__FILE__)}/test_helper"
#
# class ExampleTest < ActionController::IntegrationTest
# fixtures :people
#
# def test_login
# # get the login page
# get "/login"
# assert_equal 200, status
#
# # post the login and follow through to the home page
# post "/login", :username => people(:jamis).username,
# :password => people(:jamis).password
# follow_redirect!
# assert_equal 200, status
# assert_equal "/home", path
# end
# end
#
# However, you can also have multiple session instances open per test, and
# even extend those instances with assertions and methods to create a very
# powerful testing DSL that is specific for your application. You can even
# reference any named routes you happen to have defined!
#
# require "#{File.dirname(__FILE__)}/test_helper"
#
# class AdvancedTest < ActionController::IntegrationTest
# fixtures :people, :rooms
#
# def test_login_and_speak
# jamis, david = login(:jamis), login(:david)
# room = rooms(:office)
#
# jamis.enter(room)
# jamis.speak(room, "anybody home?")
#
# david.enter(room)
# david.speak(room, "hello!")
# end
#
# private
#
# module CustomAssertions
# def enter(room)
# # reference a named route, for maximum internal consistency!
# get(room_url(:id => room.id))
# assert(...)
# ...
# end
#
# def speak(room, message)
# xml_http_request "/say/#{room.id}", :message => message
# assert(...)
# ...
# end
# end
#
# def login(who)
# open_session do |sess|
# sess.extend(CustomAssertions)
# who = people(who)
# sess.post "/login", :username => who.username,
# :password => who.password
# assert(...)
# end
# end
# end
class IntegrationTest < ActiveSupport::TestCase
include Integration::Runner
# Work around a bug in test/unit caused by the default test being named
# as a symbol (:default_test), which causes regex test filters
# (like "ruby test.rb -n /foo/") to fail because =~ doesn't work on
# symbols.
def initialize(name) #:nodoc:
super(name.to_s)
end
# Work around test/unit's requirement that every subclass of TestCase have
# at least one test method. Note that this implementation extends to all
# subclasses, as well, so subclasses of IntegrationTest may also exist
# without any test methods.
def run(*args) #:nodoc:
return if @method_name == "default_test"
super
end
# Because of how use_instantiated_fixtures and use_transactional_fixtures
# are defined, we need to treat them as special cases. Otherwise, users
# would potentially have to set their values for both Test::Unit::TestCase
# ActionController::IntegrationTest, since by the time the value is set on
# TestCase, IntegrationTest has already been defined and cannot inherit
# changes to those variables. So, we make those two attributes copy-on-write.
class << self
def use_transactional_fixtures=(flag) #:nodoc:
@_use_transactional_fixtures = true
@use_transactional_fixtures = flag
end
def use_instantiated_fixtures=(flag) #:nodoc:
@_use_instantiated_fixtures = true
@use_instantiated_fixtures = flag
end
def use_transactional_fixtures #:nodoc:
@_use_transactional_fixtures ?
@use_transactional_fixtures :
superclass.use_transactional_fixtures
end
def use_instantiated_fixtures #:nodoc:
@_use_instantiated_fixtures ?
@use_instantiated_fixtures :
superclass.use_instantiated_fixtures
end
end
end
end
| 36.076433 | 166 | 0.616879 |
e9f13a0ef352df4ac1738abef7af63c1f4ddbb56 | 1,741 | # Copyright (c) 2021-2022 Andy Maleh
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require 'glimmer/libui/control_proxy'
require 'glimmer/libui/control_proxy/column'
module Glimmer
  module LibUI
    class ControlProxy
      module Column
        # Proxy for LibUI image column objects
        #
        # Follows the Proxy Design Pattern
        class ImageColumnProxy < ControlProxy
          class << self
            # Fallback cell value used when no image is supplied for a row
            # (the library-wide ICON constant).
            def default_value
              Glimmer::LibUI::ICON
            end
          end
          include Column
          private
          # Registers this image column on the parent table control.
          # NOTE(review): `name` and `column_index` are presumably provided by
          # ControlProxy / the Column mixin — confirm against those sources.
          def build_control
            @parent_proxy.append_image_column(name, column_index)
          end
        end
      end
    end
  end
end
| 34.137255 | 72 | 0.698449 |
08dbeb6fa3272a66ff76d5456cabf49b5ae93810 | 1,136 | require 'spec_helper'
RSpec.describe RenderWithView do
  describe "#render_with_view" do
    it "calls render with template and assigned view" do
      user = { id: 1 }
      ctrl = FakeController.new
      ctrl.render_with_view :index, user: user
      # Only the template is asserted here; use a splat (consistent with the
      # example below) instead of binding an unused `args` local.
      tmpl, * = ctrl.calls.last
      expect(tmpl).to eq :index
    end
    it "defaults to action_name template" do
      user = { id: 1 }
      ctrl = FakeController.new
      ctrl.render_with_view user: user
      tmpl, * = ctrl.calls.last
      expect(tmpl).to eq :new
    end
    it "saves to an instance var behind the scenes" do
      user = { id: 1 }
      ctrl = FakeController.new
      ctrl.render_with_view :index, user: user
      ivar = ctrl.instance_variable_get(:@__view__)
      expect(ivar).to be_a RenderWithView::View
      expect(ivar.user).to eq user
    end
    it "includes options" do
      user = { id: 1 }
      opts = { status: 401 }
      ctrl = FakeController.new
      ctrl.render_with_view(:edit, {user: user}, opts)
      # Use a distinct name for the recorded options so the `opts` passed in
      # above is not shadowed/overwritten mid-example.
      tmpl, rendered_opts = ctrl.calls.last
      expect(tmpl).to eq :edit
      expect(rendered_opts[:status]).to eq(401)
    end
  end
end
| 22.72 | 56 | 0.623239 |
d58180c3b8b18a80b32627d5b739157713c714c2 | 1,721 | require 'test_helper'
class UsersSignupTest < ActionDispatch::IntegrationTest
  # Start each test with an empty mail queue so delivery counts are exact.
  def setup
    ActionMailer::Base.deliveries.clear
  end
  # Invalid data must not create a user and must re-render the signup form
  # with error markup.
  test "invalid signup information" do
    get signup_path
    assert_no_difference 'User.count' do
      post users_path, params: { user: { name: "",
                                         email: "user@invalid",
                                         password: "foo",
                                         password_confirmation: "bar" } }
    end
    assert_template 'users/new'
    assert_select 'div#error_explanation'
    assert_select 'div.field_with_errors'
  end
  # Happy path: signup creates the user, sends exactly one activation email,
  # and login only succeeds after the correct activation link is followed.
  test "valid signup information" do
    get signup_path
    assert_difference 'User.count', 1 do
      post users_path, params: { user: { name: "Example User",
                                         email: "[email protected]",
                                         password: "foobar",
                                         password_confirmation: "foobar" } }
    end
    assert_equal 1, ActionMailer::Base.deliveries.size
    user = assigns(:user)
    assert_not user.activated?
    # Try to log in before activation; should be rejected.
    log_in_as(user)
    assert_not is_logged_in?
    # Invalid activation token: stays logged out.
    get edit_account_activation_path("invalid token", email: user.email)
    assert_not is_logged_in?
    # Valid token but wrong email: stays logged out.
    get edit_account_activation_path(user.activation_token, email: "wrong")
    assert_not is_logged_in?
    # Valid activation token and matching email: account activates.
    get edit_account_activation_path(user.activation_token, email: user.email)
    assert user.reload.activated?
    follow_redirect!
    assert_template 'users/show'
    assert is_logged_in?
  end
end
| 34.42 | 78 | 0.61011 |
1807be0c452fae905cd83d6913af3d7c1ca222af | 1,502 | class PostsController < ApplicationController
before_action :set_post, only: [:show, :edit, :update, :destroy]
def index
@posts = Post.all
end
def show
end
def new
@post = Post.new
end
def edit
end
def create
@post = Post.new(post_params)
respond_to do |format|
if @post.save
format.html { redirect_to @post, notice: 'Post was successfully created.' }
format.json { render :show, status: :created, location: @post }
else
format.html { render :new }
format.json { render json: @post.errors, status: :unprocessable_entity }
end
end
end
def update
respond_to do |format|
if @post.update(post_params)
format.html { redirect_to @post, notice: 'Post was successfully updated.' }
format.json { render :show, status: :ok, location: @post }
else
format.html { render :edit }
format.json { render json: @post.errors, status: :unprocessable_entity }
end
end
end
def destroy
@post.destroy
respond_to do |format|
format.html { redirect_to posts_url, notice: 'Post was successfully destroyed.' }
format.json { head :no_content }
end
end
private
def set_post
@post = Post.find(params[:id])
end
def post_params
params.require(:post).permit(:title, :description)
end
end | 23.84127 | 89 | 0.577896 |
ed9c3e2216fad74073cc5621a860a45a8bb89d88 | 926 | # coding: utf-8
require File.expand_path('../lib/styledown/source/version', __FILE__)
# Gem metadata for styledown2-source.
Gem::Specification.new do |spec|
  spec.name          = 'styledown2-source'
  spec.version       = Styledown::Source::VERSION
  spec.authors       = ['Rico Sta. Cruz']
  spec.email         = ['[email protected]']
  spec.summary       = 'Write maintainable CSS styleguides using Markdown'
  spec.description   = 'Styledown lets you write maintainable CSS styleguides using Markdown.'
  spec.homepage      = 'https://github.com/styledown/styledown2'
  spec.license       = 'MIT'
  # spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  # Files are listed explicitly instead of via the usual `git ls-files` glob
  # (kept commented out above for reference).
  spec.files = [
    'lib/styledown/source/version.rb',
    'lib/styledown/source.rb'
  ]
  spec.bindir        = 'exe'
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']
end
| 38.583333 | 106 | 0.638229 |
1af9c1c2822cff33ed32b9e3248f9a5953f8e2aa | 553 | module Rubillow
module Models
# Chart for a property
class PropertyChart < Chart
# @return [String] url for chart
attr_accessor :graphs_and_data
# Returns HTML for the chart.
# @return [String] chart HTML.
def to_html
"<a href='#{@graphs_and_data}'>" + super + "</a>"
end
protected
# @private
def parse
super
return if !success?
@graphs_and_data = @parser.xpath('//response/graphsanddata').text
end
end
end
end | 21.269231 | 73 | 0.553345 |
7a7f1e0831e183c6138eba3decc2f2b817bd0872 | 9,153 | #
# Author:: AJ Christensen (<[email protected]>)
# Copyright:: Copyright 2008-2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
describe Chef::Provider::Service::Invokercd, "load_current_resource" do
before(:each) do
@node = Chef::Node.new
@node.automatic_attrs[:command] = { :ps => "ps -ef" }
@events = Chef::EventDispatch::Dispatcher.new
@run_context = Chef::RunContext.new(@node, {}, @events)
@new_resource = Chef::Resource::Service.new("chef")
@current_resource = Chef::Resource::Service.new("chef")
@provider = Chef::Provider::Service::Invokercd.new(@new_resource, @run_context)
allow(Chef::Resource::Service).to receive(:new).and_return(@current_resource)
@stdout = StringIO.new(<<-PS)
aj 7842 5057 0 21:26 pts/2 00:00:06 vi init.rb
aj 7903 5016 0 21:26 pts/5 00:00:00 /bin/bash
aj 8119 6041 0 21:34 pts/3 00:00:03 vi init_service_spec.rb
PS
@status = double("Status", :exitstatus => 0, :stdout => @stdout)
allow(@provider).to receive(:shell_out!).and_return(@status)
end
it "should create a current resource with the name of the new resource" do
@provider.load_current_resource
expect(@provider.current_resource).to equal(@current_resource)
end
it "should set the current resources service name to the new resources service name" do
@provider.load_current_resource
expect(@current_resource.service_name).to eq("chef")
end
describe "when the service supports status" do
before do
@new_resource.supports({ :status => true })
end
it "should run '/usr/sbin/invoke-rc.d service_name status'" do
expect(@provider).to receive(:shell_out).with("/usr/sbin/invoke-rc.d #{@current_resource.service_name} status").and_return(@status)
@provider.load_current_resource
end
it "should set running to true if the status command returns 0" do
allow(@provider).to receive(:shell_out).with("/usr/sbin/invoke-rc.d #{@current_resource.service_name} status").and_return(@status)
@provider.load_current_resource
expect(@current_resource.running).to be_truthy
end
it "should set running to false if the status command returns anything except 0" do
allow(@status).to receive(:exitstatus).and_return(1)
allow(@provider).to receive(:shell_out).with("/usr/sbin/invoke-rc.d #{@current_resource.service_name} status").and_return(@status)
@provider.load_current_resource
expect(@current_resource.running).to be_falsey
end
it "should set running to false if the status command raises" do
allow(@provider).to receive(:shell_out).with("/usr/sbin/invoke-rc.d #{@current_resource.service_name} status").and_raise(Mixlib::ShellOut::ShellCommandFailed)
@provider.load_current_resource
expect(@current_resource.running).to be_falsey
end
end
describe "when a status command has been specified" do
before do
@new_resource.status_command("/usr/sbin/invoke-rc.d chefhasmonkeypants status")
end
it "should run the services status command if one has been specified" do
expect(@provider).to receive(:shell_out).with("/usr/sbin/invoke-rc.d chefhasmonkeypants status").and_return(@status)
@provider.load_current_resource
end
end
describe "when the node has not specified a ps command" do
it "should raise error if the node has a nil ps attribute and no other means to get status" do
@node.automatic_attrs[:command] = { :ps => nil }
@provider.action = :start
@provider.define_resource_requirements
expect { @provider.process_resource_requirements }.to raise_error(Chef::Exceptions::Service)
end
it "should raise error if the node has an empty ps attribute and no other means to get status" do
@node.automatic_attrs[:command] = { :ps => "" }
@provider.action = :start
@provider.define_resource_requirements
expect { @provider.process_resource_requirements }.to raise_error(Chef::Exceptions::Service)
end
end
describe "when we have a 'ps' attribute" do
it "should shell_out! the node's ps command" do
@status = double("Status", :exitstatus => 0, :stdout => @stdout)
expect(@provider).to receive(:shell_out!).with(@node[:command][:ps]).and_return(@status)
@provider.load_current_resource
end
it "should set running to true if the regex matches the output" do
@stdout = StringIO.new(<<-RUNNING_PS)
aj 7842 5057 0 21:26 pts/2 00:00:06 chef
aj 7842 5057 0 21:26 pts/2 00:00:06 poos
RUNNING_PS
@status = double("Status", :exitstatus => 0, :stdout => @stdout)
expect(@provider).to receive(:shell_out!).and_return(@status)
@provider.load_current_resource
expect(@current_resource.running).to be_truthy
end
it "should set running to false if the regex doesn't match" do
@status = double("Status", :exitstatus => 0, :stdout => @stdout)
expect(@provider).to receive(:shell_out!).and_return(@status)
@provider.load_current_resource
expect(@current_resource.running).to be_falsey
end
it "should raise an exception if ps fails" do
allow(@provider).to receive(:shell_out!).and_raise(Mixlib::ShellOut::ShellCommandFailed)
@provider.action = :start
@provider.load_current_resource
@provider.define_resource_requirements
expect { @provider.process_resource_requirements }.to raise_error(Chef::Exceptions::Service)
end
end
it "should return the current resource" do
expect(@provider.load_current_resource).to eql(@current_resource)
end
describe "when starting the service" do
it "should call the start command if one is specified" do
@new_resource.start_command("/usr/sbin/invoke-rc.d chef startyousillysally")
expect(@provider).to receive(:shell_out_with_systems_locale!).with("/usr/sbin/invoke-rc.d chef startyousillysally")
@provider.start_service()
end
it "should call '/usr/sbin/invoke-rc.d service_name start' if no start command is specified" do
expect(@provider).to receive(:shell_out_with_systems_locale!).with("/usr/sbin/invoke-rc.d #{@new_resource.service_name} start")
@provider.start_service()
end
end
describe Chef::Provider::Service::Invokercd, "stop_service" do
it "should call the stop command if one is specified" do
@new_resource.stop_command("/usr/sbin/invoke-rc.d chef itoldyoutostop")
expect(@provider).to receive(:shell_out_with_systems_locale!).with("/usr/sbin/invoke-rc.d chef itoldyoutostop")
@provider.stop_service()
end
it "should call '/usr/sbin/invoke-rc.d service_name stop' if no stop command is specified" do
expect(@provider).to receive(:shell_out_with_systems_locale!).with("/usr/sbin/invoke-rc.d #{@new_resource.service_name} stop")
@provider.stop_service()
end
end
describe "when restarting a service" do
it "should call 'restart' on the service_name if the resource supports it" do
@new_resource.supports({ :restart => true })
expect(@provider).to receive(:shell_out_with_systems_locale!).with("/usr/sbin/invoke-rc.d #{@new_resource.service_name} restart")
@provider.restart_service()
end
it "should call the restart_command if one has been specified" do
@new_resource.restart_command("/usr/sbin/invoke-rc.d chef restartinafire")
expect(@provider).to receive(:shell_out_with_systems_locale!).with("/usr/sbin/invoke-rc.d #{@new_resource.service_name} restartinafire")
@provider.restart_service()
end
it "should just call stop, then start when the resource doesn't support restart and no restart_command is specified" do
expect(@provider).to receive(:stop_service)
expect(@provider).to receive(:sleep).with(1)
expect(@provider).to receive(:start_service)
@provider.restart_service()
end
end
describe "when reloading a service" do
it "should call 'reload' on the service if it supports it" do
@new_resource.supports({ :reload => true })
expect(@provider).to receive(:shell_out_with_systems_locale!).with("/usr/sbin/invoke-rc.d chef reload")
@provider.reload_service()
end
it "should should run the user specified reload command if one is specified and the service doesn't support reload" do
@new_resource.reload_command("/usr/sbin/invoke-rc.d chef lollerpants")
expect(@provider).to receive(:shell_out_with_systems_locale!).with("/usr/sbin/invoke-rc.d chef lollerpants")
@provider.reload_service()
end
end
end
| 43.174528 | 164 | 0.713427 |
ab64b6e6150452e17de1555819824b2ba453626f | 4,306 | #!/usr/local/bin/ruby
# frozen_string_literal: true
require 'fluent/plugin/filter'
module Fluent::Plugin
require 'logger'
require 'yajl/json_gem'
require_relative 'oms_common'
require_relative "ApplicationInsightsUtility"
Dir[File.join(__dir__, './health', '*.rb')].each { |file| require file }
class CAdvisor2ContainerHealthFilter < Filter
include HealthModel
Fluent::Plugin.register_filter('cadvisor_health_container', self)
config_param :log_path, :string, :default => '/var/opt/microsoft/docker-cimprov/log/health_monitors.log'
config_param :metrics_to_collect, :string, :default => 'cpuUsageNanoCores,memoryRssBytes'
config_param :container_resource_refresh_interval_minutes, :integer, :default => 5
@@object_name_k8s_container = 'K8SContainer'
@@counter_name_cpu = 'cpuusagenanocores'
@@counter_name_memory_rss = 'memoryrssbytes'
@@cluster_health_model_enabled = HealthMonitorUtils.is_cluster_health_model_enabled
def initialize
begin
super
@metrics_to_collect_hash = {}
@formatter = HealthContainerCpuMemoryRecordFormatter.new
rescue => e
@log.info "Error in filter_cadvisor_health_container initialize #{e.backtrace}"
ApplicationInsightsUtility.sendExceptionTelemetry(e, {"FeatureArea" => "Health"})
end
end
def configure(conf)
begin
super
@log = HealthMonitorUtils.get_log_handle
@log.debug {'Starting filter_cadvisor2health plugin'}
rescue => e
@log.info "Error in filter_cadvisor_health_container configure #{e.backtrace}"
ApplicationInsightsUtility.sendExceptionTelemetry(e, {"FeatureArea" => "Health"})
end
end
def start
begin
super
@metrics_to_collect_hash = HealthMonitorUtils.build_metrics_hash(@metrics_to_collect)
ApplicationInsightsUtility.sendCustomEvent("filter_cadvisor_health_container Plugin Start", {})
rescue => e
@log.info "Error in filter_cadvisor_health_container start #{e.backtrace}"
ApplicationInsightsUtility.sendExceptionTelemetry(e, {"FeatureArea" => "Health"})
end
end
def filter_stream(tag, es)
if !@@cluster_health_model_enabled
@log.info "Cluster Health Model disabled in filter_cadvisor_health_container"
return Fluent::MultiEventStream.new
end
new_es = Fluent::MultiEventStream.new
records_count = 0
es.each { |time, record|
begin
filtered_record = filter(tag, time, record)
if !filtered_record.nil?
new_es.add(time, filtered_record)
records_count += 1
end
rescue => e
@log.info "Error in filter_cadvisor_health_container filter_stream #{e.backtrace}"
ApplicationInsightsUtility.sendExceptionTelemetry(e, {"FeatureArea" => "Health"})
end
}
@log.debug "filter_cadvisor_health_container Records Count #{records_count}"
new_es
end
def filter(tag, time, record)
begin
if record.key?("MonitorLabels")
return record
end
object_name = record['ObjectName']
counter_name = JSON.parse(record['json_Collections'])[0]['CounterName'].downcase
if @metrics_to_collect_hash.key?(counter_name)
if object_name == @@object_name_k8s_container
return @formatter.get_record_from_cadvisor_record(record)
end
end
return nil
rescue => e
@log.debug "Error in filter #{e}"
@log.debug "record #{record}"
@log.debug "backtrace #{e.backtrace}"
ApplicationInsightsUtility.sendExceptionTelemetry(e, {"FeatureArea" => "Health"})
return nil
end
end
end
end
| 40.242991 | 112 | 0.595216 |
1dc2b602eee18486a5fad1d498897f2643a18e49 | 301 | require 'forked/version'
require 'forked/worker'
require 'forked/retry_strategies/always'
require 'forked/retry_strategies/exponential_backoff'
require 'forked/retry_strategies/exponential_backoff_with_limit'
require 'forked/process_manager'
require 'forked/with_graceful_shutdown'
module Forked
end
| 27.363636 | 64 | 0.860465 |
1dffa2dae7421d7257c2180936280c750ea03e0b | 53,893 | # frozen_string_literal: true
require "active_record/migration/join_table"
require "active_support/core_ext/string/access"
require "digest/sha2"
module ActiveRecord
module ConnectionAdapters # :nodoc:
module SchemaStatements
include ActiveRecord::Migration::JoinTable
# Returns a hash of mappings from the abstract data types to the native
# database types. See TableDefinition#column for details on the recognized
# abstract data types.
def native_database_types
{}
end
def table_options(table_name)
nil
end
# Returns the table comment that's stored in database metadata.
def table_comment(table_name)
nil
end
# Truncates a table alias according to the limits of the current adapter.
def table_alias_for(table_name)
table_name[0...table_alias_length].tr(".", "_")
end
# Returns the relation names useable to back Active Record models.
# For most adapters this means all #tables and #views.
def data_sources
query_values(data_source_sql, "SCHEMA")
rescue NotImplementedError
tables | views
end
# Checks to see if the data source +name+ exists on the database.
#
# data_source_exists?(:ebooks)
#
def data_source_exists?(name)
query_values(data_source_sql(name), "SCHEMA").any? if name.present?
rescue NotImplementedError
data_sources.include?(name.to_s)
end
# Returns an array of table names defined in the database.
def tables
query_values(data_source_sql(type: "BASE TABLE"), "SCHEMA")
end
# Checks to see if the table +table_name+ exists on the database.
#
# table_exists?(:developers)
#
def table_exists?(table_name)
query_values(data_source_sql(table_name, type: "BASE TABLE"), "SCHEMA").any? if table_name.present?
rescue NotImplementedError
tables.include?(table_name.to_s)
end
# Returns an array of view names defined in the database.
def views
query_values(data_source_sql(type: "VIEW"), "SCHEMA")
end
# Checks to see if the view +view_name+ exists on the database.
#
# view_exists?(:ebooks)
#
def view_exists?(view_name)
query_values(data_source_sql(view_name, type: "VIEW"), "SCHEMA").any? if view_name.present?
rescue NotImplementedError
views.include?(view_name.to_s)
end
# Returns an array of indexes for the given table.
def indexes(table_name)
raise NotImplementedError, "#indexes is not implemented"
end
# Checks to see if an index exists on a table for a given index definition.
#
# # Check an index exists
# index_exists?(:suppliers, :company_id)
#
# # Check an index on multiple columns exists
# index_exists?(:suppliers, [:company_id, :company_type])
#
# # Check a unique index exists
# index_exists?(:suppliers, :company_id, unique: true)
#
# # Check an index with a custom name exists
# index_exists?(:suppliers, :company_id, name: "idx_company_id")
#
def index_exists?(table_name, column_name, options = {})
column_names = Array(column_name).map(&:to_s)
checks = []
checks << lambda { |i| Array(i.columns) == column_names }
checks << lambda { |i| i.unique } if options[:unique]
checks << lambda { |i| i.name == options[:name].to_s } if options[:name]
indexes(table_name).any? { |i| checks.all? { |check| check[i] } }
end
# Returns an array of +Column+ objects for the table specified by +table_name+.
def columns(table_name)
table_name = table_name.to_s
column_definitions(table_name).map do |field|
new_column_from_field(table_name, field)
end
end
# Checks to see if a column exists in a given table.
#
# # Check a column exists
# column_exists?(:suppliers, :name)
#
# # Check a column exists of a particular type
# column_exists?(:suppliers, :name, :string)
#
# # Check a column exists with a specific definition
# column_exists?(:suppliers, :name, :string, limit: 100)
# column_exists?(:suppliers, :name, :string, default: 'default')
# column_exists?(:suppliers, :name, :string, null: false)
# column_exists?(:suppliers, :tax, :decimal, precision: 8, scale: 2)
#
def column_exists?(table_name, column_name, type = nil, options = {})
column_name = column_name.to_s
checks = []
checks << lambda { |c| c.name == column_name }
checks << lambda { |c| c.type == type } if type
column_options_keys.each do |attr|
checks << lambda { |c| c.send(attr) == options[attr] } if options.key?(attr)
end
columns(table_name).any? { |c| checks.all? { |check| check[c] } }
end
# Returns just a table's primary key
def primary_key(table_name)
pk = primary_keys(table_name)
pk = pk.first unless pk.size > 1
pk
end
# Creates a new table with the name +table_name+. +table_name+ may either
# be a String or a Symbol.
#
# There are two ways to work with #create_table. You can use the block
# form or the regular form, like this:
#
# === Block form
#
# # create_table() passes a TableDefinition object to the block.
# # This form will not only create the table, but also columns for the
# # table.
#
# create_table(:suppliers) do |t|
# t.column :name, :string, limit: 60
# # Other fields here
# end
#
# === Block form, with shorthand
#
# # You can also use the column types as method calls, rather than calling the column method.
# create_table(:suppliers) do |t|
# t.string :name, limit: 60
# # Other fields here
# end
#
# === Regular form
#
# # Creates a table called 'suppliers' with no columns.
# create_table(:suppliers)
# # Add a column to 'suppliers'.
# add_column(:suppliers, :name, :string, {limit: 60})
#
# The +options+ hash can include the following keys:
# [<tt>:id</tt>]
# Whether to automatically add a primary key column. Defaults to true.
# Join tables for {ActiveRecord::Base.has_and_belongs_to_many}[rdoc-ref:Associations::ClassMethods#has_and_belongs_to_many] should set it to false.
#
# A Symbol can be used to specify the type of the generated primary key column.
# [<tt>:primary_key</tt>]
# The name of the primary key, if one is to be added automatically.
# Defaults to +id+. If <tt>:id</tt> is false, then this option is ignored.
#
# If an array is passed, a composite primary key will be created.
#
# Note that Active Record models will automatically detect their
# primary key. This can be avoided by using
# {self.primary_key=}[rdoc-ref:AttributeMethods::PrimaryKey::ClassMethods#primary_key=] on the model
# to define the key explicitly.
#
# [<tt>:options</tt>]
# Any extra options you want appended to the table definition.
# [<tt>:temporary</tt>]
# Make a temporary table.
# [<tt>:force</tt>]
# Set to true to drop the table before creating it.
# Set to +:cascade+ to drop dependent objects as well.
# Defaults to false.
# [<tt>:as</tt>]
# SQL to use to generate the table. When this option is used, the block is
# ignored, as are the <tt>:id</tt> and <tt>:primary_key</tt> options.
#
# ====== Add a backend specific option to the generated SQL (MySQL)
#
# create_table(:suppliers, options: 'ENGINE=InnoDB DEFAULT CHARSET=utf8')
#
# generates:
#
# CREATE TABLE suppliers (
# id bigint auto_increment PRIMARY KEY
# ) ENGINE=InnoDB DEFAULT CHARSET=utf8
#
# ====== Rename the primary key column
#
# create_table(:objects, primary_key: 'guid') do |t|
# t.column :name, :string, limit: 80
# end
#
# generates:
#
# CREATE TABLE objects (
# guid bigint auto_increment PRIMARY KEY,
# name varchar(80)
# )
#
# ====== Change the primary key column type
#
# create_table(:tags, id: :string) do |t|
# t.column :label, :string
# end
#
# generates:
#
# CREATE TABLE tags (
# id varchar PRIMARY KEY,
# label varchar
# )
#
# ====== Create a composite primary key
#
# create_table(:orders, primary_key: [:product_id, :client_id]) do |t|
# t.belongs_to :product
# t.belongs_to :client
# end
#
# generates:
#
# CREATE TABLE order (
# product_id bigint NOT NULL,
# client_id bigint NOT NULL
# );
#
# ALTER TABLE ONLY "orders"
# ADD CONSTRAINT orders_pkey PRIMARY KEY (product_id, client_id);
#
# ====== Do not add a primary key column
#
# create_table(:categories_suppliers, id: false) do |t|
# t.column :category_id, :bigint
# t.column :supplier_id, :bigint
# end
#
# generates:
#
# CREATE TABLE categories_suppliers (
# category_id bigint,
# supplier_id bigint
# )
#
# ====== Create a temporary table based on a query
#
# create_table(:long_query, temporary: true,
# as: "SELECT * FROM orders INNER JOIN line_items ON order_id=orders.id")
#
# generates:
#
# CREATE TEMPORARY TABLE long_query AS
# SELECT * FROM orders INNER JOIN line_items ON order_id=orders.id
#
# See also TableDefinition#column for details on how to create columns.
def create_table(table_name, comment: nil, **options)
td = create_table_definition table_name, options[:temporary], options[:options], options[:as], comment: comment
if options[:id] != false && !options[:as]
pk = options.fetch(:primary_key) do
Base.get_primary_key table_name.to_s.singularize
end
if pk.is_a?(Array)
td.primary_keys pk
else
td.primary_key pk, options.fetch(:id, :primary_key), options
end
end
yield td if block_given?
if options[:force]
drop_table(table_name, options.merge(if_exists: true))
end
result = execute schema_creation.accept td
unless supports_indexes_in_create?
td.indexes.each do |column_name, index_options|
add_index(table_name, column_name, index_options)
end
end
if supports_comments? && !supports_comments_in_create?
change_table_comment(table_name, comment) if comment.present?
td.columns.each do |column|
change_column_comment(table_name, column.name, column.comment) if column.comment.present?
end
end
result
end
# Creates a new join table with the name created using the lexical order of the first two
# arguments. These arguments can be a String or a Symbol.
#
# # Creates a table called 'assemblies_parts' with no id.
# create_join_table(:assemblies, :parts)
#
# You can pass an +options+ hash which can include the following keys:
# [<tt>:table_name</tt>]
# Sets the table name, overriding the default.
# [<tt>:column_options</tt>]
# Any extra options you want appended to the columns definition.
# [<tt>:options</tt>]
# Any extra options you want appended to the table definition.
# [<tt>:temporary</tt>]
# Make a temporary table.
# [<tt>:force</tt>]
# Set to true to drop the table before creating it.
# Defaults to false.
#
# Note that #create_join_table does not create any indices by default; you can use
# its block form to do so yourself:
#
# create_join_table :products, :categories do |t|
# t.index :product_id
# t.index :category_id
# end
#
# ====== Add a backend specific option to the generated SQL (MySQL)
#
# create_join_table(:assemblies, :parts, options: 'ENGINE=InnoDB DEFAULT CHARSET=utf8')
#
# generates:
#
# CREATE TABLE assemblies_parts (
# assembly_id bigint NOT NULL,
# part_id bigint NOT NULL,
# ) ENGINE=InnoDB DEFAULT CHARSET=utf8
#
def create_join_table(table_1, table_2, column_options: {}, **options)
join_table_name = find_join_table_name(table_1, table_2, options)
column_options.reverse_merge!(null: false, index: false)
t1_ref, t2_ref = [table_1, table_2].map { |t| t.to_s.singularize }
create_table(join_table_name, options.merge!(id: false)) do |td|
td.references t1_ref, column_options
td.references t2_ref, column_options
yield td if block_given?
end
end
# Drops the join table specified by the given arguments.
# See #create_join_table for details.
#
# Although this command ignores the block if one is given, it can be helpful
# to provide one in a migration's +change+ method so it can be reverted.
# In that case, the block will be used by #create_join_table.
def drop_join_table(table_1, table_2, options = {})
  # Same name-resolution as #create_join_table, then a plain drop.
  drop_table(find_join_table_name(table_1, table_2, options))
end
# A block for changing columns in +table+.
#
# # change_table() yields a Table instance
# change_table(:suppliers) do |t|
# t.column :name, :string, limit: 60
# # Other column alterations here
# end
#
# The +options+ hash can include the following keys:
# [<tt>:bulk</tt>]
# Set this to true to make this a bulk alter query, such as
#
# ALTER TABLE `users` ADD COLUMN age INT, ADD COLUMN birthdate DATETIME ...
#
# Defaults to false.
#
# Only supported on the MySQL and PostgreSQL adapter, ignored elsewhere.
#
# ====== Add a column
#
# change_table(:suppliers) do |t|
# t.column :name, :string, limit: 60
# end
#
# ====== Add 2 integer columns
#
# change_table(:suppliers) do |t|
# t.integer :width, :height, null: false, default: 0
# end
#
# ====== Add created_at/updated_at columns
#
# change_table(:suppliers) do |t|
# t.timestamps
# end
#
# ====== Add a foreign key column
#
# change_table(:suppliers) do |t|
# t.references :company
# end
#
# Creates a <tt>company_id(bigint)</tt> column.
#
# ====== Add a polymorphic foreign key column
#
# change_table(:suppliers) do |t|
# t.belongs_to :company, polymorphic: true
# end
#
# Creates <tt>company_type(varchar)</tt> and <tt>company_id(bigint)</tt> columns.
#
# ====== Remove a column
#
# change_table(:suppliers) do |t|
# t.remove :company
# end
#
# ====== Remove several columns
#
# change_table(:suppliers) do |t|
# t.remove :company_id
# t.remove :width, :height
# end
#
# ====== Remove an index
#
# change_table(:suppliers) do |t|
# t.remove_index :company_id
# end
#
# See also Table for details on all of the various column transformations.
def change_table(table_name, options = {})
  # Non-bulk mode alters the table directly through this adapter.
  unless supports_bulk_alter? && options[:bulk]
    return yield update_table_definition(table_name, self)
  end
  # Bulk mode records the commands and replays them as one ALTER TABLE.
  recorder = ActiveRecord::Migration::CommandRecorder.new(self)
  yield update_table_definition(table_name, recorder)
  bulk_change_table(table_name, recorder.commands)
end
# Renames a table.
#
# rename_table('octopuses', 'octopi')
#
def rename_table(table_name, new_name)
  # Abstract hook — concrete adapters (PostgreSQL, MySQL, SQLite3, ...)
  # override this with database-specific SQL.
  raise NotImplementedError, "rename_table is not implemented"
end
# Drops a table from the database.
#
# [<tt>:force</tt>]
# Set to +:cascade+ to drop dependent objects as well.
# Defaults to false.
# [<tt>:if_exists</tt>]
# Set to +true+ to only drop the table if it exists.
# Defaults to false.
#
# Although this command ignores most +options+ and the block if one is given,
# it can be helpful to provide these in a migration's +change+ method so it can be reverted.
# In that case, +options+ and the block will be used by #create_table.
def drop_table(table_name, options = {})
  # Only :if_exists is honored here; other options exist for reversibility.
  if_exists = options[:if_exists] ? " IF EXISTS" : ""
  execute "DROP TABLE#{if_exists} #{quote_table_name(table_name)}"
end
# Add a new +type+ column named +column_name+ to +table_name+.
#
# The +type+ parameter is normally one of the migrations native types,
# which is one of the following:
# <tt>:primary_key</tt>, <tt>:string</tt>, <tt>:text</tt>,
# <tt>:integer</tt>, <tt>:bigint</tt>, <tt>:float</tt>, <tt>:decimal</tt>, <tt>:numeric</tt>,
# <tt>:datetime</tt>, <tt>:time</tt>, <tt>:date</tt>,
# <tt>:binary</tt>, <tt>:boolean</tt>.
#
# You may use a type not in this list as long as it is supported by your
# database (for example, "polygon" in MySQL), but this will not be database
# agnostic and should usually be avoided.
#
# Available options are (none of these exists by default):
# * <tt>:limit</tt> -
# Requests a maximum column length. This is the number of characters for a <tt>:string</tt> column
# and number of bytes for <tt>:text</tt>, <tt>:binary</tt> and <tt>:integer</tt> columns.
# This option is ignored by some backends.
# * <tt>:default</tt> -
# The column's default value. Use +nil+ for +NULL+.
# * <tt>:null</tt> -
# Allows or disallows +NULL+ values in the column.
# * <tt>:precision</tt> -
# Specifies the precision for the <tt>:decimal</tt> and <tt>:numeric</tt> columns.
# * <tt>:scale</tt> -
# Specifies the scale for the <tt>:decimal</tt> and <tt>:numeric</tt> columns.
# * <tt>:comment</tt> -
# Specifies the comment for the column. This option is ignored by some backends.
#
# Note: The precision is the total number of significant digits,
# and the scale is the number of digits that can be stored following
# the decimal point. For example, the number 123.45 has a precision of 5
# and a scale of 2. A decimal with a precision of 5 and a scale of 2 can
# range from -999.99 to 999.99.
#
# Please be aware of different RDBMS implementations behavior with
# <tt>:decimal</tt> columns:
# * The SQL standard says the default scale should be 0, <tt>:scale</tt> <=
# <tt>:precision</tt>, and makes no comments about the requirements of
# <tt>:precision</tt>.
# * MySQL: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..30].
# Default is (10,0).
# * PostgreSQL: <tt>:precision</tt> [1..infinity],
# <tt>:scale</tt> [0..infinity]. No default.
# * SQLite3: No restrictions on <tt>:precision</tt> and <tt>:scale</tt>,
# but the maximum supported <tt>:precision</tt> is 16. No default.
# * Oracle: <tt>:precision</tt> [1..38], <tt>:scale</tt> [-84..127].
# Default is (38,0).
# * DB2: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..62].
# Default unknown.
# * SqlServer: <tt>:precision</tt> [1..38], <tt>:scale</tt> [0..38].
# Default (38,0).
#
# == Examples
#
# add_column(:users, :picture, :binary, limit: 2.megabytes)
# # ALTER TABLE "users" ADD "picture" blob(2097152)
#
# add_column(:articles, :status, :string, limit: 20, default: 'draft', null: false)
# # ALTER TABLE "articles" ADD "status" varchar(20) DEFAULT 'draft' NOT NULL
#
# add_column(:answers, :bill_gates_money, :decimal, precision: 15, scale: 2)
# # ALTER TABLE "answers" ADD "bill_gates_money" decimal(15,2)
#
# add_column(:measurements, :sensor_reading, :decimal, precision: 30, scale: 20)
# # ALTER TABLE "measurements" ADD "sensor_reading" decimal(30,20)
#
# # While :scale defaults to zero on most databases, it
# # probably wouldn't hurt to include it.
# add_column(:measurements, :huge_integer, :decimal, precision: 30)
# # ALTER TABLE "measurements" ADD "huge_integer" decimal(30)
#
# # Defines a column that stores an array of a type.
# add_column(:users, :skills, :text, array: true)
# # ALTER TABLE "users" ADD "skills" text[]
#
# # Defines a column with a database-specific type.
# add_column(:shapes, :triangle, 'polygon')
# # ALTER TABLE "shapes" ADD "triangle" polygon
def add_column(table_name, column_name, type, options = {})
  # Record the addition on an ALTER TABLE definition, then render it to SQL.
  alter = create_alter_table(table_name)
  alter.add_column(column_name, type, options)
  execute schema_creation.accept(alter)
end
# Removes the given columns from the table definition.
#
# remove_columns(:suppliers, :qualification, :experience)
#
def remove_columns(table_name, *column_names)
  if column_names.empty?
    raise ArgumentError, "You must specify at least one column name. Example: remove_columns(:people, :first_name)"
  end
  column_names.each { |name| remove_column(table_name, name) }
end
# Removes the column from the table definition.
#
# remove_column(:suppliers, :qualification)
#
# The +type+ and +options+ parameters will be ignored if present. It can be helpful
# to provide these in a migration's +change+ method so it can be reverted.
# In that case, +type+ and +options+ will be used by #add_column.
def remove_column(table_name, column_name, type = nil, options = {})
  # +type+/+options+ are forwarded so adapters can use them for reversibility.
  fragment = remove_column_for_alter(table_name, column_name, type, options)
  execute "ALTER TABLE #{quote_table_name(table_name)} #{fragment}"
end
# Changes the column's definition according to the new options.
# See TableDefinition#column for details of the options you can use.
#
# change_column(:suppliers, :name, :string, limit: 80)
# change_column(:accounts, :description, :text)
#
def change_column(table_name, column_name, type, options = {})
  # Abstract hook — adapters provide the database-specific implementation.
  raise NotImplementedError, "change_column is not implemented"
end
# Sets a new default value for a column:
#
# change_column_default(:suppliers, :qualification, 'new')
# change_column_default(:accounts, :authorized, 1)
#
# Setting the default to +nil+ effectively drops the default:
#
# change_column_default(:users, :email, nil)
#
# Passing a hash containing +:from+ and +:to+ will make this change
# reversible in migration:
#
# change_column_default(:posts, :state, from: nil, to: "draft")
#
def change_column_default(table_name, column_name, default_or_changes)
  # Abstract hook — adapters provide the database-specific implementation.
  # +default_or_changes+ may be a plain value or a {from:, to:} hash
  # (see #extract_new_default_value).
  raise NotImplementedError, "change_column_default is not implemented"
end
# Sets or removes a <tt>NOT NULL</tt> constraint on a column. The +null+ flag
# indicates whether the value can be +NULL+. For example
#
# change_column_null(:users, :nickname, false)
#
# says nicknames cannot be +NULL+ (adds the constraint), whereas
#
# change_column_null(:users, :nickname, true)
#
# allows them to be +NULL+ (drops the constraint).
#
# The method accepts an optional fourth argument to replace existing
# <tt>NULL</tt>s with some other value. Use that one when enabling the
# constraint if needed, since otherwise those rows would not be valid.
#
# Please note the fourth argument does not set a column's default.
def change_column_null(table_name, column_name, null, default = nil)
  # Abstract hook — adapters provide the database-specific implementation.
  raise NotImplementedError, "change_column_null is not implemented"
end
# Renames a column.
#
# rename_column(:suppliers, :description, :name)
#
def rename_column(table_name, column_name, new_column_name)
  # Abstract hook — adapters provide the database-specific implementation.
  raise NotImplementedError, "rename_column is not implemented"
end
# Adds a new index to the table. +column_name+ can be a single Symbol, or
# an Array of Symbols.
#
# The index will be named after the table and the column name(s), unless
# you pass <tt>:name</tt> as an option.
#
# ====== Creating a simple index
#
# add_index(:suppliers, :name)
#
# generates:
#
# CREATE INDEX suppliers_name_index ON suppliers(name)
#
# ====== Creating a unique index
#
# add_index(:accounts, [:branch_id, :party_id], unique: true)
#
# generates:
#
# CREATE UNIQUE INDEX accounts_branch_id_party_id_index ON accounts(branch_id, party_id)
#
# ====== Creating a named index
#
# add_index(:accounts, [:branch_id, :party_id], unique: true, name: 'by_branch_party')
#
# generates:
#
# CREATE UNIQUE INDEX by_branch_party ON accounts(branch_id, party_id)
#
# ====== Creating an index with specific key length
#
# add_index(:accounts, :name, name: 'by_name', length: 10)
#
# generates:
#
# CREATE INDEX by_name ON accounts(name(10))
#
# ====== Creating an index with specific key lengths for multiple keys
#
# add_index(:accounts, [:name, :surname], name: 'by_name_surname', length: {name: 10, surname: 15})
#
# generates:
#
# CREATE INDEX by_name_surname ON accounts(name(10), surname(15))
#
# Note: SQLite doesn't support index length.
#
# ====== Creating an index with a sort order (desc or asc, asc is the default)
#
# add_index(:accounts, [:branch_id, :party_id, :surname], order: {branch_id: :desc, party_id: :asc})
#
# generates:
#
# CREATE INDEX by_branch_desc_party ON accounts(branch_id DESC, party_id ASC, surname)
#
# Note: MySQL only supports index order from 8.0.1 onwards (earlier versions accepted the syntax but ignored it).
#
# ====== Creating a partial index
#
# add_index(:accounts, [:branch_id, :party_id], unique: true, where: "active")
#
# generates:
#
# CREATE UNIQUE INDEX index_accounts_on_branch_id_and_party_id ON accounts(branch_id, party_id) WHERE active
#
# Note: Partial indexes are only supported for PostgreSQL and SQLite 3.8.0+.
#
# ====== Creating an index with a specific method
#
# add_index(:developers, :name, using: 'btree')
#
# generates:
#
# CREATE INDEX index_developers_on_name ON developers USING btree (name) -- PostgreSQL
# CREATE INDEX index_developers_on_name USING btree ON developers (name) -- MySQL
#
# Note: only supported by PostgreSQL and MySQL
#
# ====== Creating an index with a specific operator class
#
# add_index(:developers, :name, using: 'gist', opclass: :gist_trgm_ops)
# # CREATE INDEX developers_on_name ON developers USING gist (name gist_trgm_ops) -- PostgreSQL
#
# add_index(:developers, [:name, :city], using: 'gist', opclass: { city: :gist_trgm_ops })
# # CREATE INDEX developers_on_name_and_city ON developers USING gist (name, city gist_trgm_ops) -- PostgreSQL
#
# add_index(:developers, [:name, :city], using: 'gist', opclass: :gist_trgm_ops)
# # CREATE INDEX developers_on_name_and_city ON developers USING gist (name gist_trgm_ops, city gist_trgm_ops) -- PostgreSQL
#
# Note: only supported by PostgreSQL
#
# ====== Creating an index with a specific type
#
# add_index(:developers, :name, type: :fulltext)
#
# generates:
#
# CREATE FULLTEXT INDEX index_developers_on_name ON developers (name) -- MySQL
#
# Note: only supported by MySQL.
def add_index(table_name, column_name, options = {})
  # add_index_options returns a 7-tuple; only the first four are used here.
  name, type, columns, opts = add_index_options(table_name, column_name, options)
  sql = "CREATE #{type} INDEX #{quote_column_name(name)} ON #{quote_table_name(table_name)} (#{columns})#{opts}"
  execute sql
end
# Removes the given index from the table.
#
# Removes the index on +branch_id+ in the +accounts+ table if exactly one such index exists.
#
# remove_index :accounts, :branch_id
#
# Removes the index on +branch_id+ in the +accounts+ table if exactly one such index exists.
#
# remove_index :accounts, column: :branch_id
#
# Removes the index on +branch_id+ and +party_id+ in the +accounts+ table if exactly one such index exists.
#
# remove_index :accounts, column: [:branch_id, :party_id]
#
# Removes the index named +by_branch_party+ in the +accounts+ table.
#
# remove_index :accounts, name: :by_branch_party
#
def remove_index(table_name, options = {})
  # Resolve the concrete index name (raises when absent or ambiguous).
  name = index_name_for_remove(table_name, options)
  execute "DROP INDEX #{quote_column_name(name)} ON #{quote_table_name(table_name)}"
end
# Renames an index.
#
# Rename the +index_people_on_last_name+ index to +index_users_on_last_name+:
#
# rename_index :people, 'index_people_on_last_name', 'index_users_on_last_name'
#
def rename_index(table_name, old_name, new_name)
  validate_index_length!(table_name, new_name)
  # this is a naive implementation; some DBs may support this more efficiently (PostgreSQL, for instance)
  old_index_def = indexes(table_name).detect { |i| i.name == old_name }
  # Silently a no-op when the old index does not exist.
  return unless old_index_def
  # NOTE(review): only columns and uniqueness are carried over — :where,
  # :length, :order, :using etc. of the old index appear to be dropped by
  # this recreate-then-drop approach; confirm against adapter overrides.
  add_index(table_name, old_index_def.columns, name: new_name, unique: old_index_def.unique)
  remove_index(table_name, name: old_name)
end
def index_name(table_name, options) #:nodoc:
  # Non-hash input (a column spec) is normalized and retried.
  unless options.is_a?(Hash)
    return index_name(table_name, index_name_options(options))
  end
  if (columns = options[:column])
    "index_#{table_name}_on_#{Array(columns) * '_and_'}"
  elsif options[:name]
    options[:name]
  else
    raise ArgumentError, "You must specify the index name"
  end
end
# Verifies the existence of an index with a given name.
def index_name_exists?(table_name, index_name)
index_name = index_name.to_s
indexes(table_name).detect { |i| i.name == index_name }
end
# Adds a reference. The reference column is a bigint by default,
# the <tt>:type</tt> option can be used to specify a different type.
# Optionally adds a +_type+ column, if <tt>:polymorphic</tt> option is provided.
# #add_reference and #add_belongs_to are acceptable.
#
# The +options+ hash can include the following keys:
# [<tt>:type</tt>]
# The reference column type. Defaults to +:bigint+.
# [<tt>:index</tt>]
# Add an appropriate index. Defaults to true.
# See #add_index for usage of this option.
# [<tt>:foreign_key</tt>]
# Add an appropriate foreign key constraint. Defaults to false.
# [<tt>:polymorphic</tt>]
# Whether an additional +_type+ column should be added. Defaults to false.
# [<tt>:null</tt>]
# Whether the column allows nulls. Defaults to true.
#
# ====== Create a user_id bigint column
#
# add_reference(:products, :user)
#
# ====== Create a user_id string column
#
# add_reference(:products, :user, type: :string)
#
# ====== Create supplier_id, supplier_type columns and appropriate index
#
# add_reference(:products, :supplier, polymorphic: true, index: true)
#
# ====== Create a supplier_id column with a unique index
#
# add_reference(:products, :supplier, index: { unique: true })
#
# ====== Create a supplier_id column with a named index
#
# add_reference(:products, :supplier, index: { name: "my_supplier_index" })
#
# ====== Create a supplier_id column and appropriate foreign key
#
# add_reference(:products, :supplier, foreign_key: true)
#
# ====== Create a supplier_id column and a foreign key to the firms table
#
# add_reference(:products, :supplier, foreign_key: {to_table: :firms})
#
def add_reference(table_name, ref_name, **options)
  # Delegate to ReferenceDefinition, which adds the column(s), index and
  # foreign key as requested by +options+.
  table = update_table_definition(table_name, self)
  ReferenceDefinition.new(ref_name, options).add_to(table)
end
alias :add_belongs_to :add_reference
# Removes the reference(s). Also removes a +type+ column if one exists.
# #remove_reference and #remove_belongs_to are acceptable.
#
# ====== Remove the reference
#
# remove_reference(:products, :user, index: true)
#
# ====== Remove polymorphic reference
#
# remove_reference(:products, :supplier, polymorphic: true)
#
# ====== Remove the reference with a foreign key
#
# remove_reference(:products, :user, index: true, foreign_key: true)
#
def remove_reference(table_name, ref_name, foreign_key: false, polymorphic: false, **options)
  if foreign_key
    reference_name = Base.pluralize_table_names ? ref_name.to_s.pluralize : ref_name
    # A Hash value supplies explicit FK options; any other truthy value
    # targets the conventionally-named table.
    fk_options = foreign_key.is_a?(Hash) ? foreign_key : { to_table: reference_name }
    fk_options[:column] ||= "#{ref_name}_id"
    remove_foreign_key(table_name, fk_options)
  end
  remove_column(table_name, "#{ref_name}_id")
  remove_column(table_name, "#{ref_name}_type") if polymorphic
end
alias :remove_belongs_to :remove_reference
# Returns an array of foreign keys for the given table.
# The foreign keys are represented as ForeignKeyDefinition objects.
def foreign_keys(table_name)
  # Abstract hook — adapters supporting foreign keys override this.
  raise NotImplementedError, "foreign_keys is not implemented"
end
# Adds a new foreign key. +from_table+ is the table with the key column,
# +to_table+ contains the referenced primary key.
#
# The foreign key will be named after the following pattern: <tt>fk_rails_<identifier></tt>.
# +identifier+ is a 10 character long string which is deterministically generated from the
# +from_table+ and +column+. A custom name can be specified with the <tt>:name</tt> option.
#
# ====== Creating a simple foreign key
#
# add_foreign_key :articles, :authors
#
# generates:
#
# ALTER TABLE "articles" ADD CONSTRAINT fk_rails_e74ce85cbc FOREIGN KEY ("author_id") REFERENCES "authors" ("id")
#
# ====== Creating a foreign key on a specific column
#
# add_foreign_key :articles, :users, column: :author_id, primary_key: "lng_id"
#
# generates:
#
# ALTER TABLE "articles" ADD CONSTRAINT fk_rails_58ca3d3a82 FOREIGN KEY ("author_id") REFERENCES "users" ("lng_id")
#
# ====== Creating a cascading foreign key
#
# add_foreign_key :articles, :authors, on_delete: :cascade
#
# generates:
#
# ALTER TABLE "articles" ADD CONSTRAINT fk_rails_e74ce85cbc FOREIGN KEY ("author_id") REFERENCES "authors" ("id") ON DELETE CASCADE
#
# The +options+ hash can include the following keys:
# [<tt>:column</tt>]
# The foreign key column name on +from_table+. Defaults to <tt>to_table.singularize + "_id"</tt>
# [<tt>:primary_key</tt>]
# The primary key column name on +to_table+. Defaults to +id+.
# [<tt>:name</tt>]
# The constraint name. Defaults to <tt>fk_rails_<identifier></tt>.
# [<tt>:on_delete</tt>]
# Action that happens <tt>ON DELETE</tt>. Valid values are +:nullify+, +:cascade+ and +:restrict+
# [<tt>:on_update</tt>]
# Action that happens <tt>ON UPDATE</tt>. Valid values are +:nullify+, +:cascade+ and +:restrict+
# [<tt>:validate</tt>]
# (Postgres only) Specify whether or not the constraint should be validated. Defaults to +true+.
def add_foreign_key(from_table, to_table, options = {})
  # Silently a no-op on adapters without foreign key support.
  return unless supports_foreign_keys?
  fk_options = foreign_key_options(from_table, to_table, options)
  alter = create_alter_table from_table
  alter.add_foreign_key to_table, fk_options
  execute schema_creation.accept(alter)
end
# Removes the given foreign key from the table. Any option parameters provided
# will be used to re-add the foreign key in case of a migration rollback.
# It is recommended that you provide any options used when creating the foreign
# key so that the migration can be reverted properly.
#
# Removes the foreign key on +accounts.branch_id+.
#
# remove_foreign_key :accounts, :branches
#
# Removes the foreign key on +accounts.owner_id+.
#
# remove_foreign_key :accounts, column: :owner_id
#
# Removes the foreign key named +special_fk_name+ on the +accounts+ table.
#
# remove_foreign_key :accounts, name: :special_fk_name
#
# The +options+ hash accepts the same keys as SchemaStatements#add_foreign_key.
def remove_foreign_key(from_table, options_or_to_table = {})
  # Silently a no-op on adapters without foreign key support.
  return unless supports_foreign_keys?
  alter = create_alter_table from_table
  # foreign_key_for! raises ArgumentError when no matching key exists.
  alter.drop_foreign_key foreign_key_for!(from_table, options_or_to_table).name
  execute schema_creation.accept(alter)
end
# Checks to see if a foreign key exists on a table for a given foreign key definition.
#
# # Checks to see if a foreign key exists.
# foreign_key_exists?(:accounts, :branches)
#
# # Checks to see if a foreign key on a specified column exists.
# foreign_key_exists?(:accounts, column: :owner_id)
#
# # Checks to see if a foreign key with a custom name exists.
# foreign_key_exists?(:accounts, name: "special_fk_name")
#
def foreign_key_exists?(from_table, options_or_to_table = {})
  # nil (no match, or FKs unsupported) is not present?, so this is boolean.
  fk = foreign_key_for(from_table, options_or_to_table)
  fk.present?
end
def foreign_key_column_for(table_name) # :nodoc:
  prefix = Base.table_name_prefix
  suffix = Base.table_name_suffix
  # Strip any configured table name prefix/suffix before singularizing.
  name = table_name.to_s
  name = $1 if name =~ /#{prefix}(.+)#{suffix}/
  "#{name.singularize}_id"
end
def foreign_key_options(from_table, to_table, options) # :nodoc:
  # Work on a copy and fill in derived defaults. :column must be resolved
  # before :name, because the generated name hashes the column.
  opts = options.dup
  opts[:column] ||= foreign_key_column_for(to_table)
  opts[:name] ||= foreign_key_name(from_table, opts)
  opts
end
def dump_schema_information #:nodoc:
  # Returns nil when there are no recorded migration versions.
  all = ActiveRecord::SchemaMigration.all_versions
  insert_versions_sql(all) if all.any?
end
# Column options used when a string column serves as the primary key.
def internal_string_options_for_primary_key # :nodoc:
  { primary_key: true }
end
# Marks every migration up to (and including) +version+ as already run by
# inserting the missing rows into the schema_migrations table.
def assume_migrated_upto_version(version, migrations_paths)
  # NOTE(review): migrations_paths is normalized but otherwise unused in
  # this body — presumably kept for API compatibility; confirm callers.
  migrations_paths = Array(migrations_paths)
  version = version.to_i
  sm_table = quote_table_name(ActiveRecord::SchemaMigration.table_name)
  migrated = ActiveRecord::SchemaMigration.all_versions.map(&:to_i)
  # All migration versions known on disk.
  versions = migration_context.migration_files.map do |file|
    migration_context.parse_migration_filename(file).first.to_i
  end
  unless migrated.include?(version)
    execute "INSERT INTO #{sm_table} (version) VALUES (#{quote(version)})"
  end
  # Back-fill every on-disk version older than +version+ that is not yet
  # recorded as migrated.
  inserting = (versions - migrated).select { |v| v < version }
  if inserting.any?
    if (duplicate = inserting.detect { |v| inserting.count(v) > 1 })
      raise "Duplicate migration #{duplicate}. Please renumber your migrations to resolve the conflict."
    end
    if supports_multi_insert?
      execute insert_versions_sql(inserting)
    else
      # One INSERT per version for adapters without multi-row VALUES.
      inserting.each do |v|
        execute insert_versions_sql(v)
      end
    end
  end
end
# Maps an abstract column +type+ (plus limit/precision/scale) to the
# adapter's SQL type string, e.g. (:string, limit: 20) => "varchar(20)".
# Unknown types pass through verbatim.
def type_to_sql(type, limit: nil, precision: nil, scale: nil, **) # :nodoc:
  type = type.to_sym if type
  if native = native_database_types[type]
    # Start from the adapter's native base name; append a size specifier.
    column_type_sql = (native.is_a?(Hash) ? native[:name] : native).dup
    if type == :decimal # ignore limit, use precision and scale
      scale ||= native[:scale]
      # Assignment inside the condition: precision falls back to the
      # native default, and the branch runs only when one is present.
      if precision ||= native[:precision]
        if scale
          column_type_sql << "(#{precision},#{scale})"
        else
          column_type_sql << "(#{precision})"
        end
      elsif scale
        raise ArgumentError, "Error adding decimal column: precision cannot be empty if scale is specified"
      end
    elsif [:datetime, :timestamp, :time, :interval].include?(type) && precision ||= native[:precision]
      # Fractional-second precision must lie in 0..6.
      if (0..6) === precision
        column_type_sql << "(#{precision})"
      else
        raise(ActiveRecordError, "No #{native[:name]} type has precision of #{precision}. The allowed range of precision is from 0 to 6")
      end
    elsif (type != :primary_key) && (limit ||= native.is_a?(Hash) && native[:limit])
      column_type_sql << "(#{limit})"
    end
    column_type_sql
  else
    # Not a native type (e.g. "polygon" on MySQL) — emit as-is.
    type.to_s
  end
end
# Given a set of columns and an ORDER BY clause, returns the columns for a SELECT DISTINCT.
# PostgreSQL, MySQL, and Oracle override this for custom DISTINCT syntax - they
# require the order columns appear in the SELECT.
#
# columns_for_distinct("posts.id", ["posts.created_at desc"])
#
def columns_for_distinct(columns, orders) # :nodoc:
  # Default: ORDER BY columns need not appear in the SELECT; adapters with
  # stricter DISTINCT rules (PostgreSQL, Oracle) override this.
  columns
end
# Adds timestamps (+created_at+ and +updated_at+) columns to +table_name+.
# Additional options (like +:null+) are forwarded to #add_column.
#
#   add_timestamps(:suppliers, null: true)
#
def add_timestamps(table_name, options = {})
  # Work on a copy so defaulting :null to false is not leaked back into
  # the caller's options hash (the previous implementation mutated it).
  options = options.dup
  options[:null] = false if options[:null].nil?
  add_column table_name, :created_at, :datetime, options
  add_column table_name, :updated_at, :datetime, options
end
# Removes the timestamp columns (+created_at+ and +updated_at+) from the table definition.
#
# remove_timestamps(:suppliers)
#
def remove_timestamps(table_name, options = {})
  # +options+ is accepted only for migration reversibility; it is unused.
  remove_column table_name, :updated_at
  remove_column table_name, :created_at
end
# Wraps +table_name+ in a Table bound to +base+ (an adapter or a
# CommandRecorder) for use inside #change_table blocks.
def update_table_definition(table_name, base) #:nodoc:
  Table.new(table_name, base)
end
# Resolves index +options+ into the tuple consumed by #add_index:
# [index_name, index_type, index_columns, index_options, algorithm, using, comment]
def add_index_options(table_name, column_name, comment: nil, **options) # :nodoc:
  column_names = index_column_names(column_name)
  options.assert_valid_keys(:unique, :order, :name, :where, :length, :internal, :using, :algorithm, :type, :opclass)
  # An explicit :type (e.g. :fulltext) wins over the :unique flag.
  index_type = options[:type].to_s if options.key?(:type)
  index_type ||= options[:unique] ? "UNIQUE" : ""
  index_name = options[:name].to_s if options.key?(:name)
  index_name ||= index_name(table_name, column_names)
  if options.key?(:algorithm)
    algorithm = index_algorithms.fetch(options[:algorithm]) {
      raise ArgumentError.new("Algorithm must be one of the following: #{index_algorithms.keys.map(&:inspect).join(', ')}")
    }
  end
  using = "USING #{options[:using]}" if options[:using].present?
  # Partial index (WHERE clause) only on adapters that support it.
  if supports_partial_index?
    index_options = options[:where] ? " WHERE #{options[:where]}" : ""
  end
  validate_index_length!(table_name, index_name, options.fetch(:internal, false))
  if data_source_exists?(table_name) && index_name_exists?(table_name, index_name)
    raise ArgumentError, "Index name '#{index_name}' on table '#{table_name}' already exists"
  end
  index_columns = quoted_columns_for_index(column_names, options).join(", ")
  [index_name, index_type, index_columns, index_options, algorithm, using, comment]
end
# Returns true when +options+ carries a usable :default entry.
# A nil default combined with <tt>null: false</tt> does not count — NULL
# cannot be stored in a NOT NULL column.
def options_include_default?(options)
  return false unless options.include?(:default)
  !(options[:null] == false && options[:default].nil?)
end
# Changes the comment for a table or removes it if +nil+.
def change_table_comment(table_name, comment)
  # Abstract hook — only comment-capable adapters (e.g. PostgreSQL, MySQL)
  # override this.
  raise NotImplementedError, "#{self.class} does not support changing table comments"
end
# Changes the comment for a column or removes it if +nil+.
def change_column_comment(table_name, column_name, comment)
  # Abstract hook — only comment-capable adapters override this.
  raise NotImplementedError, "#{self.class} does not support changing column comments"
end
# Builds the schema dumper bound to this connection.
def create_schema_dumper(options) # :nodoc:
  SchemaDumper.create(self, options)
end
private
# Option keys recognized on column definitions.
def column_options_keys
  %i[limit precision scale default null collation comment]
end
# Appends " ASC"/" DESC" to each quoted column that has an :order entry.
def add_index_sort_order(quoted_columns, **options)
  orders = options_for_index_columns(options[:order])
  quoted_columns.each do |name, column|
    order = orders[name]
    column << " #{order.upcase}" if order.present?
  end
end
def options_for_index_columns(options)
  return options.symbolize_keys if options.is_a?(Hash)
  # A single scalar setting applies to every column: memoize it per key.
  Hash.new { |hash, column| hash[column] = options }
end
# Overridden by the MySQL adapter for supporting index lengths and by
# the PostgreSQL adapter for supporting operator classes.
def add_options_for_index_columns(quoted_columns, **options)
  # Without sort-order support the quoted columns pass through untouched.
  return quoted_columns unless supports_index_sort_order?
  add_index_sort_order(quoted_columns, options)
end
def quoted_columns_for_index(column_names, **options)
return [column_names] if column_names.is_a?(String)
quoted_columns = Hash[column_names.map { |name| [name.to_sym, quote_column_name(name).dup] }]
add_options_for_index_columns(quoted_columns, options).values
end
# Resolves the name of the index to drop from +options+ (an explicit name,
# a column list, or a bare column spec). Raises ArgumentError when nothing
# is specified, nothing matches, or the match is ambiguous.
def index_name_for_remove(table_name, options = {})
  # Fast path: a pure {name: ...} spec needs no catalog lookup.
  return options[:name] if can_remove_index_by_name?(options)
  checks = []
  if options.is_a?(Hash)
    checks << lambda { |i| i.name == options[:name].to_s } if options.key?(:name)
    column_names = index_column_names(options[:column])
  else
    # Bare column spec: remove_index :accounts, :branch_id
    column_names = index_column_names(options)
  end
  if column_names.present?
    # Compare via generated names so column order/format differences
    # do not prevent a match.
    checks << lambda { |i| index_name(table_name, i.columns) == index_name(table_name, column_names) }
  end
  raise ArgumentError, "No name or columns specified" if checks.none?
  matching_indexes = indexes(table_name).select { |i| checks.all? { |check| check[i] } }
  if matching_indexes.count > 1
    raise ArgumentError, "Multiple indexes found on #{table_name} columns #{column_names}. " \
                         "Specify an index name from #{matching_indexes.map(&:name).join(', ')}"
  elsif matching_indexes.none?
    raise ArgumentError, "No indexes found on #{table_name} with the options provided."
  else
    matching_indexes.first.name
  end
end
# After a table rename, regenerates the auto-generated index names so they
# reflect the new table name. Custom-named indexes are left untouched.
def rename_table_indexes(table_name, new_name)
  indexes(new_name).each do |index|
    generated_index_name = index_name(table_name, column: index.columns)
    # Only rename when the current name matches the auto-generated pattern.
    if generated_index_name == index.name
      rename_index new_name, generated_index_name, index_name(new_name, column: index.columns)
    end
  end
end
# After a column rename, regenerates auto-generated index names that were
# derived from the old column name. Custom-named indexes are left alone.
def rename_column_indexes(table_name, column_name, new_column_name)
  column_name, new_column_name = column_name.to_s, new_column_name.to_s
  indexes(table_name).each do |index|
    next unless index.columns.include?(new_column_name)
    # Reconstruct what the index name would have been under the old column.
    old_columns = index.columns.dup
    old_columns[old_columns.index(new_column_name)] = column_name
    generated_index_name = index_name(table_name, column: old_columns)
    if generated_index_name == index.name
      rename_index table_name, generated_index_name, index_name(table_name, column: index.columns)
    end
  end
end
# Visitor that renders schema definition objects to SQL for this adapter.
def schema_creation
  SchemaCreation.new(self)
end
# Factory for TableDefinition; adapters override to supply their own class.
def create_table_definition(*args)
  TableDefinition.new(*args)
end
# Builds an AlterTable collector for +name+ used by add_column and
# foreign-key statements.
def create_alter_table(name)
  AlterTable.new create_table_definition(name)
end
# Builds SqlTypeMetadata for +sql_type+ from the corresponding cast type's
# type/limit/precision/scale.
def fetch_type_metadata(sql_type)
  cast_type = lookup_cast_type(sql_type)
  SqlTypeMetadata.new(
    sql_type: sql_type,
    type: cast_type.type,
    limit: cast_type.limit,
    precision: cast_type.precision,
    scale: cast_type.scale,
  )
end
def index_column_names(column_names)
  # A string containing non-word characters is an expression index
  # (e.g. "lower(name)") and passes through untouched.
  return column_names if column_names.is_a?(String) && /\W/.match?(column_names)
  Array(column_names)
end
def index_name_options(column_names)
  # Collapse an expression string ("lower(name)") into a word-only name.
  if column_names.is_a?(String) && /\W/.match?(column_names)
    { column: column_names.scan(/\w+/).join("_") }
  else
    { column: column_names }
  end
end
def foreign_key_name(table_name, options)
  # An explicit :name wins; otherwise derive a deterministic 10-character
  # identifier from the table and column.
  options.fetch(:name) do
    digest = Digest::SHA256.hexdigest("#{table_name}_#{options.fetch(:column)}_fk")
    "fk_rails_#{digest.first(10)}"
  end
end
def foreign_key_for(from_table, options_or_to_table = {})
  # nil when the adapter has no foreign key support or nothing matches.
  if supports_foreign_keys?
    foreign_keys(from_table).detect { |fk| fk.defined_for?(options_or_to_table) }
  end
end
def foreign_key_for!(from_table, options_or_to_table = {})
  fk = foreign_key_for(from_table, options_or_to_table)
  return fk if fk
  raise ArgumentError, "Table '#{from_table}' has no foreign key for #{options_or_to_table}"
end
# Maps a SQL referential-action specifier to its Rails symbol.
# Unrecognized specifiers (e.g. "NO ACTION") map to nil.
def extract_foreign_key_action(specifier)
  {
    "CASCADE"  => :cascade,
    "SET NULL" => :nullify,
    "RESTRICT" => :restrict,
  }[specifier]
end
# Raises ArgumentError when +new_name+ exceeds the applicable index-name
# limit: the hard +index_name_length+ for internal indexes, otherwise
# +allowed_index_name_length+.
def validate_index_length!(table_name, new_name, internal = false)
  max_index_length = internal ? index_name_length : allowed_index_name_length
  if new_name.length > max_index_length
    # Report the limit that was actually applied. The previous message
    # always quoted allowed_index_name_length, which was misleading when
    # the internal limit triggered the failure.
    raise ArgumentError, "Index name '#{new_name}' on table '#{table_name}' is too long; the limit is #{max_index_length} characters"
  end
end
# Unwraps a reversible {from:, to:} change hash to its :to value; any
# other input is already the new default and is returned as-is.
def extract_new_default_value(default_or_changes)
  changes = default_or_changes
  if changes.is_a?(Hash) && changes.key?(:from) && changes.key?(:to)
    changes[:to]
  else
    changes
  end
end
# True when +options+ is a pure name-based spec ({name: ..., algorithm: ...})
# so the index can be dropped without inspecting the catalog.
def can_remove_index_by_name?(options)
  return false unless options.is_a?(Hash) && options.key?(:name)
  options.except(:name, :algorithm).empty?
end
def add_column_for_alter(table_name, column_name, type, options = {})
td = create_table_definition(table_name)
cd = td.new_column_definition(column_name, type, options)
schema_creation.accept(AddColumnDefinition.new(cd))
end
def remove_column_for_alter(table_name, column_name, type = nil, options = {})
"DROP COLUMN #{quote_column_name(column_name)}"
end
def remove_columns_for_alter(table_name, *column_names)
column_names.map { |column_name| remove_column_for_alter(table_name, column_name) }
end
def insert_versions_sql(versions)
sm_table = quote_table_name(ActiveRecord::SchemaMigration.table_name)
if versions.is_a?(Array)
sql = "INSERT INTO #{sm_table} (version) VALUES\n".dup
sql << versions.map { |v| "(#{quote(v)})" }.join(",\n")
sql << ";\n\n"
sql
else
"INSERT INTO #{sm_table} (version) VALUES (#{quote(versions)});"
end
end
def data_source_sql(name = nil, type: nil)
raise NotImplementedError
end
def quoted_scope(name = nil, type: nil)
raise NotImplementedError
end
end
end
end
| 38.577666 | 155 | 0.615423 |
916e85acb1ffb46d59f142b94fe2ecc52e919a4c | 3,233 | require 'rails_helper'
# Controller specs for the prisoner-details step of the visit booking flow:
# session bootstrap, cookie enforcement, and form validation/whitespace rules.
RSpec.describe PrisonerDetailsController, type: :controller do
  render_views
  # Canonical valid form submission reused by the examples below.
  let(:prisoner_hash) do
    {
      prisoner: {
        first_name: 'Jimmy',
        last_name: 'Harris',
        date_of_birth: {
          day: '20',
          month: '04',
          year: '1986'
        },
        number: 'g3133ff',
        prison_name: 'Rochester'
      }
    }
  end
  it_behaves_like "a browser without a session present"
  it_behaves_like "a session timed out"
  context "always" do
    it "creates a new session" do
      controller.new_session.tap do |visit|
        expect(visit.visit_id).to be_a String
        expect(visit.visit_id.size).to eq(32)
        expect(visit.prisoner).to be_a Prisoner
        expect(visit.visitors).to eq([])
        expect(visit.slots).to eq([])
      end
    end
  end
  # The site requires cookies for session tracking; without the marker cookie
  # the POST must bounce to the explanation page.
  context "when cookies are disabled" do
    it "redirects the user to a page telling them that they won't be able to use the site" do
      get :edit
      expect(response).to be_success
      post :update, prisoner_hash
      expect(response).to redirect_to(cookies_disabled_path)
    end
  end
  context "cookies are enabled" do
    before :each do
      cookies['cookies-enabled'] = 1
      allow(request).to receive(:ssl?).and_return(true)
    end
    it "renders the form for entering prisoner details, and assigns the session" do
      expect(SecureRandom).to receive(:hex).and_return(visit_id = 'LOL' * 10)
      expect(controller).to receive(:logstasher_add_visit_id).with(visit_id)
      expect {
        get :edit
        expect(response).to be_success
      }.to change { session[:visit] }
    end
    it "sets the 'cookies-enabled' cookie" do
      allow(controller).to receive(:service_domain).and_return('lol.biz.info')
      get :edit
      expect(response).to be_success
      response['Set-Cookie'].tap do |c|
        expect(c).to match(/secure/i)
        expect(c).to match(/httponly/i)
        expect(c).to match(/domain=lol.biz.info/i)
      end
    end
    context "given valid prisoner details" do
      before :each do
        get :edit
      end
      # NOTE(review): description says "email flow" but the assertion expects
      # the visitors-details path — confirm which is intended.
      it "updates prisoner details and redirects to the email flow" do
        post :update, prisoner_hash
        expect(response).to redirect_to(edit_visitors_details_path)
      end
      it "updates prisoner details with bad date and redirects back" do
        bad_prisoner_hash = prisoner_hash.dup
        bad_prisoner_hash[:prisoner].except!(:date_of_birth)
        post :update, bad_prisoner_hash
        expect(response).to redirect_to(edit_prisoner_details_path)
      end
      it "doesn't accept the year as having two digits" do
        prisoner_hash[:prisoner][:date_of_birth] = {
          day: '5', month: '2', year: '12'
        }
        post :update, prisoner_hash
        expect(response).to redirect_to(edit_prisoner_details_path)
      end
      context "whitespace trimming" do
        it "removes whitespace from strings" do
          post :update, { prisoner: { first_name: ' Jimmy ', last_name: ' Harris ' } }
          expect(controller.visit.prisoner.first_name).to eq('Jimmy')
          expect(controller.visit.prisoner.last_name).to eq('Harris')
        end
      end
    end
  end
end
| 29.126126 | 93 | 0.637488 |
6251272e4e58d149967cef056e06df69cfbd2d80 | 1,240 | class Tcptraceroute < Formula
desc "Traceroute implementation using TCP packets"
homepage "https://github.com/mct/tcptraceroute"
url "https://github.com/mct/tcptraceroute/archive/tcptraceroute-1.5beta7.tar.gz"
version "1.5beta7"
sha256 "57fd2e444935bc5be8682c302994ba218a7c738c3a6cae00593a866cd85be8e7"
bottle do
cellar :any
sha256 "c2d7b7b4d4274897669112375be7873f7387b729c66023ae81a5cb5a518786d5" => :sierra
sha256 "e45c866a01dd651b307b0f83798adbd2f53b9fa1ca4be3b0e830adcf3df67e66" => :el_capitan
sha256 "e44ef687b867ae96dbce19cdc305eb8561b076758690b300ea97552092de578e" => :yosemite
sha256 "1df9e820ccefadd97512902853c9849dfe29598b361be548202a7e32a77a3b35" => :mavericks
end
depends_on "libnet"
def install
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--with-libnet=#{HOMEBREW_PREFIX}",
"--mandir=#{man}"
system "make", "install"
end
def caveats; <<-EOS.undent
tcptraceroute requires root privileges so you will need to run
`sudo tcptraceroute`.
You should be certain that you trust any software you grant root privileges.
EOS
end
end
| 37.575758 | 92 | 0.720161 |
1c1d19b30e3641be9d862a9865c1297bf7c2d0f1 | 208 | require "pathname"
require "erb"
module Dklet
  # Pathname of the directory containing this file (the gem's lib dir).
  def self.lib_path
    Pathname(__dir__)
  end
end
require "dklet/version"
require 'dklet/util'
require 'dklet/dsl'
require 'dklet/cli'
| 13 | 23 | 0.697115 |
032a6d5d4a70e7966691a94dcb0dba92f426bae6 | 1,895 | class Treefrog < Formula
desc "High-speed C++ MVC Framework for Web Application"
homepage "http://www.treefrogframework.org/"
url "https://github.com/treefrogframework/treefrog-framework/archive/v1.22.0.tar.gz"
sha256 "0b9d79d0e17266ff603c1ff812289e8d2500d8f758d3c700ccc3aaad51e3751d"
head "https://github.com/treefrogframework/treefrog-framework.git"
bottle do
sha256 "c556be38f614f516c43ccf411d1c6002349b4931b2e32015e162087231dc5649" => :mojave
sha256 "246236215fdb5e23dd57a58b53c2c4c41ea802468c614c8a7075c576e24644bc" => :high_sierra
sha256 "451ff0b20b9ca36b1eb6eaf6af9c13a56e74031d6c15586fa8920f1189d67262" => :sierra
sha256 "6ea31d04a301bee2b365c5affaea54286814718636386e2e0c9a2bfa9472c1f3" => :el_capitan
end
deprecated_option "with-qt5" => "with-qt"
option "with-mysql", "enable --with-mysql option for Qt build"
option "with-postgresql", "enable --with-postgresql option for Qt build"
option "with-qt", "build and link with QtGui module"
depends_on :macos => :el_capitan
depends_on :xcode => [:build, "8.0"]
qt_build_options = []
qt_build_options << "with-mysql" if build.with?("mysql")
qt_build_options << "with-postgresql" if build.with?("postgresql")
depends_on "qt" => qt_build_options
def install
args = ["--prefix=#{prefix}"]
args << "--enable-gui-mod" if build.with? "qt"
system "./configure", *args
cd "src" do
system "make"
system "make", "install"
end
cd "tools" do
system "make"
system "make", "install"
end
end
  # Smoke test: scaffold an app with tspawn, qmake it, build, and confirm
  # the treefrog binary answers to -v.
  test do
    system bin/"tspawn", "new", "hello"
    assert_predicate testpath/"hello", :exist?
    cd "hello" do
      assert_predicate Pathname.pwd/"hello.pro", :exist?
      system HOMEBREW_PREFIX/"opt/qt/bin/qmake"
      assert_predicate Pathname.pwd/"Makefile", :exist?
      system "make"
      system bin/"treefrog", "-v"
    end
  end
| 32.672414 | 93 | 0.709763 |
08223013808c246d563535792406464420516f0b | 1,380 | # frozen_string_literal: true
module GraphQL
  module StaticValidation
    # Abstract base for argument validators. Subclasses implement
    # #validate_node(parent, node, parent_defn, context).
    class ArgumentsValidator
      include GraphQL::StaticValidation::Message::MessageHelper

      # Registers an Argument visitor that resolves the definition owning
      # the argument (input object field, directive, or field) and defers
      # to the subclass's validate_node.
      def validate(context)
        visitor = context.visitor
        visitor[GraphQL::Language::Nodes::Argument] << ->(node, parent) {
          if parent.is_a?(GraphQL::Language::Nodes::InputObject)
            arg_defn = context.argument_definition
            if arg_defn.nil?
              return
            else
              parent_defn = arg_defn.type.unwrap
              if parent_defn.is_a?(GraphQL::ScalarType)
                # A scalar input has no argument definitions to check against.
                return
              end
            end
          elsif parent.is_a?(GraphQL::Language::Nodes::Directive)
            parent_defn = context.schema.directives[parent.name]
          else
            parent_defn = context.field_definition
          end
          validate_node(parent, node, parent_defn, context)
        }
      end

      private

      # Human-readable name of the node owning an argument, for error
      # messages. (Removed a redundant local assignment; the conditional's
      # value is returned directly — behavior unchanged.)
      def parent_name(parent, type_defn)
        if parent.is_a?(GraphQL::Language::Nodes::Field)
          parent.alias || parent.name
        elsif parent.is_a?(GraphQL::Language::Nodes::InputObject)
          type_defn.name
        else
          parent.name
        end
      end

      # Short class name of +parent+, e.g. "Field" or "Directive".
      def node_type(parent)
        parent.class.name.split("::").last
      end
    end
  end
end
| 28.75 | 73 | 0.597101 |
b92bce0b88d5c0f9a2e73cf5581d6227d5419ec8 | 1,927 | class UserMailer < ApplicationMailer
default from: "Feedbin <#{ENV["FROM_ADDRESS"]}>", skip_premailer: true
include ApplicationHelper
helper ApplicationHelper
def payment_receipt(billing_event)
@billing_event = BillingEvent.find(billing_event)
@user = @billing_event.billable
mail to: @user.email, subject: "[Feedbin] Payment Receipt"
end
def payment_failed(billing_event)
@billing_event = BillingEvent.find(billing_event)
@user = @billing_event.billable
mail to: @user.email, subject: "[Feedbin] Please Update Your Billing Information"
end
def password_reset(user_id, reset_token)
@user = User.find(user_id)
@reset_token = reset_token
mail to: @user.email, subject: "[Feedbin] Password Reset"
end
def trial_expiration(user_id)
@user = User.find(user_id)
mail to: @user.email, subject: "[Feedbin] Your Trial is About to End"
end
def timed_plan_expiration(user_id)
@user = User.find(user_id)
mail to: @user.email, subject: "[Feedbin] Your Account has Expired"
end
def starred_export_download(user_id, download_link)
@user = User.find(user_id)
@download_link = download_link
mail to: @user.email, subject: "[Feedbin] Starred Items Export Complete"
end
def kindle(kindle_address, mobi_file)
attachments["kindle.mobi"] = File.read(mobi_file)
mail to: kindle_address, subject: "Kindle Content", body: ".", from: ENV["KINDLE_EMAIL"]
end
def mailtest(user_id)
@user = User.find(user_id)
mail to: @user.email, subject: "[Feedbin] Starred Items Export Complete", body: ""
end
def account_closed(user_id, opml)
@user = User.find(user_id)
attachments["subscriptions.xml"] = opml
mail to: @user.email, subject: "[Feedbin] Account Closed"
end
def twitter_connection_error(user_id)
@user = User.find(user_id)
mail to: @user.email, subject: "[Feedbin] Twitter Connection Error"
end
end
| 31.590164 | 92 | 0.714063 |
38c587284c2b6e972491e6b548659512dce9642f | 2,236 | require 'spree_core'
require 'spree_multi_tenant/engine'
require 'multitenant'
module SpreeMultiTenant
def self.tenanted_models
[
Spree::Activator,
Spree::Address,
Spree::Adjustment,
Spree::Asset,
Spree::Calculator,
Spree::Configuration,
Spree::Country,
Spree::CreditCard,
Spree::Gateway,
Spree::InventoryUnit,
Spree::LineItem,
Spree::LogEntry,
Spree::MailMethod,
Spree::OptionType,
Spree::OptionValue,
Spree::Order,
Spree::PaymentMethod,
Spree::Payment,
Spree::Preference,
Spree::ProductOptionType,
Spree::ProductProperty,
Spree::Product,
Spree::Property,
Spree::Prototype,
Spree::ReturnAuthorization,
Spree::Role,
Spree::Shipment,
Spree::ShippingCategory,
Spree::ShippingMethod,
Spree::StateChange,
Spree::State,
Spree::TaxCategory,
Spree::TaxRate,
Spree::Taxonomy,
Spree::Taxon,
Spree::TokenizedPermission,
Spree::Tracker,
# Spree::User,
Spree::Variant,
Spree::ZoneMember,
Spree::Zone,
# Spree::OptionTypesPrototype,
# Spree::OptionValuesVariant,
# Spree::PendingPromotion,
# Spree::ProductScope,
# Spree::ProductsPromotionRule,
# Spree::ProductsTaxon,
# Spree::PromotionRulesUser,
# Spree::PropertiesPrototype,
# Spree::RolesUser,
Spree::Promotion,
Spree::PromotionRule,
Spree::PromotionAction,
Spree::PromotionActionLineItem,
Spree::Promotion::Actions::CreateLineItems,
Spree::Promotion::Actions::CreateAdjustment,
Spree::Promotion::Rules::FirstOrder,
Spree::Promotion::Rules::ItemTotal,
Spree::Promotion::Rules::Product,
Spree::Promotion::Rules::User,
Spree::Promotion::Rules::UserLoggedIn,
]
end
def self.tenanted_controllers
[
Spree::BaseController,
# Spree::UserPasswordsController,
# Spree::UserSessionsController,
# Spree::UserRegistrationsController
]
end
def self.with_tenant(tenant, &block)
Multitenant.with_tenant tenant do
SpreeMultiTenant.init_preferences
yield
end
end
end
| 24.304348 | 50 | 0.639088 |
e2dba0286fa4396c6c83cfca85f0deeb5d884543 | 924 | require 'rails_helper'
# Specs for the company-registration data correction: the registration
# number is mandatory only for legal forms flagged as requiring it.
describe CompanyRegistrationDataCorrection do
  it_behaves_like 'a basic data correction presenter', :company_registration, '654321', nil, { legal_form_id: LegalForm::PLC.id }
  describe "validations" do
    subject(:data_correction) { FactoryGirl.build(:company_registration_data_correction, loan: loan) }
    # Exercise every legal form so requires_company_registration is covered
    # in both states.
    LegalForm.all.each do |legal_form|
      context "when legal_form is #{legal_form.name}" do
        let(:loan) { FactoryGirl.build(:loan, legal_form_id: legal_form.id) }
        it "#{legal_form.requires_company_registration ? 'requires' : 'does not require' } company registration number" do
          data_correction.company_registration = nil
          expect(data_correction.valid?).to eql(!legal_form.requires_company_registration)
          data_correction.company_registration = "B1234567890"
          expect(data_correction).to be_valid
        end
      end
    end
  end
end
1ceed2d40c27c59c8811028bc5c51b1780461449 | 18,270 | # frozen_string_literal: true
require "spec_helper"
require_relative './copyable_spec_models'
describe Mongoid::Copyable do
[ :clone, :dup ].each do |method|
describe "##{method}" do
let(:person) do
Person.new(
title: "Sir",
version: 4,
created_at: Time.now,
updated_at: Time.now,
desc: "description"
) do |p|
p.owner_id = 5
end
end
let!(:address) do
person.addresses.build(street: "Bond", name: "Bond")
end
let!(:name) do
person.build_name(first_name: "Judy")
end
let!(:posts) do
person.posts.build(title: "testing")
end
let!(:game) do
person.build_game(name: "Tron")
end
let!(:name_translations) do
person.name.translations.build(language: 'en')
end
context "when the document has an id field in the database" do
let!(:band) do
Band.create(name: "Tool")
end
before do
Band.collection.find(_id: band.id).update_one("$set" => { "id" => 1234 })
end
let!(:cloned) do
band.reload.send(method)
end
it "does not set the id field as the _id" do
expect(cloned.id).to_not eq(1234)
end
end
context "when a document has fields from a legacy schema" do
shared_examples 'behaves as expected' do
let!(:instance) do
cls.create(name: "test")
end
before do
legacy_fields = { "this_is_not_a_field" => 1, "this_legacy_field_is_nil" => nil }
cls.collection.find(_id: instance.id).update_one("$set" => legacy_fields)
end
let(:cloned) do
instance.reload.send(method)
end
it "sets the legacy attribute" do
expect(cloned.attributes['this_is_not_a_field']).to eq(1)
end
it "contains legacy attributes that are nil" do
expect(cloned.attributes.key?('this_legacy_field_is_nil')).to eq(true)
end
it "copies the known attributes" do
expect(cloned.name).to eq('test')
end
it 'calls constructor with explicitly declared attributes only' do
expect(cls).to receive(:new).with('name' => 'test').and_call_original
cloned
end
end
context 'without Attributes::Dynamic' do
let(:cls) { CopyableSpec::Reg }
before do
cls.should_not include(Mongoid::Attributes::Dynamic)
end
include_examples 'behaves as expected'
end
context 'with Attributes::Dynamic' do
let(:cls) { CopyableSpec::Dyn }
before do
cls.should include(Mongoid::Attributes::Dynamic)
end
include_examples 'behaves as expected'
end
end
context "when using store_as" do
context "and dynamic attributes are not set" do
context 'embeds_one' do
it "clones" do
t = StoreAsDupTest1.new(:name => "hi")
t.build_store_as_dup_test2(:name => "there")
t.save
copy = t.send(method)
expect(copy.object_id).not_to eq(t.object_id)
expect(copy.store_as_dup_test2.name).to eq(t.store_as_dup_test2.name)
end
end
context 'embeds_many' do
it "clones" do
t = StoreAsDupTest3.new(:name => "hi")
t.store_as_dup_test4s << StoreAsDupTest4.new
t.save
copy = t.send(method)
expect(copy.object_id).not_to eq(t.object_id)
expect(copy.store_as_dup_test4s).not_to be_empty
expect(copy.store_as_dup_test4s.first.object_id).not_to eq(t.store_as_dup_test4s.first.object_id)
end
end
end
end
context 'nested embeds_many' do
it 'works' do
a = CopyableSpec::A.new
a.locations << CopyableSpec::Location.new
a.locations.first.buildings << CopyableSpec::Building.new
a.save!
new_a = a.send(method)
expect(new_a.locations.length).to be 1
expect(new_a.locations.first.buildings.length).to be 1
end
end
context "when cloning a document with multiple languages field" do
before do
I18n.enforce_available_locales = false
I18n.locale = 'pt_BR'
person.desc = "descrição"
person.addresses.first.name = "descrição"
person.save
end
after do
I18n.locale = :en
end
let!(:from_db) do
Person.find(person.id)
end
let(:copy) do
from_db.send(method)
end
it "sets the pt_BR version" do
I18n.locale = 'pt_BR'
expect(copy.desc).to eq("descrição")
end
it "sets the english version" do
I18n.locale = :en
expect(copy.desc).to eq("description")
end
it "sets to nil an nonexistent lang" do
I18n.locale = :fr
expect(copy.desc).to be_nil
end
it 'sets embedded translations' do
I18n.locale = 'pt_BR'
expect(copy.addresses.first.name).to eq("descrição")
end
it 'sets embedded english version' do
I18n.locale = :en
expect(copy.addresses.first.name).to eq("Bond")
end
end
context "when cloning a document with polymorphic embedded documents with multiple language field" do
let!(:shipment_address) do
person.addresses.build({ shipping_name: "Title" }, ShipmentAddress)
end
before do
I18n.enforce_available_locales = false
I18n.locale = 'pt_BR'
person.addresses.type(ShipmentAddress).each { |address| address.shipping_name = "Título" }
person.save!
end
after do
I18n.locale = :en
end
let!(:from_db) do
Person.find(person.id)
end
let(:copy) do
from_db.send(method)
end
it 'sets embedded translations' do
I18n.locale = 'pt_BR'
copy.addresses.type(ShipmentAddress).each do |address|
expect(address.shipping_name).to eq("Título")
end
end
end
context "when cloning a loaded document" do
before do
person.save
end
let!(:from_db) do
Person.find(person.id)
end
let(:copy) do
from_db.send(method)
end
it "marks the fields as dirty" do
expect(copy.changes["age"]).to eq([ nil, 100 ])
end
it "flags the document as changed" do
expect(copy).to be_changed
end
it "copies protected fields" do
expect(copy.owner_id).to eq(5)
end
end
context "when the document is new" do
context "when there are changes" do
let(:copy) do
person.send(method)
end
before do
person[:versions] = [ { number: 1 } ]
end
it "returns a new document" do
expect(copy).to_not be_persisted
end
it "has an id" do
expect(copy.id).to_not be_nil
end
it "flags the document as changed" do
expect(copy).to be_changed
end
it "marks fields as dirty" do
expect(copy.changes["age"]).to eq([ nil, 100 ])
end
it "has a different id from the original" do
expect(copy.id).to_not eq(person.id)
end
it "returns a new instance" do
expect(copy).to_not be_eql(person)
end
it "copys embeds many documents" do
expect(copy.addresses).to eq(person.addresses)
end
it "copys deep embeds many documents" do
expect(copy.name.translations).to eq(person.name.translations)
end
it "sets the embedded many documents as new" do
expect(copy.addresses.first).to be_new_record
end
it "sets the deep embedded many documents as new" do
expect(copy.name.translations.first).to be_new_record
end
it "creates new embeds many instances" do
expect(copy.addresses).to_not equal(person.addresses)
end
it "creates new deep embeds many instances" do
expect(copy.name.translations).to_not equal(person.name.translations)
end
it "copys embeds one documents" do
expect(copy.name).to eq(person.name)
end
it "flags the embeds one documents as new" do
expect(copy.name).to be_new_record
end
it "creates a new embeds one instance" do
expect(copy.name).to_not equal(person.name)
end
it "does not copy referenced many documents" do
expect(copy.posts).to be_empty
end
it "does not copy references one documents" do
expect(copy.game).to be_nil
end
it "copies localized fields" do
expect(copy.desc).to eq("description")
end
context "when saving the copy" do
let(:reloaded) do
copy.reload
end
before do
copy.save(validate: false)
end
it "persists the attributes" do
expect(reloaded.title).to eq("Sir")
end
it "persists the embeds many relation" do
expect(reloaded.addresses).to eq(person.addresses)
end
it "persists the embeds one relation" do
expect(reloaded.name).to eq(person.name)
end
end
end
context "when using a custom discriminator_key" do
before do
Person.discriminator_key = "dkey"
end
after do
Person.discriminator_key = nil
end
let(:copy) do
person.send(method)
end
before do
person[:versions] = [ { number: 1 } ]
end
it "copys embeds many documents" do
expect(copy.addresses).to eq(person.addresses)
end
it "copys deep embeds many documents" do
expect(copy.name.translations).to eq(person.name.translations)
end
it "sets the embedded many documents as new" do
expect(copy.addresses.first).to be_new_record
end
it "sets the deep embedded many documents as new" do
expect(copy.name.translations.first).to be_new_record
end
it "creates new embeds many instances" do
expect(copy.addresses).to_not equal(person.addresses)
end
it "creates new deep embeds many instances" do
expect(copy.name.translations).to_not equal(person.name.translations)
end
it "copys embeds one documents" do
expect(copy.name).to eq(person.name)
end
it "flags the embeds one documents as new" do
expect(copy.name).to be_new_record
end
it "creates a new embeds one instance" do
expect(copy.name).to_not equal(person.name)
end
context "when saving the copy" do
let(:reloaded) do
copy.reload
end
before do
copy.save(validate: false)
end
it "persists the attributes" do
expect(reloaded.title).to eq("Sir")
end
it "persists the embeds many relation" do
expect(reloaded.addresses).to eq(person.addresses)
end
it "persists the embeds one relation" do
expect(reloaded.name).to eq(person.name)
end
end
end
end
context "when the document is not new" do
before do
person.new_record = false
end
context "when there are changes" do
let(:copy) do
person.send(method)
end
before do
person[:versions] = [ { number: 1 } ]
end
it "flags the document as changed" do
expect(copy).to be_changed
end
it "marks fields as dirty" do
expect(copy.changes["age"]).to eq([ nil, 100 ])
end
it "returns a new document" do
expect(copy).to_not be_persisted
end
it "has an id" do
expect(copy.id).to_not be_nil
end
it "has a different id from the original" do
expect(copy.id).to_not eq(person.id)
end
it "returns a new instance" do
expect(copy).to_not be_eql(person)
end
it "copys embeds many documents" do
expect(copy.addresses).to eq(person.addresses)
end
it "creates new embeds many instances" do
expect(copy.addresses).to_not equal(person.addresses)
end
it "copys embeds one documents" do
expect(copy.name).to eq(person.name)
end
it "creates a new embeds one instance" do
expect(copy.name).to_not equal(person.name)
end
it "does not copy referenced many documents" do
expect(copy.posts).to be_empty
end
it "does not copy references one documents" do
expect(copy.game).to be_nil
end
context "when saving the copy" do
let(:reloaded) do
copy.reload
end
before do
copy.save(validate: false)
end
it "persists the attributes" do
expect(reloaded.title).to eq("Sir")
end
it "persists the embeds many relation" do
expect(reloaded.addresses).to eq(person.addresses)
end
it "persists the embeds one relation" do
expect(reloaded.name).to eq(person.name)
end
end
end
end
context "when the document is frozen" do
let!(:copy) do
person.freeze.send(method)
end
it "returns a new document" do
expect(copy).to_not be_persisted
end
it "has an id" do
expect(copy.id).to_not be_nil
end
it "has a different id from the original" do
expect(copy.id).to_not eq(person.id)
end
it "returns a new instance" do
expect(copy).to_not be_eql(person)
end
it "copys embeds many documents" do
expect(copy.addresses).to eq(person.addresses)
end
it "creates new embeds many instances" do
expect(copy.addresses).to_not equal(person.addresses)
end
it "copys embeds one documents" do
expect(copy.name).to eq(person.name)
end
it "creates a new embeds one instance" do
expect(copy.name).to_not equal(person.name)
end
it "does not copy referenced many documents" do
expect(copy.posts).to be_empty
end
it "does not copy references one documents" do
expect(copy.game).to be_nil
end
it "keeps the original attributes frozen" do
expect(person.attributes).to be_frozen
end
context "when saving the copy" do
let(:reloaded) do
copy.reload
end
before do
copy.save(validate: false)
end
it "persists the attributes" do
expect(reloaded.title).to eq("Sir")
end
it "persists the embeds many relation" do
expect(reloaded.addresses).to eq(person.addresses)
end
it "persists the embeds one relation" do
expect(reloaded.name).to eq(person.name)
end
end
end
context "when cloning a document with an embedded child class and a custom discriminator value" do
before do
ShipmentAddress.discriminator_value = "dvalue"
end
after do
ShipmentAddress.discriminator_value = nil
end
let!(:shipment_address) do
person.addresses.build({}, ShipmentAddress)
end
before do
person.save
end
let!(:from_db) do
Person.find(person.id)
end
let(:copy) do
from_db.send(method)
end
it "copys embeds many documents" do
expect(copy.addresses).to eq(person.addresses)
end
end
context 'when cloning a document with embedded child that uses inheritance' do
let(:original) do
CopyableSpec::A.new(influencers: [child_cls.new])
end
let(:copy) do
original.send(method)
end
context 'embedded child is root of hierarchy' do
let(:child_cls) do
CopyableSpec::Influencer
end
before do
# When embedded class is the root in hierarchy, their
# discriminator value is not explicitly stored.
child_cls.discriminator_mapping[child_cls.name].should be nil
end
it 'works' do
copy.class.should be original.class
copy.object_id.should_not == original.object_id
end
end
context 'embedded child is leaf of hierarchy' do
let(:child_cls) do
CopyableSpec::Youtuber
end
before do
# When embedded class is a leaf in hierarchy, their
# discriminator value is explicitly stored.
child_cls.discriminator_mapping[child_cls.name].should_not be nil
end
it 'works' do
copy.class.should be original.class
copy.object_id.should_not == original.object_id
end
end
end
end
end
end
| 26.062767 | 111 | 0.548823 |
d5b2528ab8801e1d51d49f6fd5653e03bf0f8790 | 1,369 | require 'ebay/types/item'
require 'ebay/types/member_message'
require 'ebay/types/message_media'
module Ebay # :nodoc:
  module Types # :nodoc:
    # XML-mapped value object for eBay's MemberMessageExchange container:
    # an item-related question, its responses, status, and timestamps.
    #
    # == Attributes
    #  object_node :item, 'Item', :class => Item, :optional => true
    #  object_node :question, 'Question', :class => MemberMessage, :optional => true
    #  value_array_node :responses, 'Response', :default_value => []
    #  text_node :message_status, 'MessageStatus', :optional => true
    #  time_node :creation_date, 'CreationDate', :optional => true
    #  time_node :last_modified_date, 'LastModifiedDate', :optional => true
    #  array_node :message_media, 'MessageMedia', :class => MessageMedia, :default_value => []
    class MemberMessageExchange
      include XML::Mapping
      include Initializer
      root_element_name 'MemberMessageExchange'
      object_node :item, 'Item', :class => Item, :optional => true
      object_node :question, 'Question', :class => MemberMessage, :optional => true
      value_array_node :responses, 'Response', :default_value => []
      text_node :message_status, 'MessageStatus', :optional => true
      time_node :creation_date, 'CreationDate', :optional => true
      time_node :last_modified_date, 'LastModifiedDate', :optional => true
      array_node :message_media, 'MessageMedia', :class => MessageMedia, :default_value => []
    end
  end
end
| 44.16129 | 94 | 0.688093 |
5d364999ba292454f9479d1ed6735e2859b1d4d4 | 1,010 | require 'sinatra'
require 'open-uri'
require 'digest/sha1'
require 'haml'
# Root URL of the target Heroku app, taken from the :app route parameter.
def heroku_url
  "https://#{params[:app]}.herokuapp.com/"
end
# Maps an HTTP status code string to a shields.io badge slug:
# "200" => up, "503" => down, anything else (including nil) => unknown.
# Removed an unreachable `rescue => e` clause: String `===` comparisons in
# the case cannot raise, and the binding was never used.
def badge(status_code)
  case status_code
  when '200' then 'heroku-up-brightgreen'
  when '503' then 'heroku-down-red'
  else 'heroku-unknown-orange'
  end
end
# Absolute shields.io image URL for a badge slug.
def badge_url(badge)
  format('https://img.shields.io/badge/%s.svg', badge)
end
# Probes the app's root URL and redirects to the matching status badge,
# appending the caller's original query string (e.g. badge style options).
# NOTE(review): params[:app] is user input interpolated into the fetched
# URL via heroku_url — potential SSRF surface; confirm acceptable.
def redirect_to_badge(query)
  begin
    status_code = open(heroku_url).status.first
  rescue OpenURI::HTTPError => error
    # open-uri raises on non-2xx; the real status is on the error's io.
    response = error.io
    status_code = response.status.first
  end
  redirect badge_url(badge status_code) + query
end
# Landing page.
get '/' do
  haml :index
end
# Returns the badge endpoint URL for the given app (for embedding).
get '/badge/:app' do
  app = params[:app]
  "#{request.base_url}/#{app}"
end
# The badge itself; anti-cache headers force clients to re-check status.
get '/:app' do
  response.headers['Cache-Control'] = 'no-cache'
  response.headers['Last-Modified'] = Time.now.httpdate
  response.headers['ETag'] = Time.now.utc.strftime("%s%L")
  redirect_to_badge request.env['rack.request.query_string']
end
| 19.803922 | 60 | 0.70495 |
011e38d4293492e5d92edbd03290a0699ee8ddbf | 3,708 | module CustomValidators
class Emails
# please refer to : http://stackoverflow.com/questions/703060/valid-email-address-regular-expression
def self.email_validator
/\A(|(([A-Za-z0-9]+_+)|([A-Za-z0-9]+\-+)|([A-Za-z0-9]+\.+)|([A-Za-z0-9]+\++))*[A-Za-z0-9]+@((\w+\-+)|(\w+\.))*\w{1,63}\.[a-zA-Z]{2,15})\z/i
end
end
class Numbers
def self.phone_number_validator
#/\A\d?(?:(?:[\+]?(?:[\d]{1,3}(?:[ ]+|[\-.])))?[(]?(?:[\d]{3})[\-)]?(?:[ ]+)?)?(?:[a-zA-Z2-9][a-zA-Z0-9 \-.]{6,})(?:(?:[ ]+|[xX]|(i:ext[\.]?)){1,2}(?:[\d]{1,5}))?\z/
/(?:\+?|\b)[0-9]{10}\b/
end
def self.us_and_canda_zipcode_validator
/(\A\d{5}(-\d{4})?\z)|(\A[ABCEGHJKLMNPRSTVXYabceghjklmnprstvxy]{1}\d{1}[A-Za-z]{1} *\d{1}[A-Za-z]{1}\d{1}\z)/
end
def self.usps_tracking_number_validator
/\b(91\d\d ?\d\d\d\d ?\d\d\d\d ?\d\d\d\d ?\d\d\d\d ?\d\d|91\d\d ?\d\d\d\d ?\d\d\d\d ?\d\d\d\d ?\d\d\d\d)\b/i
end
def self.fedex_tracking_number_validator
/\A([0-9]{15}|4[0-9]{11})\z/
end
def self.ups_tracking_number_validator
#/\b(1Z ?[0-9A-Z]{3} ?[0-9A-Z]{3} ?[0-9A-Z]{2} ?[0-9A-Z]{4} ?[0-9A-Z]{3} ?[0-9A-Z]|[\dT]\d\d\d ?\d\d\d\d ?\d\d\d)\b/i
/\A(1Z\s*\d{3}\s*\d{3}\s*\d{2}\s*\d{4}\s*\d{3}\s*\d|[0-35-9]\d{3}\s*\d{4}\s*\d{4}|T\d{3}\s*\d{4}\s*\d{3})\z/
end
end
class Names
def self.name_validator
#/([a-zA-Z-’'` ].+)/ \A and \z
#/^([a-z])+([\\']|[']|[\.]|[\s]|[-]|)+([a-z]|[\.])+$/i
#/^([a-z]|[\\']|[']|[\.]|[\s]|[-]|)+([a-z]|[\.])+$/i
/\A([[:alpha:]]|[\\']|[']|[\.]|[\s]|[-]|)+([[:alpha:]]|[\.])+\z/i
end
end
def validates_ssn(*attr_names)
attr_names.each do |attr_name|
validates_format_of attr_name,
:with => /\A[\d]{3}-[\d]{2}-[\d]{4}\z/,
:message => "must be of format ###-##-####"
end
end
=begin
# CREDIT CARDS
# * Visa: ^4[0-9]{12}(?:[0-9]{3})?$ All Visa card numbers start with a 4. New cards have 16 digits. Old cards have 13.
# * MasterCard: ^5[1-5][0-9]{14}$ All MasterCard numbers start with the numbers 51 through 55. All have 16 digits.
# * American Express: ^3[47][0-9]{13}$ American Express card numbers start with 34 or 37 and have 15 digits.
# * Diners Club: ^3(?:0[0-5]|[68][0-9])[0-9]{11}$ Diners Club card numbers begin with 300 through 305, 36 or 38. All have 14 digits. There are Diners Club cards that begin with 5 and have 16 digits. These are a joint venture between Diners Club and MasterCard, and should be processed like a MasterCard.
# * Discover: ^6(?:011|5[0-9]{2})[0-9]{12}$ Discover card numbers begin with 6011 or 65. All have 16 digits.
# * JCB: ^(?:2131|1800|35\d{3})\d{11}$ JCB cards beginning with 2131 or 1800 have 15 digits. JCB cards beginning with 35 have 16 digits.
=end
class CreditCards
def self.validate_card(type)
case type
when 'VISA' || 'visa'
self.visa_validator
when 'MC' || 'MasterCard'
self.mastercard_validator
when 'AMEX' || 'AmericanExpress'
self.american_express_validator
when 'Diners' || 'DinersClub'
self.diners_club_validator
when 'Discover' || 'DiscoverCard'
self.discover_validator
when 'JCB'
self.jcb_validator
end
end
def self.visa_validator
/^4[0-9]{12}(?:[0-9]{3})?$/
end
def self.mastercard_validator
/^5[1-5][0-9]{14}$/
end
def self.american_express_validator
/^3[47][0-9]{13}$/
end
def self.diners_club_validator
/^3(?:0[0-5]|[68][0-9])[0-9]{11}$/
end
def self.discover_validator
/^6(?:011|5[0-9]{2})[0-9]{12}$/
end
def self.jcb_validator
/^(?:2131|1800|35\d{3})\d{11}$/
end
end
end
| 38.226804 | 305 | 0.540992 |
# Homebrew formula for docker2aci, a converter from Docker images to the
# App Container (ACI) format.
class Docker2aci < Formula
  desc "Library and CLI tool to convert Docker images to ACIs"
  homepage "https://github.com/appc/docker2aci"
  url "https://github.com/appc/docker2aci/archive/v0.17.2.tar.gz"
  sha256 "43cb18a3647ca8bae48a283fa3359e9555ab7a366c7ee9ef8a561797cebe2593"
  license "Apache-2.0"

  # Prebuilt bottles; :any_skip_relocation because the Go binary is
  # position-independent of the Cellar path.
  bottle do
    sha256 cellar: :any_skip_relocation, catalina: "88e274de48f853b78bf647cf5860f74cb99ef4ca3a99c9cbb8500318e20952bc"
    sha256 cellar: :any_skip_relocation, mojave: "38c55da3d7dae54ac615b1ef70d3b793ace880a3df8324c94586cbdcb0069a47"
    sha256 cellar: :any_skip_relocation, high_sierra: "786e30d746607eea372c8eaa2705f850320dd74e28385fd3b75946e6e8c8e52d"
    sha256 cellar: :any_skip_relocation, sierra: "6cfeb751ff7db4e703938e2bfc4e28d4ec9a30e59261e75aa5adf690d0f33061"
    sha256 cellar: :any_skip_relocation, el_capitan: "b1a61fc4d329ef1e3ad97ea701e2c0be392f29e8d4a8bd2f1934bf7bac620121"
    sha256 cellar: :any_skip_relocation, x86_64_linux: "cd729e36e463154b10f4198024ca0760669ebf198f12dc2d3b73162684ae2ddd" # linuxbrew-core
  end

  # See https://github.com/rkt/rkt/issues/4024
  deprecate! date: "2020-02-24", because: :repo_archived

  depends_on "go" => :build

  def install
    # Pre-modules GOPATH layout: symlink the unpacked source to the import
    # path expected by `go build`, and stamp the version via -ldflags.
    ENV["GOPATH"] = buildpath
    (buildpath/"src/github.com/appc").mkpath
    ln_s buildpath, buildpath/"src/github.com/appc/docker2aci"
    system "go", "build", "-o", bin/"docker2aci", "-ldflags",
           "-X github.com/appc/docker2aci/lib.Version=#{version}",
           "github.com/appc/docker2aci"
  end

  test do
    # Check the stamped version, then exercise a real conversion pull.
    # NOTE(review): the second step needs network access to Docker Hub.
    assert_match version.to_s, shell_output("#{bin}/docker2aci -version")
    system "#{bin}/docker2aci", "docker://busybox"
  end
end
| 46.361111 | 138 | 0.769323 |
# Homebrew formula installing a prebuilt balloonfs FUSE client binary.
class Balloonfs < Formula
  desc "balloonfs (FUSE) for OS X"
  homepage "https://github.com/gyselroth/balloon-client-fuse"
  # NOTE(review): the download URL points at the tubee-client-cli releases,
  # not the balloon-client-fuse repository named in `homepage` — confirm the
  # upstream artifact location is correct.
  url "https://github.com/gyselroth/tubee-client-cli/releases/download/v1.0.0-alpha5/balloonfs-osx-x64"
  sha256 "adedb4efe8affb669da567138f4faa342df3d759587faa1777d90018e17bc9f5"
  # NOTE(review): Homebrew convention is a bare version string without the
  # leading "v"; kept as-is to avoid changing the installed prefix.
  version "v1.0.0-alpha5"
  bottle :unneeded
  def install
    # The release asset is a single prebuilt binary: install it, then rename
    # it to the mount-helper name.
    bin.install "balloonfs-osx-x64"
    mv bin/"balloonfs-osx-x64", bin/"mount.balloon"
  end
end
| 30.8 | 103 | 0.757576 |
# Homebrew formula for docker2aci (older formula-DSL revision: no `license`
# stanza, bare `cellar` call, and `sha256 ... => :os` bottle syntax).
class Docker2aci < Formula
  desc "Library and CLI tool to convert Docker images to ACIs"
  homepage "https://github.com/appc/docker2aci"
  url "https://github.com/appc/docker2aci/archive/v0.17.2.tar.gz"
  sha256 "43cb18a3647ca8bae48a283fa3359e9555ab7a366c7ee9ef8a561797cebe2593"

  bottle do
    cellar :any_skip_relocation
    # sha256 "38c55da3d7dae54ac615b1ef70d3b793ace880a3df8324c94586cbdcb0069a47" => :mojave
    sha256 "786e30d746607eea372c8eaa2705f850320dd74e28385fd3b75946e6e8c8e52d" => :high_sierra
    sha256 "6cfeb751ff7db4e703938e2bfc4e28d4ec9a30e59261e75aa5adf690d0f33061" => :sierra
    sha256 "b1a61fc4d329ef1e3ad97ea701e2c0be392f29e8d4a8bd2f1934bf7bac620121" => :el_capitan
  end

  depends_on "go" => :build

  def install
    # Pre-modules GOPATH layout: symlink the unpacked source to the import
    # path expected by `go build`, and stamp the version via -ldflags.
    ENV["GOPATH"] = buildpath
    (buildpath/"src/github.com/appc").mkpath
    ln_s buildpath, buildpath/"src/github.com/appc/docker2aci"
    system "go", "build", "-o", bin/"docker2aci", "-ldflags",
           "-X github.com/appc/docker2aci/lib.Version=#{version}",
           "github.com/appc/docker2aci"
  end

  test do
    # NOTE(review): the second step needs network access to Docker Hub.
    assert_match version.to_s, shell_output("#{bin}/docker2aci -version")
    system "#{bin}/docker2aci", "docker://busybox"
  end
end
| 38.387097 | 93 | 0.753782 |
require 'overcommit/hook/shared/yarn_install'
module Overcommit::Hook::PostMerge
  # Runs `yarn install` when a change is detected in the repository's
  # dependencies.
  #
  # All of the behaviour lives in the shared module; this subclass only binds
  # it to the post-merge hook point.
  #
  # @see {Overcommit::Hook::Shared::YarnInstall}
  class YarnInstall < Base
    include Overcommit::Hook::Shared::YarnInstall
  end
end
| 25.833333 | 69 | 0.741935 |
require 'rails/generators'
require 'highline/import'
require 'bundler'
require 'bundler/cli'
require 'active_support/core_ext/string/indent'
module Spree
  # `rails g spree:install` — copies Spree config/assets into the host app,
  # wires the engine into routes, and (optionally) creates, migrates and
  # seeds the database. Public generator methods run in definition order.
  class InstallGenerator < Rails::Generators::Base
    class_option :migrate, type: :boolean, default: true, banner: 'Run Spree migrations'
    class_option :seed, type: :boolean, default: true, banner: 'load seed data (migrations must be run)'
    class_option :sample, type: :boolean, default: true, banner: 'load sample data (migrations must be run)'
    class_option :auto_accept, type: :boolean
    class_option :user_class, type: :string
    class_option :admin_email, type: :string
    class_option :admin_password, type: :string
    class_option :lib_name, type: :string, default: 'spree'
    class_option :enforce_available_locales, type: :boolean, default: nil

    # Searches the templates directories next to this generator in addition
    # to the inherited source paths.
    def self.source_paths
      paths = self.superclass.source_paths
      paths << File.expand_path('../templates', "../../#{__FILE__}")
      paths << File.expand_path('../templates', "../#{__FILE__}")
      paths << File.expand_path('../templates', __FILE__)
      paths.flatten
    end

    # Resolves the migrate/seed/sample flags; seed and sample data are only
    # meaningful when migrations actually run.
    def prepare_options
      @run_migrations = options[:migrate]
      @load_seed_data = options[:seed]
      @load_sample_data = options[:sample]

      unless @run_migrations
        @load_seed_data = false
        @load_sample_data = false
      end
    end

    def add_files
      template 'config/initializers/spree.rb', 'config/initializers/spree.rb'
    end

    # Keeps checkout/account/API paths out of search-engine indexes.
    def additional_tweaks
      # BUG FIX: File.exists? was deprecated for years and removed in
      # Ruby 3.2; File.exist? is the supported spelling.
      return unless File.exist? 'public/robots.txt'

      append_file "public/robots.txt", <<-ROBOTS.strip_heredoc
        User-agent: *
        Disallow: /checkout
        Disallow: /cart
        Disallow: /orders
        Disallow: /user
        Disallow: /account
        Disallow: /api
        Disallow: /password
      ROBOTS
    end

    def setup_assets
      @lib_name = 'spree'

      # BUG FIX: the original wrote `defined? Spree::Frontend || Rails.env.test?`,
      # which parses as defined?((Spree::Frontend || Rails.env.test?)). For a
      # compound expression defined? returns "expression" without evaluating
      # it, so the condition was ALWAYS truthy and both frontend and backend
      # assets were generated unconditionally. Parenthesising defined?(...)
      # makes the constant check take effect.
      %w{javascripts stylesheets images}.each do |path|
        empty_directory "vendor/assets/#{path}/spree/frontend" if defined?(Spree::Frontend) || Rails.env.test?
        empty_directory "vendor/assets/#{path}/spree/backend" if defined?(Spree::Backend) || Rails.env.test?
      end

      if defined?(Spree::Frontend) || Rails.env.test?
        template "vendor/assets/javascripts/spree/frontend/all.js"
        template "vendor/assets/stylesheets/spree/frontend/all.css"
      end

      if defined?(Spree::Backend) || Rails.env.test?
        template "vendor/assets/javascripts/spree/backend/all.js"
        template "vendor/assets/stylesheets/spree/backend/all.css"
      end
    end

    def create_overrides_directory
      empty_directory "app/overrides"
    end

    # Injects decorator/override loading into the host application's config.
    def configure_application
      application <<-APP.strip_heredoc.indent!(4)
        config.to_prepare do
          # Load application's model / class decorators
          Dir.glob(File.join(File.dirname(__FILE__), "../app/**/*_decorator*.rb")) do |c|
            Rails.configuration.cache_classes ? require(c) : load(c)
          end

          # Load application's view overrides
          Dir.glob(File.join(File.dirname(__FILE__), "../app/overrides/*.rb")) do |c|
            Rails.configuration.cache_classes ? require(c) : load(c)
          end
        end
      APP

      unless options[:enforce_available_locales].nil?
        application <<-APP.strip_heredoc.indent!(4)
          # Prevent this deprecation message: https://github.com/svenfuchs/i18n/commit/3b6e56e
          I18n.enforce_available_locales = #{options[:enforce_available_locales]}
        APP
      end
    end

    def include_seed_data
      append_file "db/seeds.rb", <<-SEEDS.strip_heredoc
        Spree::Core::Engine.load_seed if defined?(Spree::Core)
        Spree::Auth::Engine.load_seed if defined?(Spree::Auth)
      SEEDS
    end

    def install_migrations
      say_status :copying, "migrations"
      silence_stream(STDOUT) do
        silence_warnings { rake 'railties:install:migrations' }
      end
    end

    def create_database
      say_status :creating, "database"
      silence_stream(STDOUT) do
        silence_stream(STDERR) do
          silence_warnings { rake 'db:create' }
        end
      end
    end

    def run_migrations
      if @run_migrations
        say_status :running, "migrations"
        silence_stream(STDOUT) do
          silence_stream(STDERR) do
            silence_warnings { rake 'db:migrate' }
          end
        end
      else
        say_status :skipping, "migrations (don't forget to run rake db:migrate)"
      end
    end

    def populate_seed_data
      if @load_seed_data
        say_status :loading, "seed data"
        rake_options = []
        rake_options << "AUTO_ACCEPT=1" if options[:auto_accept]
        rake_options << "ADMIN_EMAIL=#{options[:admin_email]}" if options[:admin_email]
        rake_options << "ADMIN_PASSWORD=#{options[:admin_password]}" if options[:admin_password]

        cmd = lambda { rake("db:seed #{rake_options.join(' ')}") }
        if options[:auto_accept] || (options[:admin_email] && options[:admin_password])
          # Fully unattended run: suppress the seed task's output entirely.
          # FIX: `silence_warnings &cmd` relied on a whitespace-sensitive
          # parse; explicit parentheses remove the ambiguity warning.
          silence_stream(STDOUT) do
            silence_stream(STDERR) do
              silence_warnings(&cmd)
            end
          end
        else
          cmd.call
        end
      else
        say_status :skipping, "seed data (you can always run rake db:seed)"
      end
    end

    def load_sample_data
      if @load_sample_data
        say_status :loading, "sample data"
        silence_stream(STDOUT) do
          silence_stream(STDERR) do
            silence_warnings { rake 'spree_sample:load' }
          end
        end
      else
        say_status :skipping, "sample data (you can always run rake spree_sample:load)"
      end
    end

    # Mounts the engine at the application root and tells the user about it.
    def notify_about_routes
      insert_into_file(File.join('config', 'routes.rb'),
                       after: "Rails.application.routes.draw do\n") do
        <<-ROUTES.strip_heredoc.indent!(2)
          # This line mounts Spree's routes at the root of your application.
          # This means, any requests to URLs such as /products, will go to
          # Spree::ProductsController.
          # If you would like to change where this engine is mounted, simply change the
          # :at option to something different.
          #
          # We ask that you don't use the :as option here, as Spree relies on it being
          # the default of "spree".
          mount Spree::Core::Engine, at: '/'
        ROUTES
      end

      unless options[:quiet]
        puts "*" * 50
        puts "We added the following line to your application's config/routes.rb file:"
        puts " "
        puts "    mount Spree::Core::Engine, at: '/'"
      end
    end

    def complete
      unless options[:quiet]
        puts "*" * 50
        puts "Spree has been installed successfully. You're all ready to go!"
        puts " "
        puts "Enjoy!"
      end
    end

    protected

    # True when +script+ exists with any recognised JavaScript extension.
    def javascript_exists?(script)
      extensions = %w(.js.coffee .js.erb .js.coffee.erb .js)
      file_exists?(extensions, script)
    end

    # True when +stylesheet+ exists with any recognised stylesheet extension.
    def stylesheet_exists?(stylesheet)
      extensions = %w(.css.scss .css.erb .css.scss.erb .css)
      file_exists?(extensions, stylesheet)
    end

    # Returns the first extension for which "#{filename}#{extension}" exists.
    def file_exists?(extensions, filename)
      extensions.detect do |extension|
        # BUG FIX: the interpolation had degraded to "#(unknown)", which
        # ignored the +filename+ argument entirely; also File.exists? was
        # removed in Ruby 3.2.
        File.exist?("#{filename}#{extension}")
      end
    end

    private

    # Local replacement for the silence_stream helper that was removed from
    # ActiveSupport: temporarily reopens +stream+ onto the null device.
    def silence_stream(stream)
      old_stream = stream.dup
      stream.reopen(RbConfig::CONFIG['host_os'] =~ /mswin|mingw/ ? 'NUL:' : '/dev/null')
      stream.sync = true
      yield
    ensure
      stream.reopen(old_stream)
      old_stream.close
    end
  end
end
| 31.870293 | 109 | 0.63122 |
require 'formula'
# Documentation: https://github.com/mxcl/homebrew/wiki/Formula-Cookbook
# PLEASE REMOVE ALL GENERATED COMMENTS BEFORE SUBMITTING YOUR PULL REQUEST!
# Homebrew formula building the clippoly polygon-clipping static library.
class Clippoly < Formula
  homepage 'http://clippoly.sourceforge.net/'
  url 'http://downloads.sourceforge.net/project/clippoly/clippoly/clippoly-pl11/clippoly-pl11.tar.gz'
  version 'pl11'
  sha256 '675d8dcf1c37e707aa3064c9a8d840964a0775502500a9d2c080f6924d3c1824'

  # NOTE(review): the __END__ section of this file carries a patch that adds
  # the `lib` target invoked by `make lib` below, yet every patch declaration
  # is commented out — confirm whether `patch :DATA` should be enabled.
  #patch :DATA
  #patch :p0, :DATA

  def install
    system "make lib"
    lib.install 'libPolyClip.a'
    # Headers land in a dedicated include/libPolyClip subdirectory. The
    # original defined a one-shot instance method (def includeSubdir) here at
    # install time; a local variable expresses the same path without mutating
    # the formula class.
    include_subdir = include/'libPolyClip'
    include_subdir.install Dir['*.h']
  end

  def test
    # NOTE(review): placeholder inherited from the formula template — `false`
    # always exits non-zero, so `brew test clippoly` can never pass as
    # written; replace with a real link/compile smoke test.
    system "false"
  end
end
__END__
diff -rupN clippoly-pl11/Makefile clippoly-pl11/Makefile
--- clippoly-pl11/Makefile 2005-02-28 21:12:04.000000000 +0000
+++ clippoly-pl11/Makefile 2012-09-07 03:40:41.000000000 +0100
@@ -1,5 +1,6 @@
#CCC = g++ -B/usr/local/lib/gcc-lib/sparc-sun-solaris2.3/rotd/
-CCC = g++ -fno-implicit-templates
+##CCC = g++ -fno-implicit-templates
+CCC = g++
#CCC = CC
# You can use gcc as a C compiler if cc doesn't work
CC = gcc
@@ -30,6 +31,11 @@ test: prog
prog < in_file > out_file
diff -b out_file.dist out_file
+lib: nclip.o primitives.o posadder.o poly.o poly_io.o templates.o err.o graphadd.o graphmat.o graphmat++.o
+ @echo Creating Library..
+ ar rc libPolyClip.a nclip.o primitives.o posadder.o poly.o poly_io.o templates.o err.o graphadd.o graphmat.o graphmat++.o
+
+
clean:
rm -f *.o prog core out_file
diff -rupN clippoly-pl11/graphmat.h clippoly-pl11/graphmat.h
--- clippoly-pl11/graphmat.h 2005-02-28 21:12:05.000000000 +0000
+++ clippoly-pl11/graphmat.h 2012-09-07 03:54:13.000000000 +0100
@@ -68,7 +68,7 @@
#include <stdio.h>
#endif
#ifndef __malloc_h
-#include <malloc.h>
+#include <sys/malloc.h>
#endif
#ifndef __math_h
#include <math.h>
diff -rupN clippoly-pl11/nclip.cc clippoly-pl11/nclip.cc
--- clippoly-pl11/nclip.cc 2005-02-28 17:21:12.000000000 +0000
+++ clippoly-pl11/nclip.cc 2012-09-07 03:49:33.000000000 +0100
@@ -47,7 +47,7 @@ static const char rcs_id[] = "$Header: /
#include <graphadd.h>
#include <err.h>
-#include <malloc.h>
+#include <sys/malloc.h>
#include <poly.h>
#include <primitives.h>
diff -rupN clippoly-pl11/poly.h clippoly-pl11/poly.h
--- clippoly-pl11/poly.h 2005-02-28 21:12:05.000000000 +0000
+++ clippoly-pl11/poly.h 2012-09-07 03:53:49.000000000 +0100
@@ -66,6 +66,8 @@ enum EdgeState { Unknown, None, Shared,
// enum LogicStates;
class Vec;
+class Poly;
+
class PolyNode
{
friend class Poly;
| 29.326923 | 123 | 0.707869 |
# frozen_string_literal: true
require 'base64'
require 'saml/url_service'
require 'saml/responses/login'
require 'saml/responses/logout'
module V1
  # SAML/SSOe authentication endpoints: starts sign-in/sign-out redirects
  # (#new), consumes the IdP callbacks (#saml_callback, #saml_logout_callback,
  # #ssoe_slo_callback) and serves SP metadata. Every outcome emits StatsD
  # metrics via the STATSD_* keys below.
  class SessionsController < ApplicationController
    REDIRECT_URLS = %w[signup mhv dslogon idme mfa verify slo ssoe_slo].freeze

    STATSD_SSO_NEW_KEY = 'api.auth.new'
    STATSD_SSO_CALLBACK_KEY = 'api.auth.saml_callback'
    STATSD_SSO_CALLBACK_TOTAL_KEY = 'api.auth.login_callback.total'
    STATSD_SSO_CALLBACK_FAILED_KEY = 'api.auth.login_callback.failed'
    STATSD_LOGIN_NEW_USER_KEY = 'api.auth.new_user'
    STATSD_LOGIN_STATUS = 'api.auth.login'
    STATSD_LOGIN_SHARED_COOKIE = 'api.auth.sso_shared_cookie'

    # Collection Action: auth is required for certain types of requests
    # @type is set automatically by the routes in config/routes.rb
    # For more details see SAML::SettingsService and SAML::URLService
    def new
      type = params[:type]
      raise Common::Exceptions::RoutingError, params[:path] unless REDIRECT_URLS.include?(type)
      StatsD.increment(STATSD_SSO_NEW_KEY,
                       tags: ["context:#{type}", "forceauthn:#{force_authn?}"])
      url = url_service.send("#{type}_url")
      # Logout types tear the local session down before redirecting out.
      if %w[slo ssoe_slo].include?(type)
        Rails.logger.info("LOGOUT of type #{type}", sso_logging_info)
        reset_session
      end
      # clientId must be added at the end or the URL will be invalid for users using various "Do not track"
      # extensions with their browser.
      redirect_to params[:client_id].present? ? url + "&clientId=#{params[:client_id]}" : url
    end

    # Return leg of the SSOe single-logout flow: forward the user to the
    # configured post-logout page.
    def ssoe_slo_callback
      redirect_to url_service.logout_redirect_url
    end

    # IdP response to a logout request. The session itself was already reset
    # in #new (type=slo); this action only validates, logs, and — via
    # `ensure` — always redirects to the post-logout page.
    def saml_logout_callback
      saml_response = SAML::Responses::Logout.new(params[:SAMLResponse], saml_settings, raw_get_params: params)
      Raven.extra_context(in_response_to: saml_response.try(:in_response_to) || 'ERROR')
      if saml_response.valid?
        user_logout(saml_response)
      else
        log_error(saml_response)
        Rails.logger.info("SLO callback response invalid for originating_request_id '#{originating_request_id}'")
      end
    rescue => e
      log_exception_to_sentry(e, {}, {}, :error)
    ensure
      redirect_to url_service.logout_redirect_url
    end

    # IdP response to a login request: establishes the session on success,
    # otherwise redirects back with an error code. The :total metric fires on
    # every path via `ensure`.
    def saml_callback
      saml_response = SAML::Responses::Login.new(params[:SAMLResponse], settings: saml_settings)
      if saml_response.valid?
        user_login(saml_response)
      else
        log_error(saml_response)
        redirect_to url_service.login_redirect_url(auth: 'fail', code: auth_error_code(saml_response.error_code))
        callback_stats(:failure, saml_response, saml_response.error_instrumentation_code)
      end
    rescue => e
      log_exception_to_sentry(e, {}, {}, :error)
      # '007' is the catch-all unknown-error code; skip the redirect if a
      # response was already rendered above.
      redirect_to url_service.login_redirect_url(auth: 'fail', code: '007') unless performed?
      callback_stats(:failed_unknown)
    ensure
      callback_stats(:total)
    end

    # SP SAML metadata document, for IdP-side configuration.
    def metadata
      meta = OneLogin::RubySaml::Metadata.new
      render xml: meta.generate(saml_settings), content_type: 'application/xml'
    end

    private

    def force_authn?
      params[:force]&.downcase == 'true'
    end

    def saml_settings(options = {})
      # add a forceAuthn value to the saml settings based on the initial options or
      # the "force" value in the query params
      options[:force_authn] ||= force_authn?
      SAML::SSOeSettingsService.saml_settings(options)
    end

    # A '005' (auth failure) while the caller still holds a valid session
    # means the SAML response was replayed; surface that more specific code.
    def auth_error_code(code)
      if code == '005' && validate_session
        UserSessionForm::ERRORS[:saml_replay_valid_session][:code]
      else
        code
      end
    end

    # Only #new enforces authentication, and only for the session-bound
    # types (mfa/verify/slo); every other type starts from a clean session.
    def authenticate
      return unless action_name == 'new'
      if %w[mfa verify slo].include?(params[:type])
        super
      else
        reset_session
      end
    end

    def log_error(saml_response)
      log_message_to_sentry(saml_response.errors_message,
                            saml_response.errors_hash[:level],
                            saml_error_context: saml_response.errors_context)
    end

    # Persists the user/session from a valid SAML response, sets cookies,
    # and records success metrics only when the user is actually being sent
    # to the site (not re-routed to the IdP for up-leveling).
    def user_login(saml_response)
      user_session_form = UserSessionForm.new(saml_response)
      if user_session_form.valid?
        @current_user, @session_object = user_session_form.persist
        set_cookies
        after_login_actions
        redirect_to url_service.login_redirect_url
        # `location` here is the redirect target set just above.
        if location.start_with?(url_service.base_redirect_url)
          # only record success stats if the user is being redirect to the site
          # some users will need to be up-leveled and this will be redirected
          # back to the identity provider
          login_stats(:success, saml_response)
        end
      else
        log_message_to_sentry(
          user_session_form.errors_message, user_session_form.errors_hash[:level], user_session_form.errors_context
        )
        redirect_to url_service.login_redirect_url(auth: 'fail', code: user_session_form.error_code)
        login_stats(:failure, saml_response, user_session_form)
      end
    end

    # Destroys the pending SingleLogoutRequest matching this response's
    # in_response_to, logging either way.
    def user_logout(saml_response)
      logout_request = SingleLogoutRequest.find(saml_response&.in_response_to)
      if logout_request.present?
        logout_request.destroy
        Rails.logger.info("SLO callback response to '#{saml_response&.in_response_to}' for originating_request_id "\
                          "'#{originating_request_id}'")
      else
        Rails.logger.info('SLO callback response could not resolve logout request for originating_request_id '\
                          "'#{originating_request_id}'")
      end
    end

    def login_stats(status, saml_response, user_session_form = nil)
      case status
      when :success
        StatsD.increment(STATSD_LOGIN_NEW_USER_KEY) if request_type == 'signup'
        # track users who have a shared sso cookie
        StatsD.increment(STATSD_LOGIN_SHARED_COOKIE,
                         tags: ["loa:#{@current_user.loa[:current]}",
                                "idp:#{@current_user.identity.sign_in[:service_name]}"])
        StatsD.increment(STATSD_LOGIN_STATUS,
                         tags: ['status:success',
                                "idp:#{@current_user.identity.sign_in[:service_name]}",
                                "context:#{saml_response.authn_context}"])
        callback_stats(:success, saml_response)
      when :failure
        StatsD.increment(STATSD_LOGIN_STATUS,
                         tags: ['status:failure',
                                "idp:#{params[:type]}",
                                "context:#{saml_response.authn_context}",
                                "error:#{user_session_form.error_instrumentation_code}"])
        callback_stats(:failure, saml_response, user_session_form.error_instrumentation_code)
      end
    end

    def callback_stats(status, saml_response = nil, failure_tag = nil)
      case status
      when :success
        StatsD.increment(STATSD_SSO_CALLBACK_KEY,
                         tags: ['status:success', "context:#{saml_response.authn_context}"])
        # track users who have a shared sso cookie
      when :failure
        StatsD.increment(STATSD_SSO_CALLBACK_KEY,
                         tags: ['status:failure', "context:#{saml_response.authn_context}"])
        StatsD.increment(STATSD_SSO_CALLBACK_FAILED_KEY, tags: [failure_tag])
      when :failed_unknown
        StatsD.increment(STATSD_SSO_CALLBACK_KEY,
                         tags: ['status:failure', 'context:unknown'])
        StatsD.increment(STATSD_SSO_CALLBACK_FAILED_KEY, tags: ['error:unknown'])
      when :total
        StatsD.increment(STATSD_SSO_CALLBACK_TOTAL_KEY)
      end
    end

    def set_cookies
      Rails.logger.info('SSO: LOGIN', sso_logging_info)
      set_api_cookie!
      set_sso_cookie!
    end

    def after_login_actions
      AfterLoginJob.perform_async('user_uuid' => @current_user&.uuid)
      log_persisted_session_and_warnings
    end

    def log_persisted_session_and_warnings
      obscure_token = Session.obscure_token(@session_object.token)
      Rails.logger.info("Logged in user with id #{@session_object.uuid}, token #{obscure_token}")
      # We want to log when SSNs do not match between MVI and SAML Identity. And might take future
      # action if this appears to be happening frquently.
      if current_user.ssn_mismatch?
        additional_context = StringHelpers.heuristics(current_user.identity.ssn, current_user.va_profile.ssn)
        log_message_to_sentry('SSNS DO NOT MATCH!!', :warn, identity_compared_with_mvi: additional_context)
      end
    end

    # RelayState round-trips request metadata through the IdP; any malformed
    # or absent value degrades to 'UNKNOWN'.
    def originating_request_id
      JSON.parse(params[:RelayState] || '{}')['originating_request_id']
    rescue
      'UNKNOWN'
    end

    def request_type
      JSON.parse(params[:RelayState] || '{}')['type']
    rescue
      'UNKNOWN'
    end

    def url_service
      SAML::URLService.new(saml_settings, session: @session_object, user: current_user,
                           params: params, loa3_context: LOA::IDME_LOA3)
    end
  end
end
| 38.306383 | 116 | 0.670629 |
013fbcc5e2efa7649ff46708869d96fd1329d82b | 1,465 | # -*- encoding: utf-8 -*-
# stub: jekyll-commonmark-ghpages 0.1.6 ruby lib
Gem::Specification.new do |s|
s.name = "jekyll-commonmark-ghpages".freeze
s.version = "0.1.6"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Ashe Connor".freeze]
s.date = "2019-01-21"
s.email = "[email protected]".freeze
s.homepage = "https://github.com/github/jekyll-commonmark-ghpages".freeze
s.licenses = ["MIT".freeze]
s.rubygems_version = "3.2.7".freeze
s.summary = "CommonMark generator for Jekyll".freeze
s.installed_by_version = "3.2.7" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<jekyll-commonmark>.freeze, ["~> 1.2"])
s.add_runtime_dependency(%q<commonmarker>.freeze, ["~> 0.17.6"])
s.add_runtime_dependency(%q<rouge>.freeze, [">= 2.0", "< 4.0"])
s.add_development_dependency(%q<rspec>.freeze, ["~> 3.0"])
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
else
s.add_dependency(%q<jekyll-commonmark>.freeze, ["~> 1.2"])
s.add_dependency(%q<commonmarker>.freeze, ["~> 0.17.6"])
s.add_dependency(%q<rouge>.freeze, [">= 2.0", "< 4.0"])
s.add_dependency(%q<rspec>.freeze, ["~> 3.0"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
end
end
| 38.552632 | 112 | 0.672355 |
module Hedgehog
  module BinaryInPathFinder
    class Ruby
      # Locates the first executable on the PATH whose basename is exactly
      # +binary+ (regex metacharacters in the name are escaped, so the match
      # is literal).
      #
      # @param binary [String] bare command name, e.g. "ls"
      # @return [String, nil] full path of the first match, or nil
      def call(binary)
        escaped = Regexp.escape(binary)
        suffix = /\/#{escaped}$/
        Hedgehog::Environment::Path.binaries.find { |candidate| candidate.match(suffix) }
      end
    end
  end
end
| 18.571429 | 53 | 0.576923 |
require_relative '../../../test_helper'
# Unit specs for Troo::List — attribute defaults supplied by the :list
# fabricator, board/card associations, and the remote/decorator/presenter
# factory helpers.
module Troo
  describe List do
    let(:described_class) { List }
    let(:described_instance) { Fabricate.build(:list) }

    subject { described_instance }

    # Values below mirror the :list fabricator's defaults.
    context 'attributes' do
      it 'should have a name attribute' do
        subject.name.must_equal 'My Test List'
      end

      it 'should have a position attribute' do
        subject.position.must_equal 32_768
      end

      it 'should have a default attribute' do
        subject.default.must_equal false
      end

      it 'should have a closed attribute' do
        subject.closed.must_equal false
      end

      it 'should have a external_board_id attribute' do
        subject.external_board_id.must_equal '20010'
      end

      it 'should have an external_id attribute' do
        subject.external_id.must_equal '20040'
      end
    end

    describe '.remote' do
      subject { described_class.remote }

      it 'returns the remote class for this model' do
        subject.must_equal(Remote::List)
      end
    end

    describe '.type' do
      subject { described_class.type }

      it { subject.must_be_instance_of(Symbol) }

      it 'returns the type of model' do
        subject.must_equal(:list)
      end
    end

    # Associations are resolved against persisted fixtures, hence the
    # Fabricate/ database_cleanup pairing.
    context 'associations' do
      before do
        @board = Fabricate(:board)
        @card = Fabricate(:card)
      end

      after { database_cleanup }

      it 'belongs to a board' do
        subject.board.must_equal @board
      end

      it 'can have many cards' do
        subject.cards.size.must_equal 1
      end
    end

    describe '#decorator' do
      let(:options) { {} }

      subject { described_class.new.decorator(options) }

      it 'returns a new instance of the decorator for this model' do
        subject.must_be_instance_of(Decorators::Resource)
      end
    end

    describe '#presenter' do
      let(:options) { {} }

      subject { described_class.new.presenter(options) }

      it 'returns a new instance of the presenter for this model' do
        subject.must_be_instance_of(Presenters::List)
      end
    end

    describe '#type' do
      subject { described_class.new.type }

      it { subject.must_be_instance_of(Symbol) }

      it 'returns the type of the model instance' do
        subject.must_equal(:list)
      end
    end
  end
end
| 22.921569 | 68 | 0.631737 |
# A bullet MUST be a duck-typed renderable.
# SimpleCircleBullet is an example of a sprite bullet, while
# SimpleBoxBullet is an example of a solid bullet.
# Base class for bullets: holds damage, position and a GeoGeo collider, and
# supplies edge-comparison helpers used for ordering/collision sweeps.
class AbstractBullet
  # @return [Integral] damage dealt on hit
  attr_accessor :damage
  # @return [GeoGeo::Shape2D] collision shape
  attr_accessor :collider
  # @return [Integral] world position
  attr_accessor :x, :y

  # Installs a degenerate zero-sized collider; subclasses replace it.
  # @param arguments [Array<Object>] ignored here, accepted for subclass signatures
  def initialize(*arguments)
    @collider = GeoGeo::Shape2D.new(0, 0, 0, 0)
  end

  # Orders this bullet's collider bottom against the other's collider top.
  # NOTE(review): compare_bottom below uses bottom<=>bottom — confirm the
  # top edge here is intentional.
  def <=>(other)
    @collider.bottom <=> other.collider.top
  end

  # @return [Integer] -1/0/1 ordering of collider bottom edges
  # @param [AbstractBullet] other
  def compare_bottom(other)
    @collider.bottom <=> other.collider.bottom
  end

  # @return [Integer] -1/0/1 ordering of collider left edges
  # @param [AbstractBullet] other
  def compare_left(other)
    @collider.left <=> other.collider.left
  end

  # No-op by default: the base bullet is stationary.
  # @return [nil]
  def move
  end
end
# Sprite-rendered circular bullet travelling on a fixed velocity vector.
class SimpleCircleBullet < AbstractBullet
  # @param x [Integral] position x
  # @param y [Integral] position y
  # @param rad [Integral] radius in pixels
  # @param vx [Integral] horizontal velocity per tick
  # @param vy [Integral] vertical velocity per tick
  # @param r [Integral] red tint component
  # @param g [Integral] green tint component
  # @param b [Integral] blue tint component
  # @param damage [Integral] damage dealt on hit
  # @param show_trajectory [Boolean] accepted for API compatibility; unused here
  def initialize(x, y, rad, vx, vy, r, g, b, damage = 1, show_trajectory = true)
    @x, @y = x, y
    @rad = rad
    @vx, @vy = vx, vy
    @r, @g, @b = r, g, b
    # NOTE(review): the collider is centred at (x + rad, y + rad) while the
    # sprite in draw_override is centred on (x, y) — confirm the offset is
    # intentional.
    @collider = GeoGeo::Circle.new(x + rad, y + rad, rad)
    @damage = damage
  end

  # Advances one tick along the velocity vector, dragging the collider along.
  # @return [nil]
  def move
    @x, @y = @x + @vx, @y + @vy
    @collider.shift(@vx, @vy)
  end

  # Pushes a tinted circle sprite to the FFI draw queue.
  # @param ffi_draw [FFI::Draw]
  # @return [nil]
  def draw_override(ffi_draw)
    left = @x - @rad
    bottom = @y - @rad
    diameter = 2 * @rad
    ffi_draw.draw_sprite_3(left, bottom, diameter, diameter, "sprites/rad_3_bullet.png", nil, nil, @r, @g, @b,
                           nil, nil, nil, nil, nil, nil,
                           nil, nil, nil, nil, nil, nil)
  end

  # Marks this object as a sprite primitive for the renderer.
  # @return [Symbol]
  def primitive_marker
    :sprite
  end
end
# Solid-rendered axis-aligned rectangular bullet.
class SimpleBoxBullet < AbstractBullet
  attr_accessor :x, :y, :w, :h, :vx, :vy, :r, :g, :b, :a, :collider

  # @param x [Integral] position x
  # @param y [Integral] position y
  # @param w [Integral] width
  # @param h [Integral] height
  # @param vx [Integral] horizontal velocity per tick
  # @param vy [Integral] vertical velocity per tick
  # @param damage [Integral] damage dealt on hit
  def initialize(x, y, w, h, vx, vy, damage = 1)
    @damage = damage
    @x, @y = x, y
    @w, @h = w, h
    @vx, @vy = vx, vy
    @collider = GeoGeo::Box.new_drgtk(x, y, w, h)
  end

  # Advances one tick along the velocity vector, dragging the collider along.
  # @return [nil]
  def move
    @x, @y = @x + @vx, @y + @vy
    @collider.shift(@vx, @vy)
  end

  # Draws a solid rectangle with hard-coded red (255,0,0,255) — the r/g/b/a
  # accessors above are not consulted here.
  # @param ffi_draw [FFI::Draw]
  # @return [nil]
  def draw_override(ffi_draw)
    ffi_draw.draw_solid(@x, @y, @w, @h, 255, 0, 0, 255)
  end

  # Marks this object as a sprite primitive for the renderer.
  # @return [Symbol]
  def primitive_marker
    :sprite
  end
end
require 'time'
require 'date'
require 'active_merchant/billing/expiry_date'
module ActiveMerchant #:nodoc:
  module Billing #:nodoc:
    # == Description
    # This credit card object can be used as a stand alone object. It acts just like an ActiveRecord object
    # but doesn't support the .save method as its not backed by a database.
    #
    # For testing purposes, use the 'bogus' credit card type. This card skips the vast majority of
    # validations. This allows you to focus on your core concerns until you're ready to be more concerned
    # with the details of particular creditcards or your gateway.
    #
    # == Testing With CreditCard
    # Often when testing we don't care about the particulars of a given card type. When using the 'test'
    # mode in your Gateway, there are six different valid card numbers: 1, 2, 3, 'success', 'fail',
    # and 'error'.
    #
    #--
    # For details, see CreditCardMethods#valid_number?
    #++
    #
    # == Example Usage
    #   cc = CreditCard.new(
    #     :first_name => 'Steve',
    #     :last_name => 'Smith',
    #     :month => '9',
    #     :year => '2010',
    #     :type => 'visa',
    #     :number => '4242424242424242'
    #   )
    #
    #   cc.valid? # => true
    #   cc.display_number # => XXXX-XXXX-XXXX-4242
    #
    # == Configuration
    #
    # Set ActiveMerchant::Billing::CreditCard.type_required = false if you dont want to send the credit card type
    class CreditCard
      include CreditCardMethods
      include Validateable

      ## Configuration
      cattr_accessor :type_required

      ## Attributes
      cattr_accessor :require_verification_value
      self.require_verification_value = true

      # Essential attributes for a valid, non-bogus creditcards
      attr_accessor :number, :month, :year, :type, :first_name, :last_name

      # Required for Switch / Solo cards
      attr_accessor :start_month, :start_year, :issue_number

      # Optional verification_value (CVV, CVV2 etc). Gateways will try their best to
      # run validation on the passed in value if it is supplied
      attr_accessor :verification_value

      # Provides proxy access to an expiry date object
      def expiry_date
        ExpiryDate.new(@month, @year)
      end

      # First day of the expiry month, as a Date.
      def expiration
        Date.new(@year.to_i, @month.to_i)
      end

      def expired?
        expiry_date.expired?
      end

      # True only when both name parts are present.
      def name?
        first_name? && last_name?
      end

      def first_name?
        !@first_name.blank?
      end

      def last_name?
        !@last_name.blank?
      end

      def name
        "#{@first_name} #{@last_name}"
      end

      def verification_value?
        !@verification_value.blank?
      end

      # Show the card number, with all but last 4 numbers replace with "X". (XXXX-XXXX-XXXX-4338)
      def display_number
        self.class.mask(number)
      end

      def last_digits
        self.class.last_digits(number)
      end

      # Full validation pass (Validateable hook); accumulates onto `errors`.
      def validate
        validate_essential_attributes

        # Bogus card is pretty much for testing purposes. Lets just skip these extra tests if its used
        return if type == 'bogus'

        validate_card_type
        validate_card_number
        validate_verification_value
        validate_switch_or_solo_attributes
      end

      def self.requires_verification_value?
        require_verification_value
      end

      private

      # Normalises input before validation: integer month/year, digits-only
      # number, downcased type, and a type inferred from the number if blank.
      def before_validate #:nodoc:
        self.month = month.to_i
        self.year = year.to_i
        self.number = number.to_s.gsub(/[^\d]/, "")
        self.type.downcase! if type.respond_to?(:downcase)
        self.type = self.class.type?(number) if type.blank?
      end

      # Luhn/format check first; only if number and type are individually
      # valid do we check that they agree with each other.
      def validate_card_number #:nodoc:
        errors.add :number, "is not a valid credit card number" unless CreditCard.valid_number?(number)
        unless errors.on(:number) || errors.on(:type)
          errors.add :type, "is not the correct card type" unless CreditCard.matching_type?(number, type)
        end
      end

      def validate_card_type #:nodoc:
        # Type checks can be disabled globally via the class accessor.
        return true if @@type_required == false
        errors.add :type, "is required" if type.blank?
        errors.add :type, "is invalid" unless CreditCard.card_companies.keys.include?(type)
      end

      def validate_essential_attributes #:nodoc:
        errors.add :first_name, "cannot be empty" if @first_name.blank?
        errors.add :last_name, "cannot be empty" if @last_name.blank?
        errors.add :month, "is not a valid month" unless valid_month?(@month)
        errors.add :year, "expired" if expired?
        errors.add :year, "is not a valid year" unless valid_expiry_year?(@year)
      end

      # Switch/Solo cards need either a valid start date or an issue number.
      def validate_switch_or_solo_attributes #:nodoc:
        if %w[switch solo].include?(type)
          unless valid_month?(@start_month) && valid_start_year?(@start_year) || valid_issue_number?(@issue_number)
            errors.add :start_month, "is invalid" unless valid_month?(@start_month)
            errors.add :start_year, "is invalid" unless valid_start_year?(@start_year)
            errors.add :issue_number, "cannot be empty" unless valid_issue_number?(@issue_number)
          end
        end
      end

      def validate_verification_value #:nodoc:
        if CreditCard.requires_verification_value?
          errors.add :verification_value, "is required" unless verification_value?
        end
      end
    end
  end
end
| 33.378698 | 115 | 0.616912 |
e84a3032361e5782990f40525dcdf23bfcada58b | 625 | Rails.application.routes.draw do
# For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
root to: 'pages#home'
resources :sessions, only: [:create]
resources :games
#USER ROUTES
get 'user/collections', to: 'users#collection'
# get 'user/collections/:id', to: 'users#show_collection'
delete '/user/collections/:id', to: 'users#destroy'
resources :users, only: [:create, :index, :destroy]
#SESSION ROUTES
get 'logged_in', to: 'sessions#logged_in'
delete 'logout', to: 'sessions#logout'
#GAME ROUTES
post '/games/:id', to: 'games#game_collection'
end
| 29.761905 | 102 | 0.7072 |
module Git
class Base
@working_directory = nil
@repository = nil
@index = nil
@lib = nil
@logger = nil
# opens a bare Git Repository - no working directory options
def self.bare(git_dir, opts = {})
default = {:repository => git_dir}
git_options = default.merge(opts)
self.new(git_options)
end
# opens a new Git Project from a working directory
# you can specify non-standard git_dir and index file in the options
def self.open(working_dir, opts={})
default = {:working_directory => working_dir}
git_options = default.merge(opts)
self.new(git_options)
end
# initializes a git repository
#
# options:
# :repository
# :index_file
#
def self.init(working_dir, opts = {})
default = {:working_directory => working_dir,
:repository => File.join(working_dir, '.git')}
git_options = default.merge(opts)
if git_options[:working_directory]
# if !working_dir, make it
FileUtils.mkdir_p(git_options[:working_directory]) if !File.directory?(git_options[:working_directory])
end
# run git_init there
Git::Lib.new(git_options).init
self.new(git_options)
end
# clones a git repository locally
#
# repository - http://repo.or.cz/w/sinatra.git
# name - sinatra
#
# options:
# :repository
#
# :bare
# or
# :working_directory
# :index_file
#
def self.clone(repository, name, opts = {})
# run git-clone
self.new(Git::Lib.new.clone(repository, name, opts))
end
def initialize(options = {})
if working_dir = options[:working_directory]
options[:repository] = File.join(working_dir, '.git') if !options[:repository]
options[:index] = File.join(working_dir, '.git', 'index') if !options[:index]
end
if options[:log]
@logger = options[:log]
@logger.info("Starting Git")
end
@working_directory = Git::WorkingDirectory.new(options[:working_directory]) if options[:working_directory]
@repository = Git::Repository.new(options[:repository]) if options[:repository]
@index = Git::Index.new(options[:index], false) if options[:index]
end
# returns a reference to the working directory
# @git.dir.path
# @git.dir.writeable?
def dir
@working_directory
end
# returns reference to the git repository directory
# @git.dir.path
def repo
@repository
end
# returns reference to the git index file
def index
@index
end
def set_working(work_dir, check = true)
@lib = nil
@working_directory = Git::WorkingDirectory.new(work_dir.to_s, check)
end
def set_index(index_file, check = true)
@lib = nil
@index = Git::Index.new(index_file.to_s, check)
end
# changes current working directory for a block
# to the git working directory
#
# example
# @git.chdir do
# # write files
# @git.add
# @git.commit('message')
# end
def chdir
Dir.chdir(dir.path) do
yield dir.path
end
end
# returns the repository size in bytes
def repo_size
size = 0
Dir.chdir(repo.path) do
(size, dot) = `du -s`.chomp.split
end
size.to_i
end
#g.config('user.name', 'Scott Chacon') # sets value
#g.config('user.email', '[email protected]') # sets value
#g.config('user.name') # returns 'Scott Chacon'
#g.config # returns whole config hash
def config(name = nil, value = nil)
if(name && value)
# set value
lib.config_set(name, value)
elsif (name)
# return value
lib.config_get(name)
else
# return hash
lib.config_list
end
end
# factory methods
# returns a Git::Object of the appropriate type
# you can also call @git.gtree('tree'), but that's
# just for readability. If you call @git.gtree('HEAD') it will
# still return a Git::Object::Commit object.
#
# @git.object calls a factory method that will run a rev-parse
# on the objectish and determine the type of the object and return
# an appropriate object for that type
def object(objectish)
Git::Object.new(self, objectish)
end
def gtree(objectish)
Git::Object.new(self, objectish, 'tree')
end
def gcommit(objectish)
Git::Object.new(self, objectish, 'commit')
end
def gblob(objectish)
Git::Object.new(self, objectish, 'blob')
end
# returns a Git::Log object with count commits
def log(count = 30)
Git::Log.new(self, count)
end
# returns a Git::Status object
def status
Git::Status.new(self)
end
# returns a Git::Branches object of all the Git::Branch objects for this repo
def branches
Git::Branches.new(self)
end
# returns a Git::Branch object for branch_name
def branch(branch_name = 'master')
Git::Branch.new(self, branch_name)
end
# returns +true+ if the branch exists locally
def is_local_branch?(branch)
branch_names = self.branches.local.map {|b| b.name}
branch_names.include?(branch)
end
# returns +true+ if the branch exists remotely
def is_remote_branch?(branch)
branch_names = self.branches.local.map {|b| b.name}
branch_names.include?(branch)
end
# returns +true+ if the branch exists
def is_branch?(branch)
branch_names = self.branches.map {|b| b.name}
branch_names.include?(branch)
end
# returns a Git::Remote object
def remote(remote_name = 'origin')
Git::Remote.new(self, remote_name)
end
# this is a convenience method for accessing the class that wraps all the
# actual 'git' forked system calls. At some point I hope to replace the Git::Lib
# class with one that uses native methods or libgit C bindings
def lib
@lib ||= Git::Lib.new(self, @logger)
end
# will run a grep for 'string' on the HEAD of the git repository
#
# to be more surgical in your grep, you can call grep() off a specific
# git object. for example:
#
# @git.object("v2.3").grep('TODO')
#
# in any case, it returns a hash of arrays of the type:
# hsh[tree-ish] = [[line_no, match], [line_no, match2]]
# hsh[tree-ish] = [[line_no, match], [line_no, match2]]
#
# so you might use it like this:
#
# @git.grep("TODO").each do |sha, arr|
# puts "in blob #{sha}:"
# arr.each do |match|
# puts "\t line #{match[0]}: '#{match[1]}'"
# end
# end
def grep(string)
self.object('HEAD').grep(string)
end
# returns a Git::Diff object
def diff(objectish = 'HEAD', obj2 = nil)
Git::Diff.new(self, objectish, obj2)
end
# adds files from the working directory to the git repository
def add(path = '.')
self.lib.add(path)
end
# removes file(s) from the git repository
def remove(path = '.', opts = {})
self.lib.remove(path, opts)
end
# resets the working directory to the provided commitish
def reset(commitish = nil, opts = {})
self.lib.reset(commitish, opts)
end
# resets the working directory to the commitish with '--hard'
def reset_hard(commitish = nil, opts = {})
opts = {:hard => true}.merge(opts)
self.lib.reset(commitish, opts)
end
# commits all pending changes in the index file to the git repository
#
# options:
# :add_all
# :allow_empty
# :author
def commit(message, opts = {})
self.lib.commit(message, opts)
end
# commits all pending changes in the index file to the git repository,
# but automatically adds all modified files without having to explicitly
# calling @git.add() on them.
def commit_all(message, opts = {})
opts = {:add_all => true}.merge(opts)
self.lib.commit(message, opts)
end
# checks out a branch as the new git working directory
def checkout(branch = 'master', opts = {})
self.lib.checkout(branch, opts)
end
# checks out an old version of a file
def checkout_file(version, file)
self.lib.checkout_file(version,file)
end
# fetches changes from a remote branch - this does not modify the working directory,
# it just gets the changes from the remote if there are any
def fetch(remote = 'origin')
self.lib.fetch(remote)
end
# pushes changes to a remote repository - easiest if this is a cloned repository,
# otherwise you may have to run something like this first to setup the push parameters:
#
# @git.config('remote.remote-name.push', 'refs/heads/master:refs/heads/master')
#
def push(remote = 'origin', branch = 'master')
self.lib.push(remote, branch)
end
# merges one or more branches into the current working branch
#
# you can specify more than one branch to merge by passing an array of branches
def merge(branch, message = 'merge')
self.lib.merge(branch, message)
end
# iterates over the files which are unmerged
#
# yields file, your_version, their_version
def each_conflict(&block)
self.lib.conflicts(&block)
end
# fetches a branch from a remote and merges it into the current working branch
def pull(remote = 'origin', branch = 'master', message = 'origin pull')
fetch(remote)
merge(branch, message)
end
# returns an array of Git:Remote objects
def remotes
self.lib.remotes.map { |r| Git::Remote.new(self, r) }
end
# adds a new remote to this repository
# url can be a git url or a Git::Base object if it's a local reference
#
# @git.add_remote('scotts_git', 'git://repo.or.cz/rubygit.git')
# @git.fetch('scotts_git')
# @git.merge('scotts_git/master')
#
def add_remote(name, url, opts = {})
if url.is_a?(Git::Base)
url = url.repo.path
end
self.lib.remote_add(name, url, opts)
Git::Remote.new(self, name)
end
# returns an array of all Git::Tag objects for this repository
def tags
self.lib.tags.map { |r| tag(r) }
end
# returns a Git::Tag object
def tag(tag_name)
Git::Object.new(self, tag_name, 'tag', true)
end
# creates a new git tag (Git::Tag)
def add_tag(tag_name)
self.lib.tag(tag_name)
tag(tag_name)
end
# creates an archive file of the given tree-ish
def archive(treeish, file = nil, opts = {})
self.object(treeish).archive(file, opts)
end
# repacks the repository
def repack
self.lib.repack
end
def gc
self.lib.gc
end
## LOWER LEVEL INDEX OPERATIONS ##
def with_index(new_index)
old_index = @index
set_index(new_index, false)
return_value = yield @index
set_index(old_index)
return_value
end
def with_temp_index &blk
tempfile = Tempfile.new('temp-index')
temp_path = tempfile.path
tempfile.unlink
with_index(temp_path, &blk)
end
def checkout_index(opts = {})
self.lib.checkout_index(opts)
end
def read_tree(treeish, opts = {})
self.lib.read_tree(treeish, opts)
end
def write_tree
self.lib.write_tree
end
def commit_tree(tree = nil, opts = {})
Git::Object::Commit.new(self, self.lib.commit_tree(tree, opts))
end
def write_and_commit_tree(opts = {})
tree = write_tree
commit_tree(tree, opts)
end
def update_ref(branch, commit)
branch(branch).update_ref(commit)
end
def ls_files
self.lib.ls_files
end
def with_working(work_dir)
return_value = false
old_working = @working_directory
set_working(work_dir)
Dir.chdir work_dir do
return_value = yield @working_directory
end
set_working(old_working)
return_value
end
def with_temp_working &blk
tempfile = Tempfile.new("temp-workdir")
temp_dir = tempfile.path
tempfile.unlink
Dir.mkdir(temp_dir, 0700)
with_working(temp_dir, &blk)
end
# runs git rev-parse to convert the objectish to a full sha
#
# @git.revparse("HEAD^^")
# @git.revparse('v2.4^{tree}')
# @git.revparse('v2.4:/doc/index.html')
#
def revparse(objectish)
self.lib.revparse(objectish)
end
def ls_tree(objectish)
self.lib.ls_tree(objectish)
end
def cat_file(objectish)
self.lib.object_contents(objectish)
end
# returns the name of the branch the working directory is currently on
def current_branch
self.lib.branch_current
end
end
end
| 26.780738 | 112 | 0.610605 |
e8e3b0041d423361f46502b6dfb6e98286105f2b | 3,073 | class Boost < Formula
desc "Collection of portable C++ source libraries"
homepage "https://www.boost.org/"
url "https://dl.bintray.com/boostorg/release/1.70.0/source/boost_1_70_0.tar.bz2"
sha256 "430ae8354789de4fd19ee52f3b1f739e1fba576f0aded0897c3c2bc00fb38778"
head "https://github.com/boostorg/boost.git"
bottle do
cellar :any
sha256 "c626b90770424ca969d0870d25d7fb13cf9d4f23a828407701face0e7ec4ac93" => :mojave
sha256 "8d5a7c95155faf57ce246d3455cea3628569d684a14fb9e621893ceaf3d65373" => :high_sierra
sha256 "0578344e152f306a4594b72493dcc3f638425b0fb7f4fcd23e5a523c4542b33a" => :sierra
end
depends_on "icu4c"
def install
# Force boost to compile with the desired compiler
open("user-config.jam", "a") do |file|
file.write "using darwin : : #{ENV.cxx} ;\n"
end
# libdir should be set by --prefix but isn't
icu4c_prefix = Formula["icu4c"].opt_prefix
bootstrap_args = %W[
--prefix=#{prefix}
--libdir=#{lib}
--with-icu=#{icu4c_prefix}
]
# Handle libraries that will not be built.
without_libraries = ["python", "mpi"]
# Boost.Log cannot be built using Apple GCC at the moment. Disabled
# on such systems.
without_libraries << "log" if ENV.compiler == :gcc
bootstrap_args << "--without-libraries=#{without_libraries.join(",")}"
# layout should be synchronized with boost-python and boost-mpi
#
# --no-cmake-config should be dropped if possible in next version
args = %W[
--prefix=#{prefix}
--libdir=#{lib}
-d2
-j#{ENV.make_jobs}
--layout=tagged-1.66
--user-config=user-config.jam
--no-cmake-config
-sNO_LZMA=1
-sNO_ZSTD=1
install
threading=multi,single
link=shared,static
]
# Boost is using "clang++ -x c" to select C compiler which breaks C++14
# handling using ENV.cxx14. Using "cxxflags" and "linkflags" still works.
args << "cxxflags=-std=c++14"
if ENV.compiler == :clang
args << "cxxflags=-stdlib=libc++" << "linkflags=-stdlib=libc++"
end
system "./bootstrap.sh", *bootstrap_args
system "./b2", "headers"
system "./b2", *args
end
def caveats
s = ""
# ENV.compiler doesn't exist in caveats. Check library availability
# instead.
if Dir["#{lib}/libboost_log*"].empty?
s += <<~EOS
Building of Boost.Log is disabled because it requires newer GCC or Clang.
EOS
end
s
end
test do
(testpath/"test.cpp").write <<~EOS
#include <boost/algorithm/string.hpp>
#include <string>
#include <vector>
#include <assert.h>
using namespace boost::algorithm;
using namespace std;
int main()
{
string str("a,b");
vector<string> strVec;
split(strVec, str, is_any_of(","));
assert(strVec.size()==2);
assert(strVec[0]=="a");
assert(strVec[1]=="b");
return 0;
}
EOS
system ENV.cxx, "test.cpp", "-std=c++14", "-stdlib=libc++", "-o", "test"
system "./test"
end
end
| 28.719626 | 93 | 0.632281 |
1d8e0b8b77e1741557749b02999e6c2867c5930e | 542 | class Acorn::AuthorizeUserService
prepend SimpleCommand
def initialize(token)
@token = token
end
def call
authorized?
end
private
attr_accessor :token
def authorized?
decoded_auth_token.present? || errors.add(:token, "Invalid token") && false
end
def decoded_auth_token
@decoded_auth_token ||= Acorn::JsonWebToken.decode(auth_token)
end
def auth_token
if token.present?
return token.split(" ").last
else
errors.add(:token, "No Token was provided")
end
nil
end
end
| 15.941176 | 79 | 0.678967 |
ac496cd3ce0f1ec72cb97544a3e8ed5ae52103ef | 2,749 | #
# Copyright 2012-2014 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "omnibus/s3_helpers"
module Omnibus
  # Publishes built packages (and their metadata JSON) to an S3 bucket.
  # Credentials/bucket come from @options and the global Omnibus Config.
  class S3Publisher < Publisher
    include S3Helpers

    # Validates and uploads each package: metadata JSON first, then the
    # package itself (with its md5 for integrity). Yields each package
    # to the caller after upload when a block is given.
    def publish(&block)
      log.info(log_key) { "Starting S3 publisher" }

      packages.each do |package|
        # Make sure the package is good to go!
        log.debug(log_key) { "Validating '#{package.name}'" }
        package.validate!

        # Upload the metadata first
        log.debug(log_key) { "Uploading '#{package.metadata.name}'" }
        store_object(key_for(package, package.metadata.name), FFI_Yajl::Encoder.encode(package.metadata.to_hash, pretty: true),
          nil, access_policy)

        # Upload the actual package
        log.info(log_key) { "Uploading '#{package.name}'" }
        store_object(key_for(package, package.name), package.content,
          package.metadata[:md5], access_policy)

        # If a block was given, "yield" the package to the caller
        yield(package) if block
      end
    end

    private

    # Connection settings consumed by S3Helpers; keys come from the
    # publisher options (:region, :bucket) and global Config.
    def s3_configuration
      {
        region: @options[:region],
        access_key_id: Config.publish_s3_access_key,
        secret_access_key: Config.publish_s3_secret_key,
        bucket_name: @options[:bucket],
      }
    end

    #
    # The unique upload key for this package. The additional "stuff" is
    # postfixed to the end of the path.
    #
    # @param [Package] package
    #   the package this key is for
    # @param [Array<String>] stuff
    #   the additional things to prepend
    #
    # @return [String]
    #
    def key_for(package, *stuff)
      File.join(
        package.metadata[:platform],
        package.metadata[:platform_version],
        package.metadata[:arch],
        package.name,
        *stuff
      )
    end

    #
    # The access policy that corresponds to the +s3_access+ given in the
    # initializer option. Any access control that is not the strict string
    # +"public"+ is assumed to be private.
    #
    # @return [String]
    #   the access policy
    #
    def access_policy
      if @options[:acl].to_s == "public"
        "public-read"
      else
        "private"
      end
    end
  end
end
| 28.936842 | 127 | 0.636959 |
e212e8b44a71e2307cbf69b21de93b288a9953b4 | 690 | require 'formula'
# Homebrew formula for PredictionIO 0.9.0 (machine-learning server).
# Installs the distribution into libexec and wires its config to the
# Homebrew-installed Elasticsearch and Spark.
class Predictionio < Formula
  homepage 'http://prediction.io/'
  url 'http://download.prediction.io/PredictionIO-0.9.0.tar.gz'
  sha1 'e5724b637382a06c098bbb3617b35f5bcff1b469'
  depends_on 'elasticsearch'
  depends_on 'hadoop'
  depends_on 'hbase'
  depends_on 'apache-spark'
  depends_on :java => "1.7"
  def install
    # Windows launchers are useless on macOS.
    rm_f Dir["bin/*.bat"]
    libexec.install Dir['*']
    bin.write_exec_script libexec/"bin/pio"
    # Point pio-env.sh at the Homebrew locations of Elasticsearch
    # config and the Spark installation.
    inreplace libexec/"conf/pio-env.sh" do |s|
      s.gsub! /#\s*ES_CONF_DIR=.+$/, "ES_CONF_DIR=#{Formula['elasticsearch'].opt_prefix}/config"
      s.gsub! /SPARK_HOME=.+$/, "SPARK_HOME=#{Formula['apache-spark'].opt_prefix}"
    end
  end
end
| 25.555556 | 97 | 0.686957 |
1855d4cfa1468b3f933d0f17f1121f85bec6431b | 353 | ActionController::Routing::Routes.draw do |map|
map.resources :jobs
map.logout '/logout', :controller => 'sessions', :action => 'destroy'
map.login '/login', :controller => 'sessions', :action => 'new'
map.resource :session
map.namespace :admin do |admin|
admin.home 'home', :controller => :home
end
map.root :controller => "jobs"
end
| 23.533333 | 71 | 0.668555 |
03029a934e04cb58ea6c34b2a8f21ca7620a1693 | 707 | class ApplicationController < ActionController::Base
# Redirects to 'Edit User Features' page if questions haven't been answered
def after_sign_in_path_for(resource)
if current_user.speed == nil || current_user.guide == nil
"/user_features/#{current_user.id}/edit"
else
root_url
end
end
# Handles CanCan::AccessDenied error
rescue_from CanCan::AccessDenied do |exception|
respond_to do |format|
format.json { head :forbidden, content_type: 'text/html' }
format.html { redirect_to main_app.root_url, alert: "#{exception.message} You aren't allowed to do this!" }
format.js { head :forbidden, content_type: 'text/html' }
end
end
end
| 32.136364 | 113 | 0.695898 |
e8430060648786d38a6e18291bafdfac7a787673 | 1,430 | RSpec.describe PokerInputParser, '#parse' do
shared_examples_for PokerInputParser do |players_expected_cards|
before(:each) do
parser = PokerInputParser.new subject
@players = parser.players
end
it "has #{players_expected_cards.count} players" do
expect(@players.count).to eq(players_expected_cards.count)
end
it 'each player has 5 cards' do
@players.each do |player|
expect(player.cards.count).to eq(5)
end
end
players_expected_cards.each.with_index do |expected_cards, i|
it "player #{i+1}\'s cards are #{expected_cards}" do
actual_cards = @players[i].cards
expected_cards.each.with_index do |c, card_index|
expect(actual_cards[card_index]).to match(CardParser.parse c)
end
end
end
end
context 'given input with two players' do
subject {
'Black: 2H 3D 5S 9C KD White: 2C 3H 4S 8C AH'
}
it_should_behave_like PokerInputParser, [
['2H', '3D', '5S', '9C', 'KD'],
['2C', '3H', '4S', '8C', 'AH'],
]
end
context 'given input with three players' do
subject {
'Black: 3H AD 6C 8C QD White: 8D 3S 5S 9C AH Orange: 3D 4C 8H AS JD'
}
it_should_behave_like PokerInputParser, [
['3H', 'AD', '6C', '8C', 'QD'],
['8D', '3S', '5S', '9C', 'AH'],
['3D', '4C', '8H', 'AS', 'JD'],
]
end
end
| 26 | 76 | 0.586014 |
e858f2a4542756b66b2c3e4d4079031e7858f512 | 1,068 | require 'test_helper'
class ArticlesControllerTest < ActionDispatch::IntegrationTest
setup do
@article = articles(:one)
end
test 'should get index' do
get articles_url
assert_response :success
end
test 'should get new' do
get new_article_url
assert_response :success
end
test 'should create article' do
assert_difference('Article.count') do
post articles_url, params: { article: { article: @article.article } }
end
assert_redirected_to article_url(Article.last)
end
test 'should show article' do
get article_url(@article)
assert_response :success
end
test 'should get edit' do
get edit_article_url(@article)
assert_response :success
end
test 'should update article' do
patch article_url(@article), params: { article: { article: @article.article } }
assert_redirected_to article_url(@article)
end
test 'should destroy article' do
assert_difference('Article.count', -1) do
delete article_url(@article)
end
assert_redirected_to articles_url
end
end
| 21.795918 | 83 | 0.713483 |
ac3ea702efd6ff69548cb33614400c001477b40f | 35,560 | require File.join(File.dirname(File.expand_path(__FILE__)), 'spec_helper.rb')
# Integration specs for Database#schema parsing. Runs only against
# adapters that support schema introspection; `cspecify` marks examples
# skipped on the listed adapters/databases.
describe "Database schema parser" do
  before do
    # Save identifier/quoting settings so each example can mutate them.
    @iom = DB.identifier_output_method
    @iim = DB.identifier_input_method
    @qi = DB.quote_identifiers?
  end
  after do
    # Restore settings and drop the scratch table.
    DB.identifier_output_method = @iom
    DB.identifier_input_method = @iim
    DB.quote_identifiers = @qi
    DB.drop_table?(:items)
  end
  it "should handle a database with a identifier methods" do
    DB.identifier_output_method = :reverse
    DB.identifier_input_method = :reverse
    DB.quote_identifiers = true
    DB.create_table!(:items){Integer :number}
    begin
      DB.schema(:items, :reload=>true).must_be_kind_of(Array)
      DB.schema(:items, :reload=>true).first.first.must_equal :number
    ensure
      DB.drop_table(:items)
    end
  end
  it "should handle a dataset with identifier methods different than the database's" do
    DB.identifier_output_method = :reverse
    DB.identifier_input_method = :reverse
    DB.quote_identifiers = true
    DB.create_table!(:items){Integer :number}
    DB.identifier_output_method = @iom
    DB.identifier_input_method = @iim
    ds = DB[:items]
    ds.identifier_output_method = :reverse
    ds.identifier_input_method = :reverse
    begin
      DB.schema(ds, :reload=>true).must_be_kind_of(Array)
      DB.schema(ds, :reload=>true).first.first.must_equal :number
    ensure
      # Reverse methods must be restored before dropping, since the
      # table was created under the reversed identifier mapping.
      DB.identifier_output_method = :reverse
      DB.identifier_input_method = :reverse
      DB.drop_table(:items)
    end
  end
  it "should not issue an sql query if the schema has been loaded unless :reload is true" do
    DB.create_table!(:items){Integer :number}
    DB.schema(:items, :reload=>true)
    DB.schema(:items)
    DB.schema(:items, :reload=>true)
  end
  it "Model schema should include columns in the table, even if they aren't selected" do
    DB.create_table!(:items){String :a; Integer :number}
    m = Sequel::Model(DB[:items].select(:a))
    m.columns.must_equal [:a]
    m.db_schema[:number][:type].must_equal :integer
  end
  it "should raise an error when the table doesn't exist" do
    proc{DB.schema(:no_table)}.must_raise(Sequel::Error, Sequel::DatabaseError)
  end
  it "should return the schema correctly" do
    # Schema is an array of [column_name, info_hash] pairs.
    DB.create_table!(:items){Integer :number}
    schema = DB.schema(:items, :reload=>true)
    schema.must_be_kind_of(Array)
    schema.length.must_equal 1
    col = schema.first
    col.must_be_kind_of(Array)
    col.length.must_equal 2
    col.first.must_equal :number
    col_info = col.last
    col_info.must_be_kind_of(Hash)
    col_info[:type].must_equal :integer
    DB.schema(:items)
  end
  it "should parse primary keys from the schema properly" do
    DB.create_table!(:items){Integer :number}
    DB.schema(:items).collect{|k,v| k if v[:primary_key]}.compact.must_equal []
    DB.create_table!(:items){primary_key :number}
    DB.schema(:items).collect{|k,v| k if v[:primary_key]}.compact.must_equal [:number]
    DB.create_table!(:items){Integer :number1; Integer :number2; primary_key [:number1, :number2]}
    DB.schema(:items).collect{|k,v| k if v[:primary_key]}.compact.must_equal [:number1, :number2]
  end
  cspecify "should parse autoincrementing primary keys from the schema properly", :sqlite, :oracle do
    DB.create_table!(:items){Integer :number}
    DB.schema(:items).collect{|k,v| k if v[:primary_key] && v[:auto_increment]}.compact.must_equal []
    DB.create_table!(:items){primary_key :number}
    DB.schema(:items).collect{|k,v| k if v[:primary_key] && v[:auto_increment]}.compact.must_equal [:number]
    DB.create_table!(:items){Integer :number, :primary_key=>true}
    DB.schema(:items).collect{|k,v| k if v[:primary_key] && v[:auto_increment]}.compact.must_equal []
  end
  it "should parse NULL/NOT NULL from the schema properly" do
    DB.create_table!(:items){Integer :number, :null=>true}
    DB.schema(:items).first.last[:allow_null].must_equal true
    DB.create_table!(:items){Integer :number, :null=>false}
    DB.schema(:items).first.last[:allow_null].must_equal false
  end
  it "should parse defaults from the schema properly" do
    DB.create_table!(:items){Integer :number}
    DB.schema(:items).first.last[:ruby_default].must_equal nil
    DB.create_table!(:items){Integer :number, :default=>0}
    DB.schema(:items).first.last[:ruby_default].must_equal 0
    DB.create_table!(:items){String :a, :default=>"blah"}
    DB.schema(:items).first.last[:ruby_default].must_equal 'blah'
  end
  it "should make :default nil for a NULL default" do
    DB.create_table!(:items){Integer :number}
    DB.schema(:items).first.last[:default].must_equal nil
    DB.create_table!(:items){Integer :number, :default=>0}
    DB.schema(:items).first.last[:default].wont_equal nil
  end
  it "should parse current timestamp defaults from the schema properly" do
    DB.create_table!(:items){Time :a, :default=>Sequel::CURRENT_TIMESTAMP}
    DB.schema(:items).first.last[:ruby_default].must_equal Sequel::CURRENT_TIMESTAMP
  end
  cspecify "should parse current date defaults from the schema properly", :mysql, :oracle do
    DB.create_table!(:items){Date :a, :default=>Sequel::CURRENT_DATE}
    DB.schema(:items).first.last[:ruby_default].must_equal Sequel::CURRENT_DATE
  end
  cspecify "should parse types from the schema properly", [:jdbc, :db2], :oracle do
    DB.create_table!(:items){Integer :number}
    DB.schema(:items).first.last[:type].must_equal :integer
    DB.create_table!(:items){Fixnum :number}
    DB.schema(:items).first.last[:type].must_equal :integer
    DB.create_table!(:items){Bignum :number}
    DB.schema(:items).first.last[:type].must_equal :integer
    DB.create_table!(:items){Float :number}
    DB.schema(:items).first.last[:type].must_equal :float
    DB.create_table!(:items){BigDecimal :number, :size=>[11, 2]}
    DB.schema(:items).first.last[:type].must_equal :decimal
    DB.create_table!(:items){Numeric :number, :size=>[12, 0]}
    DB.schema(:items).first.last[:type].must_equal :integer
    DB.create_table!(:items){String :number}
    DB.schema(:items).first.last[:type].must_equal :string
    DB.create_table!(:items){Date :number}
    DB.schema(:items).first.last[:type].must_equal :date
    DB.create_table!(:items){Time :number}
    DB.schema(:items).first.last[:type].must_equal :datetime
    DB.create_table!(:items){DateTime :number}
    DB.schema(:items).first.last[:type].must_equal :datetime
    DB.create_table!(:items){File :number}
    DB.schema(:items).first.last[:type].must_equal :blob
    DB.create_table!(:items){TrueClass :number}
    DB.schema(:items).first.last[:type].must_equal :boolean
    DB.create_table!(:items){FalseClass :number}
    DB.schema(:items).first.last[:type].must_equal :boolean
  end
  it "should round trip database types from the schema properly" do
    # A parsed :db_type must be usable verbatim to recreate the column.
    DB.create_table!(:items){String :number, :size=>50}
    db_type = DB.schema(:items).first.last[:db_type]
    DB.create_table!(:items){column :number, db_type}
    DB.schema(:items).first.last[:db_type].must_equal db_type
    DB.create_table!(:items){Numeric :number, :size=>[11,3]}
    db_type = DB.schema(:items).first.last[:db_type]
    DB.create_table!(:items){column :number, db_type}
    DB.schema(:items).first.last[:db_type].must_equal db_type
  end
  it "should parse maximum length for string columns" do
    DB.create_table!(:items){String :a, :size=>4}
    DB.schema(:items).first.last[:max_length].must_equal 4
    DB.create_table!(:items){String :a, :fixed=>true, :size=>3}
    DB.schema(:items).first.last[:max_length].must_equal 3
  end
end if DB.supports_schema_parsing?
# Integration specs for Database#indexes introspection.
describe "Database index parsing" do
  after do
    DB.drop_table?(:items)
  end
  it "should parse indexes into a hash" do
    # Delete :deferrable entry, since not all adapters implement it
    f = lambda{h = DB.indexes(:items); h.values.each{|h2| h2.delete(:deferrable)}; h}
    DB.create_table!(:items){Integer :n; Integer :a}
    f.call.must_equal({})
    DB.add_index(:items, :n)
    f.call.must_equal(:items_n_index=>{:columns=>[:n], :unique=>false})
    DB.drop_index(:items, :n)
    f.call.must_equal({})
    DB.add_index(:items, :n, :unique=>true, :name=>:blah_blah_index)
    f.call.must_equal(:blah_blah_index=>{:columns=>[:n], :unique=>true})
    DB.add_index(:items, [:n, :a])
    f.call.must_equal(:blah_blah_index=>{:columns=>[:n], :unique=>true}, :items_n_a_index=>{:columns=>[:n, :a], :unique=>false})
    DB.drop_index(:items, :n, :name=>:blah_blah_index)
    f.call.must_equal(:items_n_a_index=>{:columns=>[:n, :a], :unique=>false})
    DB.drop_index(:items, [:n, :a])
    f.call.must_equal({})
  end
  it "should not include a primary key index" do
    DB.create_table!(:items){primary_key :n}
    DB.indexes(:items).must_equal({})
    DB.create_table!(:items){Integer :n; Integer :a; primary_key [:n, :a]}
    DB.indexes(:items).must_equal({})
  end
  cspecify "should not include partial indexes", :sqlite do
    DB.create_table!(:items){Integer :n; Integer :a; index :n, :where=>proc{n > 10}}
    DB.indexes(:items).must_equal({})
  end if DB.supports_partial_indexes?
end if DB.supports_index_parsing?
# Integration specs for Database#foreign_key_list introspection.
describe "Database foreign key parsing" do
  before do
    @db = DB
    # @pr asserts that the foreign keys on +table+ match +expected+,
    # where each expected entry is [columns, referenced_table, key].
    # A key starting with :pk means "references the primary key": the
    # adapter may report it either as nil or as the explicit pk columns,
    # and both forms are accepted below.
    @pr = lambda do |table, *expected|
      actual = @db.foreign_key_list(table).sort_by{|c| c[:columns].map{|s| s.to_s}.join << (c[:key]||[]).map{|s| s.to_s}.join}.map{|v| v.values_at(:columns, :table, :key)}
      actual.zip(expected).each do |a, e|
        if e.last.first == :pk
          if a.last == nil
            a.pop
            e.pop
          else
            e.last.shift
          end
        end
        a.must_equal e
      end
      actual.length.must_equal expected.length
    end
  end
  after do
    @db.drop_table?(:b, :a)
  end
  it "should parse foreign key information into an array of hashes" do
    @db.create_table!(:a, :engine=>:InnoDB){primary_key :c; Integer :d, :null => false, :unique => true}
    @db.create_table!(:b, :engine=>:InnoDB){foreign_key :e, :a}
    @pr[:a]
    @pr[:b, [[:e], :a, [:pk, :c]]]
    @db.alter_table(:b){add_foreign_key :f, :a, :key=>[:d]}
    @pr[:b, [[:e], :a, [:pk, :c]], [[:f], :a, [:d]]]
    @db.alter_table(:b){add_foreign_key [:f], :a, :key=>[:c]}
    @pr[:b, [[:e], :a, [:pk, :c]], [[:f], :a, [:c]], [[:f], :a, [:d]]]
    @db.alter_table(:a){add_unique_constraint [:d, :c]}
    @db.alter_table(:b){add_foreign_key [:f, :e], :a, :key=>[:d, :c]}
    @pr[:b, [[:e], :a, [:pk, :c]], [[:f], :a, [:c]], [[:f], :a, [:d]], [[:f, :e], :a, [:d, :c]]]
    @db.alter_table(:b){drop_foreign_key [:f, :e]}
    @pr[:b, [[:e], :a, [:pk, :c]], [[:f], :a, [:c]], [[:f], :a, [:d]]]
    @db.alter_table(:b){drop_foreign_key :e}
    @pr[:b, [[:f], :a, [:c]], [[:f], :a, [:d]]]
    # Dropping a key that maps to multiple constraints is ambiguous and
    # must raise rather than guess.
    proc{@db.alter_table(:b){drop_foreign_key :f}}.must_raise(Sequel::Error, Sequel::DatabaseError)
    @pr[:b, [[:f], :a, [:c]], [[:f], :a, [:d]]]
  end
  it "should handle composite foreign and primary keys" do
    @db.create_table!(:a, :engine=>:InnoDB){Integer :b, :null=>false; Integer :c, :null=>false; Integer :d, :null=>false; primary_key [:b, :c]; unique [:d, :c]}
    @db.create_table!(:b, :engine=>:InnoDB){Integer :e, :null=>false; Integer :f, :null=>false; Integer :g, :null=>false; foreign_key [:e, :f], :a; foreign_key [:g, :f], :a, :key=>[:d, :c]}
    @pr[:b, [[:e, :f], :a, [:pk, :b, :c]], [[:g, :f], :a, [:d, :c]]]
  end
  it "should handle self-referential composite foreign and primary keys" do
    @db.create_table!(:a, :engine=>:InnoDB){Integer :b, :null=>false; Integer :c, :null=>false; Integer :d, :null=>false; primary_key [:b, :c]; unique [:d, :b]}
    @db.alter_table(:a){add_foreign_key [:b, :d], :a; add_foreign_key [:d, :c], :a; add_foreign_key [:c, :b], :a, :key=>[:d, :b]}
    @pr[:a, [[:b, :d], :a, [:pk, :b, :c]], [[:c, :b], :a, [:d, :b]], [[:d, :c], :a, [:pk, :b, :c]]]
  end
end if DB.supports_foreign_key_parsing?
describe "Database schema modifiers" do
before do
@db = DB
@ds = @db[:items]
end
after do
# Use instead of drop_table? to work around issues on jdbc/db2
@db.drop_table(:items) rescue nil
@db.drop_table(:items2) rescue nil
end
it "should create tables correctly" do
@db.create_table!(:items){Integer :number}
@db.table_exists?(:items).must_equal true
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number]
@ds.insert([10])
@ds.columns!.must_equal [:number]
end
it "should create tables from select statements correctly" do
@db.create_table!(:items){Integer :number}
@ds.insert([10])
@db.create_table(:items2, :as=>@db[:items])
@db.schema(:items2, :reload=>true).map{|x| x.first}.must_equal [:number]
@db[:items2].columns.must_equal [:number]
@db[:items2].all.must_equal [{:number=>10}]
end
it "should not raise an error if table doesn't exist when using drop_table :if_exists" do
@db.drop_table(:items, :if_exists=>true)
end if DB.supports_drop_table_if_exists?
describe "views" do
before do
@db.drop_view(:items_view2) rescue nil
@db.drop_view(:items_view) rescue nil
@db.create_table!(:items){Integer :number}
@ds.insert(:number=>1)
@ds.insert(:number=>2)
end
after do
@db.drop_view(:items_view2) rescue nil
@db.drop_view(:items_view) rescue nil
end
it "should create views correctly" do
@db.create_view(:items_view, @ds.where(:number=>1))
@db[:items_view].map(:number).must_equal [1]
end
it "should create views with check options correctly" do
@db.create_view(:items_view, @ds.where{number > 2}, :check=>true)
proc{@db[:items_view].insert(1)}.must_raise(Sequel::DatabaseError)
@db[:items_view].insert(3)
@db[:items_view].select_order_map(:number).must_equal [3]
@db.create_view(:items_view2, @db[:items_view].where{number > 1}, :check=>true)
proc{@db[:items_view2].insert(1)}.must_raise(Sequel::DatabaseError)
proc{@db[:items_view2].insert(2)}.must_raise(Sequel::DatabaseError)
@db[:items_view2].insert(4)
@db[:items_view2].select_order_map(:number).must_equal [3, 4]
@ds.select_order_map(:number).must_equal [1, 2, 3, 4]
end if DB.supports_views_with_check_option?
it "should create views with local check options correctly" do
@db.create_view(:items_view, @ds.where{number > 2})
@db[:items_view].insert(3)
@db[:items_view].select_order_map(:number).must_equal [3]
@db.create_view(:items_view2, @db[:items_view].where{number > 1}, :check=>:local)
proc{@db[:items_view2].insert(1)}.must_raise(Sequel::DatabaseError)
@db[:items_view2].insert(2)
@db[:items_view2].insert(4)
@db[:items_view2].select_order_map(:number).must_equal [3, 4]
@ds.select_order_map(:number).must_equal [1, 2, 2, 3, 4]
end if DB.supports_views_with_local_check_option?
cspecify "should create views with explicit columns correctly", [proc{|db| db.sqlite_version < 30900}, :sqlite] do
@db.create_view(:items_view, @ds.where(:number=>1), :columns=>[:n])
@db[:items_view].map(:n).must_equal [1]
end
it "should drop views correctly" do
@db.create_view(:items_view, @ds.where(:number=>1))
@db.drop_view(:items_view)
proc{@db[:items_view].map(:number)}.must_raise(Sequel::DatabaseError)
end
it "should not raise an error if view doesn't exist when using drop_view :if_exists" do
@db.drop_view(:items_view, :if_exists=>true)
end if DB.supports_drop_table_if_exists?
it "should create or replace views correctly" do
@db.create_or_replace_view(:items_view, @ds.where(:number=>1))
@db[:items_view].map(:number).must_equal [1]
@db.create_or_replace_view(:items_view, @ds.where(:number=>2))
@db[:items_view].map(:number).must_equal [2]
end
end
it "should handle create table in a rolled back transaction" do
@db.drop_table?(:items)
@db.transaction(:rollback=>:always){@db.create_table(:items){Integer :number}}
@db.table_exists?(:items).must_equal false
end if DB.supports_transactional_ddl?
describe "join tables" do
after do
@db.drop_join_table(:cat_id=>:cats, :dog_id=>:dogs) if @db.table_exists?(:cats_dogs)
@db.drop_table(:cats, :dogs)
@db.table_exists?(:cats_dogs).must_equal false
end
it "should create join tables correctly" do
@db.create_table!(:cats){primary_key :id}
@db.create_table!(:dogs){primary_key :id}
@db.create_join_table(:cat_id=>:cats, :dog_id=>:dogs)
@db.table_exists?(:cats_dogs).must_equal true
end
end
it "should create temporary tables without raising an exception" do
@db.create_table!(:items_temp, :temp=>true){Integer :number}
end
it "should have create_table? only create the table if it doesn't already exist" do
@db.create_table!(:items){String :a}
@db.create_table?(:items){String :b}
@db[:items].columns.must_equal [:a]
@db.drop_table?(:items)
@db.create_table?(:items){String :b}
@db[:items].columns.must_equal [:b]
end
it "should have create_table? work correctly with indexes" do
@db.create_table!(:items){String :a, :index=>true}
@db.create_table?(:items){String :b, :index=>true}
@db[:items].columns.must_equal [:a]
@db.drop_table?(:items)
@db.create_table?(:items){String :b, :index=>true}
@db[:items].columns.must_equal [:b]
end
it "should rename tables correctly" do
@db.drop_table?(:items)
@db.create_table!(:items2){Integer :number}
@db.rename_table(:items2, :items)
@db.table_exists?(:items).must_equal true
@db.table_exists?(:items2).must_equal false
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number]
@ds.insert([10])
@ds.columns!.must_equal [:number]
end
it "should allow creating indexes with tables" do
@db.create_table!(:items){Integer :number; index :number}
@db.table_exists?(:items).must_equal true
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number]
@ds.insert([10])
@ds.columns!.must_equal [:number]
end
it "should allow creating partial indexes with tables" do
@db.create_table!(:items){Integer :number; index :number, :where=>proc{number > 10}}
@db.table_exists?(:items).must_equal true
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number]
@ds.insert([10])
@ds.columns!.must_equal [:number]
end if DB.supports_partial_indexes?
it "should handle combination of default, unique, and not null" do
@db.create_table!(:items){Integer :number, :default=>0, :null=>false, :unique=>true}
@db.table_exists?(:items).must_equal true
@db.schema(:items, :reload=>true).map{|x| x.last}.first.values_at(:ruby_default, :allow_null).must_equal [0, false]
@ds.insert([10])
end
it "should be able to specify constraint names for column constraints" do
@db.create_table!(:items2){primary_key :id, :primary_key_constraint_name=>:foo_pk}
@db.create_table!(:items){foreign_key :id, :items2, :unique=>true, :foreign_key_constraint_name => :foo_fk, :unique_constraint_name => :foo_uk, :null=>false}
@db.alter_table(:items){drop_constraint :foo_fk, :type=>:foreign_key; drop_constraint :foo_uk, :type=>:unique}
@db.alter_table(:items2){drop_constraint :foo_pk, :type=>:primary_key}
end
it "should handle foreign keys correctly when creating tables" do
@db.create_table!(:items) do
primary_key :id
foreign_key :item_id, :items
unique [:item_id, :id]
foreign_key [:id, :item_id], :items, :key=>[:item_id, :id]
end
@db.table_exists?(:items).must_equal true
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :item_id]
@ds.columns!.must_equal [:id, :item_id]
end
it "should add columns to tables correctly" do
@db.create_table!(:items){Integer :number}
@ds.insert(:number=>10)
@db.alter_table(:items){add_column :name, String}
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number, :name]
@ds.columns!.must_equal [:number, :name]
@ds.all.must_equal [{:number=>10, :name=>nil}]
end
cspecify "should add primary key columns to tables correctly", :derby do
@db.create_table!(:items){Integer :number}
@ds.insert(:number=>10)
@db.alter_table(:items){add_primary_key :id}
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:number, :id]
@ds.columns!.must_equal [:number, :id]
@ds.map(:number).must_equal [10]
proc{@ds.insert(:id=>@ds.map(:id).first)}.must_raise Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError
end
it "should drop primary key constraints from tables correctly" do
@db.create_table!(:items){Integer :number; primary_key [:number], :name=>:items_pk}
@ds.insert(:number=>10)
@db.alter_table(:items){drop_constraint :items_pk, :type=>:primary_key}
@ds.map(:number).must_equal [10]
@ds.insert(10)
end
it "should add foreign key columns to tables correctly" do
@db.create_table!(:items){primary_key :id}
@ds.insert
i = @ds.get(:id)
@db.alter_table(:items){add_foreign_key :item_id, :items}
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :item_id]
@ds.columns!.must_equal [:id, :item_id]
@ds.all.must_equal [{:id=>i, :item_id=>nil}]
end
it "should not allow NULLs in a primary key" do
@db.create_table!(:items){String :id, :primary_key=>true}
proc{@ds.insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
end
it "should not allow NULLs when adding a primary key column" do
@db.create_table!(:items){String :foo}
@db.alter_table(:items){add_column :id, String, :primary_key=>true, :default=>'a'}
proc{@ds.insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
end
it "should not allow NULLs when creating table with primary key constraint" do
@db.create_table!(:items){String :id1; String :id2; primary_key [:id1, :id2]}
proc{@ds.insert(:id1=>nil, :id2=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
proc{@ds.insert(:id1=>nil, :id2=>'1')}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
proc{@ds.insert(:id1=>'1', :id2=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
end
it "should not allow NULLs when adding a primary key constraint" do
@db.create_table!(:items){String :id1; String :id2}
@db.alter_table(:items){add_primary_key [:id1, :id2]}
proc{@ds.insert(:id1=>nil, :id2=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
proc{@ds.insert(:id1=>nil, :id2=>'1')}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
proc{@ds.insert(:id1=>'1', :id2=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
end
it "should rename columns correctly" do
@db.create_table!(:items){Integer :id}
@ds.insert(:id=>10)
@db.alter_table(:items){rename_column :id, :id2}
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id2]
@ds.columns!.must_equal [:id2]
@ds.all.must_equal [{:id2=>10}]
end
it "should rename columns with defaults correctly" do
@db.create_table!(:items){String :n, :default=>'blah'}
@ds.insert
@db.alter_table(:items){rename_column :n, :n2}
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:n2]
@ds.columns!.must_equal [:n2]
@ds.insert
@ds.all.must_equal [{:n2=>'blah'}, {:n2=>'blah'}]
end
it "should rename columns with not null constraints" do
@db.create_table!(:items, :engine=>:InnoDB){String :n, :null=>false}
@ds.insert(:n=>'blah')
@db.alter_table(:items){rename_column :n, :n2}
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:n2]
@ds.columns!.must_equal [:n2]
@ds.insert(:n2=>'blah')
@ds.all.must_equal [{:n2=>'blah'}, {:n2=>'blah'}]
proc{@ds.insert(:n=>nil)}.must_raise(Sequel::DatabaseError)
end
it "should rename columns when the table is referenced by a foreign key" do
@db.create_table!(:items2){primary_key :id; Integer :a}
@db.create_table!(:items){Integer :id, :primary_key=>true; foreign_key :items_id, :items2}
@db[:items2].insert(:a=>10)
@ds.insert(:id=>1)
@db.alter_table(:items2){rename_column :a, :b}
@db[:items2].insert(:b=>20)
@ds.insert(:id=>2)
@db[:items2].select_order_map([:id, :b]).must_equal [[1, 10], [2, 20]]
end
cspecify "should rename primary_key columns correctly", :db2 do
@db.create_table!(:items){Integer :id, :primary_key=>true}
@ds.insert(:id=>10)
@db.alter_table(:items){rename_column :id, :id2}
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id2]
@ds.columns!.must_equal [:id2]
@ds.all.must_equal [{:id2=>10}]
end
cspecify "should set column NULL/NOT NULL correctly", [:jdbc, :db2], [:db2] do
@db.create_table!(:items, :engine=>:InnoDB){Integer :id}
@ds.insert(:id=>10)
@db.alter_table(:items){set_column_allow_null :id, false}
@db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id]
@ds.columns!.must_equal [:id]
proc{@ds.insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
@db.alter_table(:items){set_column_allow_null :id, true}
@ds.insert(:id=>nil)
@ds.all.must_equal [{:id=>10}, {:id=>nil}]
end
  # set_column_* operations, table-level constraint add/drop, multi-op
  # alter_table calls, and deferrable constraints.
  it "should set column defaults correctly" do
    @db.create_table!(:items){Integer :id}
    @ds.insert(:id=>10)
    @db.alter_table(:items){set_column_default :id, 20}
    @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id]
    @ds.columns!.must_equal [:id]
    @ds.insert
    @ds.all.must_equal [{:id=>10}, {:id=>20}]
  end
  cspecify "should set column types correctly", [:jdbc, :db2], [:db2], :oracle do
    @db.create_table!(:items){Integer :id}
    @ds.insert(:id=>10)
    @db.alter_table(:items){set_column_type :id, String}
    @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id]
    @ds.columns!.must_equal [:id]
    @ds.insert(:id=>'20')
    @ds.order(:id).all.must_equal [{:id=>"10"}, {:id=>"20"}]
  end
  cspecify "should set column types without modifying NULL/NOT NULL", [:jdbc, :db2], [:db2], :oracle, :derby do
    @db.create_table!(:items){Integer :id, :null=>false, :default=>2}
    proc{@ds.insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
    @db.alter_table(:items){set_column_type :id, String}
    proc{@ds.insert(:id=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
    @db.create_table!(:items){Integer :id}
    @ds.insert(:id=>nil)
    @db.alter_table(:items){set_column_type :id, String}
    @ds.insert(:id=>nil)
    @ds.map(:id).must_equal [nil, nil]
  end
  cspecify "should set column types without modifying defaults", [:jdbc, :db2], [:db2], :oracle, :derby do
    @db.create_table!(:items){Integer :id, :default=>0}
    @ds.insert
    @ds.map(:id).must_equal [0]
    @db.alter_table(:items){set_column_type :id, String}
    @ds.insert
    @ds.map(:id).must_equal ['0', '0']
    @db.create_table!(:items){String :id, :default=>'a'}
    @ds.insert
    @ds.map(:id).must_equal %w'a'
    @db.alter_table(:items){set_column_type :id, String, :size=>1}
    @ds.insert
    @ds.map(:id).must_equal %w'a a'
  end
  it "should add unnamed unique constraints and foreign key table constraints correctly" do
    @db.create_table!(:items, :engine=>:InnoDB){Integer :id, :null => false; Integer :item_id, :null => false}
    @db.alter_table(:items) do
      add_unique_constraint [:item_id, :id]
      add_foreign_key [:id, :item_id], :items, :key=>[:item_id, :id]
    end
    @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :item_id]
    @ds.columns!.must_equal [:id, :item_id]
    @ds.insert(1, 1)
    proc{@ds.insert(1, 1)}.must_raise Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError
    proc{@ds.insert(1, 2)}.must_raise Sequel::ForeignKeyConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError
  end
  it "should add named unique constraints and foreign key table constraints correctly" do
    @db.create_table!(:items, :engine=>:InnoDB){Integer :id, :null=>false; Integer :item_id, :null=>false}
    @db.alter_table(:items) do
      add_unique_constraint [:item_id, :id], :name=>:unique_iii
      add_foreign_key [:id, :item_id], :items, :key=>[:item_id, :id], :name=>:fk_iii
    end
    @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :item_id]
    @ds.columns!.must_equal [:id, :item_id]
    @ds.insert(1, 1)
    proc{@ds.insert(1, 1)}.must_raise Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError
    proc{@ds.insert(1, 2)}.must_raise Sequel::ForeignKeyConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError
  end
  it "should drop unique constraints and foreign key table constraints correctly" do
    @db.create_table!(:items) do
      Integer :id
      Integer :item_id
      unique [:item_id, :id], :name=>:items_uk
      foreign_key [:id, :item_id], :items, :key=>[:item_id, :id], :name=>:items_fk
    end
    @db.alter_table(:items) do
      drop_constraint(:items_fk, :type=>:foreign_key)
      drop_constraint(:items_uk, :type=>:unique)
    end
    @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :item_id]
    @ds.columns!.must_equal [:id, :item_id]
    # Both constraints gone: duplicate non-matching rows are accepted.
    @ds.insert(1, 2)
    @ds.insert(1, 2)
  end
  it "should remove columns from tables correctly" do
    @db.create_table!(:items) do
      primary_key :id
      Integer :i
    end
    @ds.insert(:i=>10)
    @db.drop_column(:items, :i)
    @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id]
  end
  it "should remove columns with defaults from tables correctly" do
    @db.create_table!(:items) do
      primary_key :id
      Integer :i, :default=>20
    end
    @ds.insert(:i=>10)
    @db.drop_column(:items, :i)
    @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id]
  end
  it "should remove foreign key columns from tables correctly" do
    @db.create_table!(:items, :engine=>:InnoDB) do
      primary_key :id
      Integer :i
      foreign_key :item_id, :items
    end
    @ds.insert(:i=>10)
    @db.alter_table(:items){drop_foreign_key :item_id}
    @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :i]
  end if DB.supports_foreign_key_parsing?
  it "should remove multiple columns in a single alter_table block" do
    @db.create_table!(:items) do
      primary_key :id
      String :name
      Integer :number
    end
    @ds.insert(:number=>10)
    @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id, :name, :number]
    @db.alter_table(:items) do
      drop_column :name
      drop_column :number
    end
    @db.schema(:items, :reload=>true).map{|x| x.first}.must_equal [:id]
  end
  cspecify "should work correctly with many operations in a single alter_table call", [:jdbc, :db2], [:db2] do
    @db.create_table!(:items) do
      primary_key :id
      String :name2
      String :number2
      constraint :bar, Sequel.~(:id=>nil)
    end
    @ds.insert(:name2=>'A12')
    @db.alter_table(:items) do
      add_column :number, Integer
      drop_column :number2
      rename_column :name2, :name
      drop_constraint :bar
      set_column_not_null :name
      set_column_default :name, 'A13'
      add_constraint :foo, Sequel.like(:name, 'A%')
    end
    @db[:items].first.must_equal(:id=>1, :name=>'A12', :number=>nil)
    @db[:items].delete
    proc{@db[:items].insert(:name=>nil)}.must_raise(Sequel::NotNullConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
    @db[:items].insert(:number=>1)
    @db[:items].get(:name).must_equal 'A13'
  end
  it "should support deferrable foreign key constraints" do
    @db.create_table!(:items2){Integer :id, :primary_key=>true}
    @db.create_table!(:items){foreign_key :id, :items2, :deferrable=>true}
    proc{@db[:items].insert(1)}.must_raise(Sequel::ForeignKeyConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
    # NOTE(review): the inner proc is the transaction block's return value and
    # is only invoked by must_raise AFTER the transaction has committed, so
    # the insert actually runs outside the transaction — confirm this
    # exercises deferred checking as intended.
    @db.transaction{proc{@db[:items].insert(1)}}.must_raise(Sequel::ForeignKeyConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
  end if DB.supports_deferrable_foreign_key_constraints?
  it "should support deferrable unique constraints when creating or altering tables" do
    @db.create_table!(:items){Integer :t; unique [:t], :name=>:atest_def, :deferrable=>true, :using=>:btree}
    @db[:items].insert(1)
    @db[:items].insert(2)
    proc{@db[:items].insert(2)}.must_raise(Sequel::DatabaseError, Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
    # NOTE(review): same post-commit proc pattern as above — verify.
    @db.transaction{proc{@db[:items].insert(2)}}.must_raise(Sequel::DatabaseError, Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
    @db.create_table!(:items){Integer :t}
    @db.alter_table(:items){add_unique_constraint [:t], :name=>:atest_def, :deferrable=>true, :using=>:btree}
    @db[:items].insert(1)
    @db[:items].insert(2)
    proc{@db[:items].insert(2)}.must_raise(Sequel::DatabaseError, Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
    @db.transaction{proc{@db[:items].insert(2)}}.must_raise(Sequel::DatabaseError, Sequel::UniqueConstraintViolation, Sequel::ConstraintViolation, Sequel::DatabaseError)
  end if DB.supports_deferrable_constraints?
end
# Listing tables and views, and how the listings respect the database's
# identifier input/output methods.
describe "Database#tables and #views" do
  before do
    # NOTE(review): monkey-patches ::String with a class-variable counter so
    # that each identifier maps to a unique "xxxxxN" name; the patch is never
    # removed after the example group runs.
    class ::String
      @@xxxxx = 0
      def xxxxx
        "xxxxx#{@@xxxxx += 1}"
      end
    end
    @db = DB
    @db.drop_view(:sequel_test_view) rescue nil
    @db.drop_table?(:sequel_test_table)
    @db.create_table(:sequel_test_table){Integer :a}
    @db.create_view :sequel_test_view, @db[:sequel_test_table]
    # Remember the identifier methods so the examples can restore them.
    @iom = @db.identifier_output_method
    @iim = @db.identifier_input_method
  end
  after do
    @db.identifier_output_method = @iom
    @db.identifier_input_method = @iim
    @db.drop_view :sequel_test_view
    @db.drop_table :sequel_test_table
  end
  it "#tables should return an array of symbols" do
    ts = @db.tables
    ts.must_be_kind_of(Array)
    ts.each{|t| t.must_be_kind_of(Symbol)}
    ts.must_include(:sequel_test_table)
    ts.wont_include(:sequel_test_view)
  end if DB.supports_table_listing?
  it "#tables should respect the database's identifier_output_method" do
    @db.identifier_output_method = :xxxxx
    @db.identifier_input_method = :xxxxx
    @db.tables.each{|t| t.to_s.must_match(/\Ax{5}\d+\z/)}
  end if DB.supports_table_listing?
  it "#views should return an array of symbols" do
    ts = @db.views
    ts.must_be_kind_of(Array)
    ts.each{|t| t.must_be_kind_of(Symbol)}
    ts.wont_include(:sequel_test_table)
    ts.must_include(:sequel_test_view)
  end if DB.supports_view_listing?
  it "#views should respect the database's identifier_output_method" do
    @db.identifier_output_method = :xxxxx
    @db.identifier_input_method = :xxxxx
    @db.views.each{|t| t.to_s.must_match(/\Ax{5}\d+\z/)}
  end if DB.supports_view_listing?
end
| 42.485066 | 189 | 0.673003 |
873b1c0944d9cab302a0129389c080b5289fd907 | 634 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'sms_otp_reader'
s.version = '0.0.6'
s.summary = 'SMS library'
s.description = <<-DESC
SMS library
DESC
s.homepage = 'http://example.com'
s.license = { :file => '../LICENSE' }
s.author = { 'Nitish Kumar' => '[email protected]' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'Flutter'
s.ios.deployment_target = '8.0'
end
| 28.818182 | 83 | 0.5347 |
4a1ca661a63f1df5cf9a12cf3e8167fdb4776ff7 | 126 | MyEngine::Engine.routes.draw do
root :to=>'auth#index'
match 'facebook/callback' => 'auth#callback' , :via => 'get'
end
| 25.2 | 63 | 0.65873 |
7a6d933edd8ed2b424c6c429e922f06ba7a77941 | 99 | require 'number_words'
require 'minitest/autorun'
require 'minitest/unit'
require 'minitest/pride'
| 19.8 | 26 | 0.808081 |
bb8216fb6dcf00e3a0c1dae500e0daa5178c9684 | 137 | class AddDescriptionToCharacters < ActiveRecord::Migration[5.1]
def change
add_column :characters, :description, :string
end
end
| 22.833333 | 63 | 0.773723 |
8744a12cff311ca8440100b7202b6591a758d93c | 1,588 | require 'spec_helper'
describe Starcraft2::Member do
describe '.initialize' do
let(:member) { Starcraft2::Member.new(@options) }
before do
@options = {
'character' => {
'id' => 333319,
'realm' => 1,
'displayName' => 'NajM',
'clanName' => '',
'clanTag' => '',
'profilePath' => '/profile/333319/1/NajM/'
},
'joinTimestamp' => 123456,
'points' => 1234,
'wins' => 123,
'losses' => 12,
'highestRank' => 1,
'previousRank' => 5,
'favoriteRaceP1' => 'Protoss',
'favoriteRaceP2' => 'Protoss',
'favoriteRaceP3' => 'Protoss',
'favoriteRaceP4' => 'Protoss'
}
end
it 'should store attributes as underscored' do
member.character.class.should == Starcraft2::Character
member.character.id.should == 333319
member.character.realm.should == 1
member.character.display_name.should == 'NajM'
member.character.clan_name.should == ''
member.character.clan_tag.should == ''
member.character.profile_path.should == '/profile/333319/1/NajM/'
member.join_timestamp.should == 123456
member.points.should == 1234
member.wins.should == 123
member.losses.should == 12
member.highest_rank.should == 1
member.previous_rank.should == 5
member.favorite_race_p1.should == 'Protoss'
member.favorite_race_p2.should == 'Protoss'
member.favorite_race_p3.should == 'Protoss'
member.favorite_race_p4.should == 'Protoss'
end
end
end | 31.137255 | 71 | 0.586272 |
bf9d44ce969689419d3d193c27426dd5efece15c | 2,224 | # coding: utf-8
module Pod
class Command
class ListPods < Command
include Config::Mixin
self.summary = 'Check if the latest version of a pod is up to date'
self.arguments = [['NAME', :required]]
def initialize(argv)
@name = argv.shift_argument
super
end
def validate!
super
# help!('A pod name is required.') unless @name
end
def run
# sets = SourcesManager.search_by_name(@name.strip)
# sets.each do |set|
# show_pod(set)
# end
sof_dir = config.repos_dir.to_s + "/bitbucket-sof-pod_spec_osx-ios"
sets = Source.new(sof_dir).search_by_name(".*")
sets.each do |set|
show_pod(set)
end
end
def show_pod(set)
UI.title("-> #{set.name}".green, '', 1) do
# UI.labeled 'Homepage', set.specification.homepage
latest_pod_version = set.highest_version.to_s
UI.labeled 'Latest pod version', " " + latest_pod_version
github_url = github_url(set)
if github_url
latest_version_in_original_repo = latest_version_in_repo(github_url)
UI.labeled 'Latest version in original repo', latest_version_in_original_repo
unless latest_pod_version == latest_version_in_original_repo
UI.puts_indented 'Outdated!'.yellow
end
else
# UI.warn 'Only GitHub source repository is supported.'
end
end
end
def github_url(set)
git_url = set.specification.source[:git]
return nil if !git_url || !git_url.include?('github.com/')
git_url.sub(/\.git$/, '')
end
def latest_version_in_repo(git_url)
tags = github_tags(git_url)
versions_from_tags(tags).sort.last
end
def github_tags(github_url)
GitHub.tags(github_url).map { |hash| hash['name'] }
end
def versions_from_tags(tags)
tags.map do |tag|
normalized_tag = tag.strip.sub(/\Av\D*/i, '')
if Version.correct?(normalized_tag)
normalized_tag
else
nil
end
end.compact
end
end
end
end
| 27.121951 | 89 | 0.583183 |
bb9f0220671cc2cac923277a742295753dcdae8f | 9,026 | require 'helper'
require 'inspec/resource'
require 'resources/aws/aws_iam_users'
require 'resource_support/aws'
require 'resources/aws/aws_iam_users'
# Maiusb = Mock AwsIamUsers::BackendFactory
# Abbreviation not used outside of this file
# Constructor behaviour: the plural AwsIamUsers resource accepts no scoping
# parameters at all.
class AwsIamUsersTestConstructor < Minitest::Test
  def setup
    AwsIamUsers::BackendFactory.select(Maiusb::Empty)
  end

  def test_users_no_params_does_not_explode
    AwsIamUsers.new
  end

  # Any keyword argument should be rejected outright.
  def test_users_all_params_rejected
    assert_raises(ArgumentError) { AwsIamUsers.new(something: 'somevalue') }
  end
end
# Filter-criteria behaviour of the plural AwsIamUsers resource, driven by the
# Maiusb fixture backends below (Basic: alice = no password/MFA, bob =
# password only, carol = password + MFA).
#
# Fix: several checks used `assert(N, expr)`. Minitest's `assert(test, msg)`
# treats the first argument as the value under test, so `assert(3, ...)` is
# always true (3 is truthy) and never inspected the count. These were meant
# to be `assert_equal(N, expr)`.
class AwsIamUsersTestFilterCriteria < Minitest::Test
  def setup
    # Reset to empty, that's harmless
    AwsIamUsers::BackendFactory.select(Maiusb::Empty)
  end

  #------------------------------------------#
  #                 Open Filter
  #------------------------------------------#
  def test_users_empty_result_when_no_users_no_criteria
    users = AwsIamUsers.new.where {}
    assert users.entries.empty?
  end

  def test_users_all_returned_when_some_users_no_criteria
    AwsIamUsers::BackendFactory.select(Maiusb::Basic)
    users = AwsIamUsers.new.where {}
    assert_equal(3, users.entries.count)
  end

  #------------------------------------------#
  #              has_mfa_enabled?
  #------------------------------------------#
  def test_users_criteria_has_mfa_enabled
    AwsIamUsers::BackendFactory.select(Maiusb::Basic)
    users = AwsIamUsers.new.where { has_mfa_enabled }
    assert_equal(1, users.entries.count)
    assert_includes users.usernames, 'carol'
    refute_includes users.usernames, 'alice'
  end

  #------------------------------------------#
  #           has_console_password?
  #------------------------------------------#
  def test_users_criteria_has_console_password?
    AwsIamUsers::BackendFactory.select(Maiusb::Basic)
    users = AwsIamUsers.new.where { has_console_password }
    assert_equal(2, users.entries.count)
    assert_includes users.usernames, 'carol'
    refute_includes users.usernames, 'alice'
  end

  #------------------------------------------#
  #            password_ever_used?
  #------------------------------------------#
  def test_users_criteria_password_ever_used?
    AwsIamUsers::BackendFactory.select(Maiusb::Basic)
    users = AwsIamUsers.new.where { password_ever_used? }
    assert_equal(2, users.entries.count)
    assert_includes users.usernames, 'carol'
    refute_includes users.usernames, 'alice'
  end

  #------------------------------------------#
  #           password_never_used?
  #------------------------------------------#
  def test_users_criteria_password_never_used?
    AwsIamUsers::BackendFactory.select(Maiusb::Basic)
    users = AwsIamUsers.new.where { password_never_used? }
    assert_equal(1, users.entries.count)
    assert_includes users.usernames, 'alice'
    refute_includes users.usernames, 'carol'
  end

  #------------------------------------------#
  #        password_last_used_days_ago
  #------------------------------------------#
  def test_users_criteria_has_password_last_used_days_ago_10
    AwsIamUsers::BackendFactory.select(Maiusb::Basic)
    users = AwsIamUsers.new.where(password_last_used_days_ago: 10)
    assert_equal(1, users.entries.count)
    assert_includes users.usernames, 'bob'
    refute_includes users.usernames, 'alice'
  end

  #------------------------------------------#
  #            has_inline_policies
  #------------------------------------------#
  def test_users_have_inline_policies
    AwsIamUsers::BackendFactory.select(Maiusb::Basic)
    users = AwsIamUsers.new.where(has_inline_policies?: true)
    assert_equal(2, users.entries.count)
    assert_includes users.usernames, 'bob'
    assert_includes users.usernames, 'carol'
    refute_includes users.usernames, 'alice'

    users.inline_policy_names.each do |name|
      assert_kind_of(String, name)
    end
    assert_includes users.inline_policy_names, 'bob-inline-01'
    assert_includes users.inline_policy_names, 'bob-inline-02'
    assert_includes users.inline_policy_names, 'carol-inline-01'
    assert_equal(3, users.inline_policy_names.count)
  end

  #------------------------------------------#
  #           has_attached_policies
  #------------------------------------------#
  def test_users_have_attached_policies
    AwsIamUsers::BackendFactory.select(Maiusb::Basic)
    users = AwsIamUsers.new.where(has_attached_policies: true)
    assert_equal(2, users.entries.count)
    assert_includes users.usernames, 'bob'
    assert_includes users.usernames, 'carol'
    refute_includes users.usernames, 'alice'

    users.attached_policy_names.each do |name|
      assert_kind_of(String, name)
    end
    # Names are de-duplicated across users (bob and carol share one name).
    assert_includes users.attached_policy_names, 'AdministratorAccess'
    assert_includes users.attached_policy_names, 'ReadOnlyAccess'
    assert_equal(2, users.attached_policy_names.count)

    users.attached_policy_arns.each do |arn|
      assert_kind_of(String, arn)
    end
    assert_includes users.attached_policy_arns, 'arn:aws:iam::aws:policy/ReadOnlyAccess'
    assert_equal(3, users.attached_policy_arns.count)
  end
end
#=============================================================================#
# Test Fixture Classes
#=============================================================================#
# Mock backends ("Maiusb" = Mock AwsIamUsers Backend) implementing the subset
# of the IAM API the resource under test calls.
module Maiusb

  # --------------------------------
  #       Empty - No users
  # --------------------------------
  class Empty < AwsBackendBase
    def list_users(criteria = {})
      OpenStruct.new({
        users: []
      })
    end

    def get_login_profile(criteria)
      # IAM signals "no console password" with NoSuchEntity.
      raise Aws::IAM::Errors::NoSuchEntity.new("No login profile for #{criteria[:user_name]}", 'Nope')
    end

    def list_mfa_devices(_criteria)
      OpenStruct.new({
        mfa_devices: []
      })
    end
  end

  # --------------------------------
  #        Basic - 3 Users
  # --------------------------------
  # Alice has no password or MFA device
  # Bob has a password but no MFA device
  # Carol has a password and MFA device
  class Basic < AwsBackendBase
    # arn, path, user_id omitted
    def list_users(criteria = {})
      OpenStruct.new({
        users: [
          OpenStruct.new({
            user_name: 'alice',
            create_date: DateTime.parse('2017-10-10T16:19:30Z'),
            # Password last used is absent, never logged in w/ password
          }),
          OpenStruct.new({
            user_name: 'bob',
            create_date: DateTime.parse('2017-11-06T16:19:30Z'),
            password_last_used: Time.now - 10*24*60*60,
          }),
          OpenStruct.new({
            user_name: 'carol',
            create_date: DateTime.parse('2017-10-10T16:19:30Z'),
            password_last_used: Time.now - 91*24*60*60,
          }),
        ]
      })
    end

    def get_login_profile(criteria)
      if ['bob', 'carol'].include?(criteria[:user_name])
        OpenStruct.new({
          login_profile: OpenStruct.new({
            user_name: criteria[:user_name],
            created_date: DateTime.parse('2017-10-10T16:19:30Z')
          })
        })
      else
        raise Aws::IAM::Errors::NoSuchEntity.new("No login profile for #{criteria[:user_name]}", 'Nope')
      end
    end

    def list_mfa_devices(criteria)
      # Only carol has an MFA device registered.
      if ['carol'].include?(criteria[:user_name])
        OpenStruct.new({
          mfa_devices: [
            OpenStruct.new({
              user_name: criteria[:user_name],
              serial_number: '1234567890',
              enable_date: DateTime.parse('2017-10-10T16:19:30Z'),
            })
          ]
        })
      else
        OpenStruct.new({
          mfa_devices: []
        })
      end
    end

    def list_user_policies(query)
      people = {
        'alice' => Aws::IAM::Types::ListUserPoliciesResponse.new(
          policy_names: []
        ),
        'bob' => Aws::IAM::Types::ListUserPoliciesResponse.new(
          policy_names: ['bob-inline-01', 'bob-inline-02'],
        ),
        'carol' => Aws::IAM::Types::ListUserPoliciesResponse.new(
          policy_names: ['carol-inline-01'],
        )
      }
      people[query[:user_name]]
    end

    def list_attached_user_policies(query)
      people = {
        'alice' => Aws::IAM::Types::ListAttachedUserPoliciesResponse.new(
          attached_policies: [],
        ),
        'bob' => Aws::IAM::Types::ListAttachedUserPoliciesResponse.new(
          attached_policies: [
            {
              policy_arn: 'arn:aws:iam::aws:policy/AdministratorAccess',
              policy_name: 'AdministratorAccess',
            },
          ]
        ),
        'carol' => Aws::IAM::Types::ListAttachedUserPoliciesResponse.new(
          attached_policies: [
            {
              policy_arn: 'arn:aws:iam::aws:policy/ReadOnlyAccess',
              policy_name: 'ReadOnlyAccess',
            },
            # NOTE(review): duplicate policy name under a distinct customer-
            # managed ARN — presumably deliberate, to exercise name
            # de-duplication in the resource; confirm.
            {
              policy_arn: 'arn:aws:iam::123456789012:policy/some-policy',
              policy_name: 'AdministratorAccess',
            },
          ]
        ),
      }
      people[query[:user_name]]
    end
  end
end
| 32.584838 | 104 | 0.580324 |
1df445197f3682869b9de64d0a116adf46bfb071 | 1,605 | require File.expand_path('../../../test_helper', __FILE__)
# Unit tests for Etsy::About, the value object wrapping the shop "about"
# section returned by the Etsy API's getAbout endpoint.
module Etsy
  class AboutTest < Test::Unit::TestCase
    context "An instance of the About class" do
      # Build the About under test from the canned getAbout API fixture;
      # the endpoint returns a list, so the first element is the record.
      setup do
        data = read_fixture('about/getAbout.json')
        @about = About.new(data.first)
      end
      should "have a shop id" do
        @about.shop_id.should == 8740774
      end
      should "have a status" do
        @about.status.should == "active"
      end
      should "have a story_headline" do
        @about.story_headline.should == 'A shop long in the making...'
      end
      should "have a story_leading_paragraph" do
        @about.story_leading_paragraph.should == 'This is the leading paragraph'
      end
      should "have a story" do
        @about.story.should == "I grew up with strong women in my family who all had a love of creating. My mom and grandma always encouraged a lifelong love of creating Working with glass, wire, and mineral components brings back my graduate school days, when I studied these items from a scientific point-of-view. Here's hoping I can create something that gives you a little sparkle in your life!"
      end
      # related_links is decoded into a hash keyed "link-0", "link-1", ...,
      # each value holding a title/url pair.
      should "have a related_links" do
        @about.related_links.should == {
          "link-0"=> {"title"=> "facebook", "url"=> "https://www.facebook.com/pebbleplusmetal/"},
          "link-1"=> {"title"=> "pinterest", "url"=> "https://www.pinterest.com/PebblePlusMetal/pebble%2Bmetal/"}
        }
      end
      should "have a url" do
        @about.url.should == 'https://www.etsy.com/shop/PebblePlusMetal/about'
      end
    end
  end
end
| 36.477273 | 404 | 0.646106 |
089c54cffd3b9894599f39d78ec5f5364c6efe0d | 702 | require 'rails_helper'
require 'shoulda/matchers'
# Model spec for Contact: checks the factory is valid, that each required
# attribute (name, surname, telephone, email) has a presence validation,
# and that the many-to-many association with locations is declared.
describe Contact do
  it "should have valid factory" do
    expect(FactoryGirl.create(:contact)).to be_valid
  end
  it "should be invalid without name" do
    expect(FactoryGirl.build(:contact, name: nil)).not_to be_valid
  end
  it "should be invalid without surname" do
    expect(FactoryGirl.build(:contact, surname: nil)).not_to be_valid
  end
  it "should be invalid without telephone" do
    expect(FactoryGirl.build(:contact, telephone: nil)).not_to be_valid
  end
  it "should be invalid without email" do
    expect(FactoryGirl.build(:contact, email: nil)).not_to be_valid
  end
  # HABTM association declared on the model (shoulda-matchers).
  it { should have_and_belong_to_many(:locations) }
end | 23.4 | 72 | 0.740741 |
e8926c124b38e434dc741516296177f09167b8e8 | 121 | class AddOrderQuantity < ActiveRecord::Migration[5.1]
def change
add_column :orders, :quantity, :integer
end
end
| 20.166667 | 53 | 0.743802 |
f7d80c50876555de2ec760ad4216f35e68ef0ee5 | 98 | name 'ripple-authd'
description 'peer-assisted key derivation for ripple wallets'
version '0.0.1'
| 24.5 | 61 | 0.785714 |
4a33f4873ae385038ff4af1af804eac8db571a85 | 1,976 | module Steep
class Project
class FileLoader
attr_reader :project
def initialize(project:)
@project = project
end
def each_path_in_patterns(patterns, ext)
patterns.each do |path|
absolute_path = project.base_dir + path
if absolute_path.file?
yield project.relative_path(absolute_path)
else
files = if absolute_path.directory?
Pathname.glob("#{absolute_path}/**/*#{ext}")
else
Pathname.glob(absolute_path)
end
files.sort.each do |source_path|
yield project.relative_path(source_path)
end
end
end
end
def load_sources(command_line_patterns)
project.targets.each do |target|
Steep.logger.tagged "target=#{target.name}" do
target_patterns = command_line_patterns.empty? ? target.source_patterns : command_line_patterns
each_path_in_patterns target_patterns, ".rb" do |path|
if target.possible_source_file?(path)
unless target.source_file?(path)
Steep.logger.info { "Adding source file: #{path}" }
target.add_source path, project.absolute_path(path).read
end
end
end
end
end
end
def load_signatures()
project.targets.each do |target|
Steep.logger.tagged "target=#{target.name}" do
each_path_in_patterns target.signature_patterns, ".rbs" do |path|
if target.possible_signature_file?(path)
unless target.signature_file?(path)
Steep.logger.info { "Adding signature file: #{path}" }
target.add_signature path, project.absolute_path(path).read
end
end
end
end
end
end
end
end
end
| 30.875 | 107 | 0.55668 |
260d20ddf4178c7e1ebe776d4bc619412ea463fd | 133 | module Admin
class DashboardController < Admin::BaseController
def index
redirect_to [:admin,:vendors]
end
end
end
| 16.625 | 51 | 0.706767 |
4a8af55945262d9b5c33962c73e51ef8e84b216e | 932 | # This class replicates the behaviour of a `ActionController::TestCase` to allow
# us to test the minitest helpers, in RSpec.
# Minimal stand-in for ActionController::TestCase so the minitest-style
# helpers can be exercised from RSpec: assertion methods are no-ops,
# request/response are canned fakes, and css_select returns one fake
# Nokogiri element with predictable attributes.
class FakeMinitestControllerTestCase
  # Fake of a Nokogiri attribute node; always reports the same value.
  class FakeNokogiriAttr
    def value
      "example"
    end
  end

  # Fake of a Nokogiri element exposing only the attributes the helpers read.
  class FakeNokogiriElement
    def attributes
      {
        "content" => FakeNokogiriAttr.new,
        "data-analytics-dimension" => FakeNokogiriAttr.new,
      }
    end
  end

  # Fake serving as both @request and the response object.
  class FakeRequestResponseObject
    def headers
      {}
    end

    def body
      "some content in the page body"
    end
  end

  def initialize
    @request = FakeRequestResponseObject.new
    @response = response
  end

  # No-op assertion stubs: accept any arguments, assert nothing, return nil.
  %i[assert assert_equal assert_select assert_match refute_nil refute_equal].each do |stub|
    define_method(stub) { |*| }
  end

  def css_select(*)
    [FakeNokogiriElement.new]
  end

  def response
    FakeRequestResponseObject.new
  end
end
| 16.642857 | 80 | 0.660944 |
3363f290e779fcfec356654a50a83c7bf0030d8c | 1,159 | #
# Copyright 2022 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# View helper shared by the GoCD admin pipeline screens (JRuby: builds
# Java-side config objects via the imported Java classes).
module Admin
  module PipelinesHelper
    include JavaImports

    # Stage configuration used when a pipeline is created without an explicit
    # first stage: a "defaultStage" holding a single "defaultJob" whose only
    # task is an Ant invocation.
    def default_stage_config
      default_job = JobConfig.new(CaseInsensitiveString.new("defaultJob"), ResourceConfigs.new, ArtifactTypeConfigs.new, com.thoughtworks.go.config.Tasks.new([AntTask.new].to_java(Task)))
      StageConfig.new(CaseInsensitiveString.new("defaultStage"), JobConfigs.new([default_job].to_java(JobConfig)))
    end

    # True when the new-pipeline form selected "use template" as the
    # configuration type; tolerates missing params via ActiveSupport's #try.
    def use_template?(params)
      chosen_type = params.try(:[], :pipeline_group).try(:[], :pipeline).try(:[], :configurationType)
      chosen_type == "configurationType_template"
    end
  end
end
| 37.387097 | 224 | 0.749784 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.