hexsha | size | content | avg_line_length | max_line_length | alphanum_fraction |
---|---|---|---|---|---|
d5e414e2fe5d9c7435d4efcb8980286be68000a4 | 1,072 | class Librsync < Formula
desc "Library that implements the rsync remote-delta algorithm"
homepage "https://librsync.github.io/"
url "https://github.com/librsync/librsync/archive/v2.3.1.tar.gz"
sha256 "dbd7eb643665691bdf4009174461463737b19b4814b789baad62914cabfe4569"
license "LGPL-2.1"
bottle do
sha256 arm64_big_sur: "f849718419f4b2bcd7411e9840a3f075ab8880794d342fc6eb3521c454d3ef8d"
sha256 big_sur: "718cdee2aa974cb87367e5fcf26eee51e4d77552a9033622a8c3584e09f99f0e"
sha256 catalina: "eb1526a88a99556f1ae98c7fa008a8c17ddbe2efe2e55de0192ccbccf9840937"
sha256 mojave: "27f16505bf1b37a9701d70701e708451d47743a3b4d453dcc1d4048065af05af"
sha256 high_sierra: "20fd33975022b7caaa12b9906b726f1b9dd9a792d9291170e72298a351650610"
end
depends_on "cmake" => :build
depends_on "popt"
def install
system "cmake", ".", *std_cmake_args
system "make", "install"
man1.install "doc/rdiff.1"
man3.install "doc/librsync.3"
end
test do
assert_match version.to_s, shell_output("#{bin}/rdiff -V")
end
end
| 35.733333 | 92 | 0.772388 |
33669e940fdf8348a28441fa8598e1617c9be4c2 | 1,638 | describe Graphlyte do
it "should support integers" do
query = Graphlyte.query do
arguments(int: 1) do
id
end
end
expect(query.to_s).to eql(<<~STRING)
{
arguments(int: 1) {
id
}
}
STRING
end
it "should support floats" do
query = Graphlyte.query do |q|
q.arguments(float: 1.01) do |i|
i.id
end
end
expect(query.to_s).to eql(<<~STRING)
{
arguments(float: 1.01) {
id
}
}
STRING
end
it "should support strings" do
query = Graphlyte.query do |q|
q.arguments(string: "hello") do |i|
i.id
end
end
expect(query.to_s).to eql(<<~STRING)
{
arguments(string: "hello") {
id
}
}
STRING
end
it "should support lists" do
query = Graphlyte.query do |q|
q.arguments(list: [1, 2, "string"]) do |i|
i.id
end
end
expect(query.to_s).to eql(<<~STRING)
{
arguments(list: [1, 2, "string"]) {
id
}
}
STRING
end
it "should support hashes" do
query = Graphlyte.query do |q|
q.arguments(object: { one: 2, three: [1, 2] }) do |i|
i.id
end
end
expect(query.to_s).to eql(<<~STRING)
{
arguments(object: { one: 2, three: [1, 2] }) {
id
}
}
STRING
end
it "should handle booleans" do
query = Graphlyte.query do |q|
q.arguments(boolean: true) do |i|
i.id
end
end
expect(query.to_s).to eql(<<~STRING)
{
arguments(boolean: true) {
id
}
}
STRING
end
end | 18 | 59 | 0.503663 |
180b97a32ff6ac88efe483d165c5d8c3fdd0224d | 2,104 | # frozen_string_literal: true
require 'set'
require 'middleman-core/core_extensions/data/proxies/base'
module Middleman
module CoreExtensions
module Data
module Proxies
class ArrayProxy < BaseProxy
WRAPPED_LIST_METHODS = Set.new %i[each each_with_index select sort shuffle reverse rotate sample]
def respond_to_missing?(name, *)
self.class.const_get(:WRAPPED_LIST_METHODS).include?(name) || super
end
def method_missing(name, *args, &block)
if self.class.const_get(:WRAPPED_LIST_METHODS).include?(name)
log_access(:__full_access__)
return wrapped_array.send(name, *args, &block)
end
super
end
def fetch(index, default = (_missing_default = true), &block)
wrap_data index, @data.fetch(index, default, &block)
end
def slice(arg, length = (missing_length = true))
if missing_length
if arg.is_a?(Range)
log_access(:__full_access__)
@data.slice(arg)
else
relative_index = (@data.size + arg) % @data.size
wrap_data(relative_index, @data[relative_index])
end
else
log_access(:__full_access__)
@data.slice(arg, length)
end
end
alias [] slice
def first(length = (missing_length = true))
if missing_length || length == 1
slice(0)
else
slice(0, length)
end
end
def last(length = (missing_length = true))
if missing_length || length == 1
slice(-1)
else
slice(size - length, length)
end
end
private
def wrapped_array
@wrapped_array ||= begin
i = 0
@data.map do |d|
wrap_data(i, d).tap { i += 1 }
end
end
end
end
end
end
end
end
| 27.324675 | 107 | 0.515684 |
5d1dcaeba203a713900e36e96c0a1a04eec9f54c | 7,493 | require './enums'
describe Enumerable do
array = %w[Sharon Leo Leila Brian Arun]
num_array = [10, 20, 30, 5, 7, 9, 3]
hash = { min: 2, max: 5 }
describe '#my_each' do
context 'if block not given' do
it 'returns enum' do
expect(array.my_each).to be_an Enumerator
end
end
context 'if block given' do
context 'when self is an array' do
it 'yields item' do
arr = array.my_each { |friend| friend }
expect(arr).to eq(%w[Sharon Leo Leila Brian Arun])
end
end
context 'when self is a hash' do
it 'yields item' do
result = []
hash.my_each { |key, value| result.push("k: #{key}, v: #{value}") }
expect(result).to eq(['k: min, v: 2', 'k: max, v: 5'])
end
end
context 'when self is a range' do
it 'yields items in that range' do
arr = array[3..-1].my_each { |item| item }
expect(arr).to eq(%w[Brian Arun])
end
end
end
end
describe '#my_each_with_index' do
context 'if block not given' do
it 'returns enum' do
expect(array.my_each_with_index).to be_an Enumerator
end
end
context 'if block given' do
context 'when self is an array' do
it 'yields index' do
arr = []
array.my_each_with_index { |_item, index| arr.push(index) }
expect(arr).to eq([0, 1, 2, 3, 4])
end
end
context 'when self is a hash' do
it 'yields item with index' do
arr = []
array.my_each_with_index { |friend, index| arr.push("#{index}: #{friend}") if index.odd? }
expect(arr).to eq(['1: Leo', '3: Brian'])
end
end
context 'when self is a range' do
it 'yields items in that range' do
arr = []
array[3..-1].my_each_with_index { |item, index| arr.push("#{index}: #{item}") }
expect(arr).to eq(['0: Brian', '1: Arun'])
end
end
end
end
describe '#my_select' do
context 'if block not given' do
it 'returns enum' do
expect(array.my_select).to be_an Enumerator
end
end
context 'if block given' do
context 'when self is an array' do
it 'yields selected items' do
arr = []
array.my_select { |friend| arr.push(friend) if friend != 'Brian' }
expect(arr).to eq(%w[Sharon Leo Leila Arun])
end
end
context 'when self is a hash' do
it 'yields selected items with their index' do
result = []
hash.my_select { |k, v| result.push(k, v) if v > 4 }
expect(result).to eq([:max, 5])
end
end
context 'when self is a range' do
it 'yields selected items within that range' do
arr = []
array[3..-1].my_select { |friend| arr.push(friend) if friend != 'Brian' }
expect(arr).to eq(['Arun'])
end
end
end
end
describe '#my_all?' do
context 'if block given' do
it "returns false if all the items doesn't yield true" do
expect(%w[Sharon Leo Leila Brian Arun].my_all? { |word| word.length > 5 }).to be(false)
end
end
context 'if block not given' do
context 'if the argument is a class' do
it "returns false if all the items doesn't satisfy a given condition" do
expect(%w[Sharon Leo Leila Brian Arun].my_all?(Float)).to be(false)
end
end
context 'if the argument is a Regex' do
it "returns false if all the items doesn't satisfy a given condition" do
expect(%w[Sharon Leo Leila Brian Arun].my_all?(/a/)).to be(false)
end
end
end
end
describe '#my_any?' do
context 'if block given' do
it 'returns true if any of the items yields true' do
expect(%w[Sharon Leo Leila Brian Arun].my_any? { |word| word.length > 5 }).to be(true)
end
end
context 'if block not given' do
context 'if the argument is a Regex' do
it 'returns true if any of the items satisfies a given condition' do
expect(%w[Sharon Leo Leila Brian Arun].my_any?(/a/)).to be(true)
end
end
context 'if the argument is a class' do
it 'returns true if any of the items satisfies a given condition' do
expect(%w[Sharon Leo Leila Brian Arun].my_any?(String)).to be(true)
end
end
end
end
describe '#my_none?' do
context 'if block given' do
it 'returns true if none of the items yield true' do
expect(%w[Sharon Leo Leila Brian Arun].my_none? { |word| word.length > 6 }).to be(true)
end
end
context 'if block not given' do
context 'if the argument is a Regex' do
it 'returns true if none of the items satisfy a given condition' do
expect(%w[Sharon Leo Leila Brian Arun].my_none?(/c/)).to be(true)
end
end
context 'if the argument is a Class' do
it 'returns true if none of the items satisfy a given condition' do
expect(%w[Sharon Leo Leila Brian Arun].my_none?(Numeric)).to be(true)
end
end
end
end
describe '#my_count' do
context 'if block given' do
it 'returns number of items satisfying the condition' do
expect(%w[Sharon Leo Leila Brian Arun].my_count { |word| word.length > 5 }).to eq(1)
end
end
context 'if block and argument given' do
it 'returns number of arguments satisfying the condition' do
expect(%w[Sharon Leo Leila Brian Arun Leon Leone].my_count('Leo') { |word| word == 'Leo' }).to eq(1)
end
end
context 'if block not given, but argument given' do
it 'returns count of that particular argument' do
expect(%w[Sharon Leo Leila Leo Brian Arun Leo].my_count('Leo')).to eq(3)
end
end
context 'if block and argument both not given' do
it 'returns size of the instance' do
expect(%w[Sharon Leo Leila Brian Arun].my_count).to eq(5)
end
end
end
describe '#my_map' do
context 'if block not given' do
it 'returns enum' do
expect(array.my_map).to be_an Enumerator
end
end
context 'if block given' do
it 'returns a new array applying the given operation to the items' do
arr = array.my_map(&:upcase)
expect(arr).to eq(%w[SHARON LEO LEILA BRIAN ARUN])
end
end
end
describe '#my_inject' do
context 'if block given and arg given' do
context 'for a number array with arg' do
it 'returns numbers that pass our filter' do
num = []
num_array.my_inject([]) { |_result, element| num << element.to_s if element > 9 }
expect(num).to eq(%w[10 20 30])
end
end
context 'for an array of strings without arg' do
it 'returns items that pass our filter' do
longest = array.my_inject { |memo, word| memo.size > word.size ? memo : word }
expect(longest).to eq('Sharon')
end
end
end
context 'if block not given, but argument given' do
context 'plus symbol given' do
it 'returns sum of items' do
num = num_array.my_inject(:+)
expect(num).to eq(84)
end
end
context 'multiplication symbol given' do
it 'returns multiplication of items' do
num = num_array.my_inject(:*)
expect(num).to eq(5_670_000)
end
end
end
end
end
| 31.75 | 108 | 0.583344 |
d5949a25d6e3a700540d005810bd76c76bb5f35e | 1,961 | # This should probably be multiple patches
class PatchFixCollectorNumbers < Patch
def call
each_set do |set|
fix_numbers(set)
end
end
private
def fix_numbers(set)
set_code = set["code"]
set_name = set["name"]
cards = cards_by_set[set_code]
case set_code
when "van"
cards.sort_by{|c| c["multiverseid"]}.each_with_index{|c,i| c["number"] = "#{i+1}"}
when "hop", "arc", "pc2", "pca", "e01"
cards.each do |card|
unless (card["types"] & ["Plane", "Phenomenon", "Scheme"]).empty?
card["number"] = (1000 + card["number"].to_i).to_s
end
end
when "bfz", "ogw"
# No idea if this is correct
basic_land_cards = cards.select{|c| (c["supertypes"]||[]) .include?("Basic") }
basic_land_cards = basic_land_cards.sort_by{|c| [c["number"], c["multiverseid"]]}
basic_land_cards.each_slice(2) do |a,b|
raise unless a["number"] == b["number"]
b["number"] += "A"
end
when "ust"
cards_with_variants = %W[3 12 41 49 54 67 82 98 103 113 145 147 165]
variant_counter = {}
cards.each do |card_data|
number = card_data["number"]
next unless cards_with_variants.include?(number)
variant_counter[number] = variant_counter[number] ? variant_counter[number].next : "A"
card_data["number"] = number + variant_counter[number]
end
when "me4"
# Gatherer numbers use same number for 4 alt art variants of each Urza's land
# add A B C D to them
cards.group_by{|c| c["number"]}.each do |number, variants|
next if variants.size == 1
variants.sort_by{|c| c["multiverseid"]}.each_with_index do |card, i|
card["number"] += "ABCD"[i]
end
end
# These are somewhat silly orders
when "s00", "rqs"
cards
.sort_by{|c| [c["name"], c["multiverseid"]] }
.each_with_index{|c,i| c["number"] = "#{i+1}"}
end
end
end
| 33.237288 | 94 | 0.594085 |
1c4de201351da52bf38d35dbb9eecfccafd18647 | 1,142 | class Api::V1::Widget::ConversationsController < Api::V1::Widget::BaseController
include Events::Types
protect_from_forgery with: :null_session
def index
@conversation = conversation
end
def update_last_seen
return head :ok if conversation.nil?
conversation.contact_last_seen_at = DateTime.now.utc
conversation.save!
head :ok
end
def transcript
if permitted_params[:email].present? && conversation.present?
ConversationReplyMailer.conversation_transcript(
conversation,
permitted_params[:email]
)&.deliver_later
end
head :ok
end
def toggle_typing
return head :ok if conversation.nil?
case permitted_params[:typing_status]
when 'on'
trigger_typing_event(CONVERSATION_TYPING_ON)
when 'off'
trigger_typing_event(CONVERSATION_TYPING_OFF)
end
head :ok
end
private
def trigger_typing_event(event)
Rails.configuration.dispatcher.dispatch(event, Time.zone.now, conversation: conversation, user: @contact)
end
def permitted_params
params.permit(:id, :typing_status, :website_token, :email)
end
end
| 22.392157 | 109 | 0.718914 |
1c527601dbc0741bc19bfc5e32ec72156a63b441 | 1,994 | # frozen_string_literal: true
namespace :pgdump_scrambler do
default_config_path = 'config/pgdump_scrambler.yml'
desc 'create config from database'
task config_from_db: :environment do
config =
if File.exist?(default_config_path)
puts "#{default_config_path} found!\nmerge existing config with config from database"
PgdumpScrambler::Config
.read_file(default_config_path)
.update_with(PgdumpScrambler::Config.from_db)
else
puts "craete config from database"
PgdumpScrambler::Config.from_db
end
config.write_file(default_config_path)
end
desc 'check if new columns exist'
task check: :environment do
config = PgdumpScrambler::Config
.read_file(default_config_path)
.update_with(PgdumpScrambler::Config.from_db)
unspecified_columns = config.unspecified_columns
count = unspecified_columns.sum { |_, columns| columns.size }
if count > 0
unspecified_columns.each_key do |table_name|
puts "#{table_name}:"
unspecified_columns[table_name].each do |column_name|
puts " #{column_name}"
end
end
puts "#{count} unspecified columns found!"
exit 1
else
puts "No unspecified columns found."
end
end
desc 'create scrambled dump'
task dump: :environment do
config = PgdumpScrambler::Config.read_file(default_config_path)
PgdumpScrambler::Dumper.new(config).run
end
desc 'upload to s3'
task s3_upload: :environment do
config = PgdumpScrambler::Config.read_file(default_config_path)
uploader = PgdumpScrambler::S3Uploader.new(
s3_path: File.join(config.resolved_s3['prefix'], File::basename(config.dump_path)),
local_path: config.dump_path,
region: config.resolved_s3['region'],
bucket: config.resolved_s3['bucket'],
access_key_id: config.resolved_s3['access_key_id'],
secret_key: config.resolved_s3['secret_key']
)
uploader.run
end
end | 33.233333 | 93 | 0.700602 |
e970cbe068400f960e323c1e739e1ef0725503a0 | 2,459 | # Small Worlds Discord Bot
require 'discordrb'
require 'yaml'
# Constants
require './rolesconfig.rb'
# App files
require './app/admin.rb'
require './app/bearing_plotter.rb'
require './app/context.rb'
require './app/countdown.rb'
require './app/custom_commands.rb'
require './app/deaths.rb'
require './app/emoji_role.rb'
require './app/gravity.rb'
require './app/lists.rb'
require './app/max_jump.rb'
require './app/move.rb'
require './app/roleplay.rb'
require './app/roles.rb'
require './app/tableflip.rb'
require './app/voice.rb'
require './app/utility.rb'
require './app/waterworldlogging.rb'
settings = YAML.load_file('botconfig.yml')
if settings['token'].nil? || settings['client_id'].nil?
if ENV['TOKEN'].nil? || ENV['CLIENTID'].nil?
puts '`token` and `client_id` are required! Please copy `botconfig.yml.example` to `botconfig.yml` and edit it with your credentials.'
exit
else
settings['token'] = ENV['TOKEN']
settings['client_id'] = ENV['CLIENTID']
end
end
settings['prefix_char'] ||= '&'
case settings['log_mode'].downcase
when 'debug'
log_mode = :debug
when 'verbose'
log_mode = :verbose
when 'quiet'
log_mode = :quiet
when 'silent'
log_mode = :silent
else
log_mode = :normal
end
bot = Discordrb::Commands::CommandBot.new token: settings['token'],
client_id: settings['client_id'],
prefix: settings['prefix_char'],
advanced_functionality: false,
log_mode: log_mode,
command_doesnt_exist_message: "%command% is not a valid command! Use &help to get a list of current commands.",
no_permission_message: "Error! Insufficient permissions to use command."
bot.bucket :memes, limit: 3, time_span: 60, delay: 10
bot.bucket :slowdown, limit: 2, time_span: 2, delay: 300
puts "Invite URL is #{bot.invite_url}"
# Load all bot modules
bot.include! Admin
bot.include! BearingPlotter
bot.include! Countdown
bot.include! CustomCommands
bot.include! Deaths
bot.include! EmojiRole
bot.include! GravityCalculator
bot.include! Listing
bot.include! MaxJumpCalculator
bot.include! Move
bot.include! NoContext
bot.include! PublicRoles
bot.include! Roleplay
bot.include! TableFlip
bot.include! Voice
bot.include! Utility
bot.include! WaterWorld
bot.run unless ARGV[0] == "test"
| 28.929412 | 153 | 0.661244 |
03acda5f16b6b0fff1e7e0950b57efec5522b7ab | 252 | require 'spec_helper'
describe Virtus::Coercion::Integer, '.to_float' do
subject { object.to_float(fixnum) }
let(:object) { described_class }
let(:fixnum) { 1 }
it { should be_instance_of(Float) }
it { should eql(1.0) }
end
| 19.384615 | 50 | 0.638889 |
5d6805a991acef8bca9e9360fc69fd83c8ae91e4 | 94 | # NOTE: here for Bundler to auto-load the gem unless :require => false
require 'arjdbc/hsqldb' | 47 | 70 | 0.755319 |
ed95af364cfef83300d2d49a05871b49d468d783 | 1,521 | require "helper"
require "bundles/inspec-supermarket/target"
require "bundles/inspec-supermarket/api"
describe "Inspec::Fetcher" do
it "loads the local fetcher for this file" do
res = Inspec::Fetcher::Registry.resolve(__FILE__)
_(res).must_be_kind_of Inspec::Fetcher::Local
end
describe "without a source specified" do
let(:mock_open) do
m = Minitest::Mock.new
m.expect :meta, { "content-type" => "application/gzip" }
m.expect :read, "fake content"
m
end
before do
Supermarket::API.expects(:exist?).returns(true)
Supermarket::API.expects(:find).returns({ "tool_source_url" => "http://mock-url" })
end
it "defaults to supermarket if only a name is given" do
res = Inspec::Fetcher::Registry.resolve({ name: "mock/test-profile" })
res.expects(:open).returns(mock_open)
_(res).must_be_kind_of Inspec::Fetcher::Url
_(res.resolved_source[:url]).must_equal("http://mock-url")
end
it "ignores keys that might have come along for the ride" do
res = Inspec::Fetcher::Registry.resolve({ name: "mock/test-profile", cwd: "/tmp/inspec-test", cache: "ancache", backend: "test-backend" })
_(res).must_be_kind_of Inspec::Fetcher::Url
end
end
it "is able to handle Windows paths" do
# simulate a local windows path
file = __FILE__
file.tr!("/", '\\')
res = Inspec::Fetcher::Registry.resolve(file)
_(res).must_be_kind_of Inspec::Fetcher::Local
_(res.target).must_equal __FILE__
end
end
| 33.065217 | 144 | 0.670611 |
d52a7913fa11c7309770034b305cbffd0baef30e | 1,536 | # -*- encoding: utf-8 -*-
# stub: rspec-expectations 2.14.5 ruby lib
Gem::Specification.new do |s|
s.name = "rspec-expectations"
s.version = "2.14.5"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Steven Baker", "David Chelimsky"]
s.date = "2014-02-01"
s.description = "rspec expectations (should[_not] and matchers)"
s.email = "[email protected]"
s.homepage = "http://github.com/rspec/rspec-expectations"
s.licenses = ["MIT"]
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubyforge_project = "rspec"
s.rubygems_version = "2.1.9"
s.summary = "rspec-expectations-2.14.5"
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<diff-lcs>, ["< 2.0", ">= 1.1.3"])
s.add_development_dependency(%q<rake>, ["~> 10.0.0"])
s.add_development_dependency(%q<cucumber>, ["~> 1.1.9"])
s.add_development_dependency(%q<aruba>, ["~> 0.5"])
else
s.add_dependency(%q<diff-lcs>, ["< 2.0", ">= 1.1.3"])
s.add_dependency(%q<rake>, ["~> 10.0.0"])
s.add_dependency(%q<cucumber>, ["~> 1.1.9"])
s.add_dependency(%q<aruba>, ["~> 0.5"])
end
else
s.add_dependency(%q<diff-lcs>, ["< 2.0", ">= 1.1.3"])
s.add_dependency(%q<rake>, ["~> 10.0.0"])
s.add_dependency(%q<cucumber>, ["~> 1.1.9"])
s.add_dependency(%q<aruba>, ["~> 0.5"])
end
end
| 36.571429 | 105 | 0.617188 |
e24e81e6e68fd8cb4dcc7fb7fe477d650fbbfb8c | 341 | module PViewer
module Web
module Commands
class DisplayScheme < Basic
REQUIRED_ARGS = [
*REQUIRED_ARGS,
:data,
:details
].freeze
ALLOWED_ARGS = ALLOWED_ARGS.merge(
details: Hash,
data: Array
).freeze
end
end
end
end
| 17.947368 | 43 | 0.495601 |
ab054f31ec76101aea7c1ff8fe2acb3336ad9f68 | 10,407 | # frozen_string_literal: true
require 'spec_helper'
# maps AIX release major fact value to the known AIX version
AIX_VERSION = {
'6100': '6.1',
'7100': '7.2',
'7200': '7.2',
}.freeze
def redhat_family_supported_os
on_supported_os(
supported_os: [
{
'operatingsystem' => 'RedHat',
"operatingsystemrelease": %w[5 6 7 8],
},
],
)
end
describe 'puppet_agent' do
package_version = '6.5.4'
global_params = { package_version: package_version }
def global_facts(facts, os)
facts.merge(
if os =~ %r{sles}
{
is_pe: true,
operatingsystemmajrelease: facts[:operatingsystemrelease].split('.')[0],
}
elsif os =~ %r{solaris}
{
is_pe: true,
}
elsif os =~ %r{aix}
{
is_pe: true,
platform_tag: "aix-#{AIX_VERSION[facts.dig(:os, 'release', 'major')]}-power",
}
elsif os =~ %r{windows}
{
puppet_agent_appdata: 'C:\\ProgramData',
puppet_confdir: 'C:\\ProgramData\\Puppetlabs\\puppet\\etc',
env_temp_variable: 'C:/tmp',
puppet_agent_pid: 42,
puppet_config: 'C:\\puppet.conf',
}
else
{}
end,
).merge(servername: 'master.example.vm')
end
context 'package version' do
context 'valid' do
['5.5.15-1.el7', '5.5.15.el7', '6.0.9.3.g886c5ab'].each do |version|
redhat_family_supported_os.each do |os, facts|
let(:facts) { global_facts(facts, os) }
context "on #{os}" do
let(:params) { { package_version: version } }
it { is_expected.to compile.with_all_deps }
it { expect { catalogue }.not_to raise_error }
it { is_expected.to contain_class('puppet_agent::prepare').with_package_version(version) }
it { is_expected.to contain_class('puppet_agent::install').with_package_version(version) }
end
end
end
end
context 'invalid' do
['5.5.15x-1.el7', '5.5.15a+a.el7', '6.x0.9.3.g886c5abx'].each do |version|
redhat_family_supported_os.each do |os, facts|
let(:facts) { global_facts(facts, os) }
let(:params) { { package_version: version } }
context "on #{os}" do
it { expect { catalogue }.to raise_error(%r{invalid version}) }
end
end
end
end
end
context 'supported_operating systems' do
on_supported_os.each do |os, facts|
context "on #{os}" do
let(:facts) do
global_facts(facts, os)
end
context 'when the aio_agent_version fact is undefined' do
let(:facts) do
global_facts(facts, os).merge(aio_agent_version: nil)
end
it { is_expected.not_to compile }
end
if os !~ %r{sles|solaris|aix}
context 'package_version is undef by default' do
let(:facts) do
global_facts(facts, os).merge(is_pe: false)
end
it { is_expected.to contain_class('puppet_agent').with_package_version(nil) }
end
end
context 'package_version is undef if pe_compiling_server_aio_build is not defined' do
let(:facts) do
global_facts(facts, os).merge(is_pe: true)
end
it { is_expected.to contain_class('puppet_agent').with_package_version(nil) }
end
context 'package_version is same as master when set to auto' do
let(:params) { { package_version: 'auto' } }
let(:node_params) { { serverversion: '7.6.5' } }
it { is_expected.to contain_class('puppet_agent::prepare').with_package_version('7.6.5') }
it { is_expected.to contain_class('puppet_agent::install').with_package_version('7.6.5') }
end
end
end
end
context 'supported_operating systems' do
on_supported_os.each do |os, facts|
context "on #{os}" do
let(:facts) do
global_facts(facts, os).merge(is_pe: true)
end
before(:each) do
Puppet::Parser::Functions.newfunction(:pe_build_version, type: :rvalue) { |_args| '2000.0.0' }
Puppet::Parser::Functions.newfunction(:pe_compiling_server_aio_build, type: :rvalue) { |_args| '1.10.100' }
Puppet::Parser::Functions.newfunction(:pe_compiling_server_version, type: :rvalue) { |_args| '2.20.200' }
end
context 'package_version is initialized automatically' do
it { is_expected.to contain_class('puppet_agent').with_package_version(nil) }
end
context 'On a PE infrastructure node puppet_agent does nothing' do
before(:each) do
facts['pe_server_version'] = '2000.0.0'
end
it { is_expected.not_to contain_class('puppet_agent::prepare') }
it { is_expected.not_to contain_class('puppet_agent::install') }
end
end
end
end
context 'supported operating systems' do
on_supported_os.each do |os, facts|
context "on #{os}" do
let(:facts) do
global_facts(facts, os)
end
before(:each) do
if os =~ %r{sles|solaris|aix}
# Need to mock the PE functions
Puppet::Parser::Functions.newfunction(:pe_build_version, type: :rvalue) do |_args|
'2000.0.0'
end
Puppet::Parser::Functions.newfunction(:pe_compiling_server_aio_build, type: :rvalue) do |_args|
'1.10.100'
end
end
end
context 'invalid package_versions' do
['1.3.5banana', '1.2', '10-q-5'].each do |version|
let(:params) { { package_version: version } }
it { expect { catalogue }.to raise_error(%r{invalid version}) }
end
end
context 'valid package_versions' do
['1.4.0.30.g886c5ab', '1.4.0', '1.4.0-10', '1.4.0.10'].each do |version|
let(:params) { { package_version: version } }
it { is_expected.to compile.with_all_deps }
it { expect { catalogue }.not_to raise_error }
end
end
[{}, { service_names: [] }].each do |params|
context "puppet_agent class with install_options with params: #{params}" do
let(:params) do
global_params.merge(
install_options: ['OPTION1=value1', 'OPTION2=value2'],
)
end
let(:expected_package_install_options) do
if os =~ %r{aix}
['--ignoreos', 'OPTION1=value1', 'OPTION2=value2']
else
['OPTION1=value1', 'OPTION2=value2']
end
end
let(:expected_class_install_options) { ['OPTION1=value1', 'OPTION2=value2'] }
it { is_expected.to compile.with_all_deps }
it { is_expected.to contain_class('puppet_agent::install').with_install_options(expected_class_install_options) }
if os !~ %r{windows|solaris-10}
it { is_expected.to contain_package('puppet-agent') .with_install_options(expected_package_install_options) }
end
if os =~ %r{solaris-10}
it do
is_expected.to contain_exec('solaris_install script')
.with_command(
'/usr/bin/ctrun -l none /tmp/solaris_install.sh 298 2>&1 > /tmp/solaris_install.log &',
)
end
end
end
end
[{}, { service_names: [] }].each do |params|
context "puppet_agent class without any parameters(params: #{params})" do
let(:params) { params.merge(global_params) }
it { is_expected.to compile.with_all_deps }
it { is_expected.to contain_class('puppet_agent') }
it { is_expected.to contain_class('puppet_agent::params') }
it { is_expected.to contain_class('puppet_agent::prepare') }
it { is_expected.to contain_class('puppet_agent::install').that_requires('Class[puppet_agent::prepare]') }
if facts[:osfamily] == 'Debian'
deb_package_version = package_version + '-1' + facts[:lsbdistcodename]
it { is_expected.to contain_package('puppet-agent').with_ensure(deb_package_version) }
elsif facts[:osfamily] == 'Solaris'
if facts[:operatingsystemmajrelease] == '11'
it { is_expected.to contain_package('puppet-agent').with_ensure('6.5.4') }
else
it do
is_expected.to contain_exec('solaris_install script')
.with_command(
'/usr/bin/ctrun -l none /tmp/solaris_install.sh 298 2>&1 > /tmp/solaris_install.log &',
)
end
end
elsif facts[:osfamily] == 'windows'
# Windows does not contain any Package resources
else
it { is_expected.to contain_package('puppet-agent').with_ensure(package_version) }
end
unless os =~ %r{windows}
if os !~ %r{sles|solaris|aix}
it { is_expected.to contain_class('puppet_agent::service').that_requires('Class[puppet_agent::install]') }
end
end
# Windows platform does not use Service resources; their services
# are managed by the MSI installer.
unless facts[:osfamily] == 'windows'
if params[:service_names].nil? && os !~ %r{sles|solaris|aix}
it { is_expected.to contain_service('puppet') }
else
it { is_expected.not_to contain_service('puppet') }
it { is_expected.not_to contain_service('mcollective') }
end
end
end
end
end
end
end
context 'unsupported operating system' do
describe 'puppet_agent class without any parameters on Solaris/Nexenta' do
let(:facts) do
{
osfamily: 'Solaris',
operatingsystem: 'Nexenta',
puppet_ssldir: '/dev/null/ssl',
puppet_config: '/dev/null/puppet.conf',
architecture: 'i386',
}
end
let(:params) { global_params }
it { expect { catalogue }.to raise_error(Puppet::Error, %r{Nexenta not supported}) }
end
end
end
| 34.121311 | 125 | 0.570193 |
ab8b8f7c3f1d1efdbf2265ab869d068f51283785 | 636 | class DjangoCompletion < Formula
desc "Bash completion for Django"
homepage "https://www.djangoproject.com/"
url "https://github.com/django/django/archive/3.2.3.tar.gz"
sha256 "854da43b18bade10f5d0c3a01919fa396037f4630b142ad81aa365295b3bb733"
license "BSD-3-Clause"
head "https://github.com/django/django.git"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
def install
bash_completion.install "extras/django_bash_completion" => "django"
end
test do
assert_match "-F _django_completion",
shell_output("source #{bash_completion}/django && complete -p django-admin.py")
end
end
| 27.652174 | 85 | 0.716981 |
4aab8530903ce43b1fcc9958614c880318cd5cde | 1,814 | # Matt Pruitt
# Ruby Library for working with Vimeo
# Based on the sample PHP Vimeo API
require 'digest/md5'
require 'net/http'
require 'rexml/document'
require File.join(File.dirname(__FILE__), %w[Video])
module RBVIMEO
class Vimeo
attr_accessor :api_key, :api_secret
@@API_REST_URL = "http://www.vimeo.com/api/rest"
@@API_AUTH_URL = "http://www.vimeo.com/services/auth/"
@@API_UPLOAD_URL = "http://www.vimeo.com/services/upload/"
# api_key and api_secret should both be generated on www.vimeo.com
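# A minimal usage sketch (the key and secret strings below are illustrative
# placeholders, not real credentials; the video id comes from the examples
# further down in this file):
#   vimeo = RBVIMEO::Vimeo.new("YOUR_API_KEY", "YOUR_API_SECRET")
#   video = vimeo.video(339189)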
def initialize api_key, api_secret
@api_key = api_key
@api_secret = api_secret
end
# @vimeo.generate_url({"method" => "vimeo.videos.getInfo", "read",
# "video_id" => "339189", "api_key" => @vimeo.api_key})
# This example returns a url to the xml for the Vimeo video with id 339189
def generate_url parameters, permissions = nil
url = @@API_REST_URL + "?api_key=" + @api_key
params = parameters.sort
params.each do |param|
url += "&" + param[0].to_s + "=" + param[1].to_s unless param[0].to_s == "api_key"
end
url += "&api_sig=" + generate_signature(parameters)
return url
end
# parameters is a hash
def generate_signature parameters
temp = ''
params = parameters.sort
params.each do |array|
temp += array[0].to_s + array[1].to_s
end
signature = @api_secret + temp
Digest::MD5.hexdigest(signature)
end
# Provides easier access to RBVIMEO::Video
# video = @vimeo.video 339189
def video id, xml=nil
vid = Video.new(id, self, xml)
return nil if vid.id == -1
return vid
end
# Provides easier access to RBVIMEO::User
# user = @vimeo.user
def user
return User.new
end
end
end | 29.737705 | 90 | 0.634509 |
b9bb686eff4dc319a9874f02e0244f6189bee856 | 8,554 | require 'spectator/clock'
require 'spectator/counter'
require 'spectator/distribution_summary'
require 'spectator/gauge'
require 'spectator/http'
require 'spectator/meter_id'
require 'spectator/timer'
module Spectator
# Registry to manage a set of meters
class Registry
attr_reader :config, :clock, :publisher, :common_tags
# Initialize the registry using the given config, and clock
# The default clock is the SystemClock
# The config is a Hash which should include:
# :common_tags as a hash with tags that will be added to all metrics
# :frequency the interval at which metrics will be sent to an
# aggregator service, expressed in seconds
# :uri the endpoint for the aggregator service
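# A minimal construction sketch (the tag values and endpoint URL are
# illustrative placeholders, not a real aggregator service):
#   registry = Spectator::Registry.new(
#     common_tags: { 'nf.app' => 'demo' },
#     frequency: 5,
#     uri: 'http://localhost:7101/publish'
#   )
#   registry.counter('server.requests').increment
#   registry.start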
def initialize(config, clock = SystemClock.new)
@config = config
@clock = clock
@meters = {}
@common_tags = to_symbols(config[:common_tags]) || {}
@lock = Mutex.new
@publisher = Publisher.new(self)
end
# Create a new MeterId with the given name, and optional tags
def new_id(name, tags = nil)
MeterId.new(name, tags)
end
# Create or get a Counter with the given id
def counter_with_id(id)
new_meter(id) { |meter_id| Counter.new(meter_id) }
end
# Create or get a Gauge with the given id
def gauge_with_id(id)
new_meter(id) { |meter_id| Gauge.new(meter_id) }
end
# Create or get a DistributionSummary with the given id
def distribution_summary_with_id(id)
new_meter(id) { |meter_id| DistributionSummary.new(meter_id) }
end
# Create or get a Timer with the given id
def timer_with_id(id)
new_meter(id) { |meter_id| Timer.new(meter_id) }
end
# Create or get a Counter with the given name, and optional tags
def counter(name, tags = nil)
counter_with_id(MeterId.new(name, tags))
end
# Create or get a Gauge with the given name, and optional tags
def gauge(name, tags = nil)
gauge_with_id(MeterId.new(name, tags))
end
# Create or get a DistributionSummary with the given name, and optional tags
def distribution_summary(name, tags = nil)
distribution_summary_with_id(MeterId.new(name, tags))
end
# Create or get a Timer with the given name, and optional tags
def timer(name, tags = nil)
timer_with_id(MeterId.new(name, tags))
end
# Get the list of measurements from all registered meters
def measurements
@lock.synchronize do
@meters.values.flat_map(&:measure)
end
end
# Start publishing measurements to the aggregator service
def start
@publisher.start
end
# Stop publishing measurements
def stop
@publisher.stop
end
private
def to_symbols(tags)
return nil if tags.nil?
symbolic_tags = {}
tags.each { |k, v| symbolic_tags[k.to_sym] = v.to_sym }
symbolic_tags
end
def new_meter(meter_id)
@lock.synchronize do
meter = @meters[meter_id.key]
if meter.nil?
meter = yield(meter_id)
@meters[meter_id.key] = meter
end
meter
end
end
end
# Internal class used to publish measurements to an aggregator service
class Publisher
def initialize(registry)
@registry = registry
@started = false
@should_stop = false
@frequency = registry.config[:frequency] || 5
@http = Http.new(registry)
end
def should_start?
if @started
Spectator.logger.info('Ignoring start request. ' \
'Spectator registry already started')
return false
end
@started = true
uri = @registry.config[:uri]
if uri.nil? || uri.empty?
Spectator.logger.info('Ignoring start request since Spectator ' \
'registry has no valid uri')
return false
end
true
end
# Start publishing if the config is acceptable:
# uri is non-nil and not empty
def start
return unless should_start?
Spectator.logger.info 'Starting Spectator registry'
@should_stop = false
@publish_thread = Thread.new do
publish
end
end
# Stop publishing measurements
def stop
unless @started
Spectator.logger.info('Attempting to stop Spectator ' \
'without a previous call to start')
return
end
@should_stop = true
Spectator.logger.info('Stopping spectator')
@publish_thread.kill if @publish_thread
@started = false
Spectator.logger.info('Sending last batch of metrics before exiting')
send_metrics_now
end
ADD_OP = 0
MAX_OP = 10
UNKNOWN_OP = -1
OPS = { count: ADD_OP,
totalAmount: ADD_OP,
totalTime: ADD_OP,
totalOfSquares: ADD_OP,
percentile: ADD_OP,
max: MAX_OP,
gauge: MAX_OP,
activeTasks: MAX_OP,
duration: MAX_OP }.freeze
# Get the operation to be used for the given Measure
# Gauges are aggregated using MAX_OP, counters with ADD_OP
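# For example, a measurement tagged statistic: :count resolves to ADD_OP (0),
# statistic: :max to MAX_OP (10), and an unrecognised statistic to UNKNOWN_OP (-1).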
def op_for_measurement(measure)
stat = measure.id.tags.fetch(:statistic, :unknown)
OPS.fetch(stat, UNKNOWN_OP)
end
# Gauges are sent if they have a value
# Counters if they have a number of increments greater than 0
def should_send(measure)
op = op_for_measurement(measure)
return measure.value > 0 if op == ADD_OP
return !measure.value.nan? if op == MAX_OP
false
end
# Build a string table from the list of measurements
# Unique words are identified, and assigned a number starting from 0 based
# on their lexicographical order
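# Illustrative example (assuming symbol-valued names and tags): with
# common_tags { :'nf.app' => :demo } and a single measurement named :requests
# tagged { statistic: :count }, the resulting table is
#   { :count => 0, :demo => 1, :name => 2, :'nf.app' => 3,
#     :requests => 4, :statistic => 5 }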
def build_string_table(measurements)
common_tags = @registry.common_tags
table = {}
common_tags.each do |k, v|
table[k] = 0
table[v] = 0
end
table[:name] = 0
measurements.each do |m|
table[m.id.name] = 0
m.id.tags.each do |k, v|
table[k] = 0
table[v] = 0
end
end
keys = table.keys.sort
keys.each_with_index do |str, index|
table[str] = index
end
table
end
# Add a measurement to our payload table.
# The serialization for a measurement is:
# - length of tags
# - indexes for the tags based on the string table
# - operation (add (0), max (10))
# - floating point value
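# Continuing the illustrative table above, a counter value of 1.0 appends:
#   3 (tag count), 3, 1 (nf.app => demo), 5, 0 (statistic => count),
#   2, 4 (name => requests), 0 (ADD_OP), 1.0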
def append_measurement(payload, table, measure)
op = op_for_measurement(measure)
common_tags = @registry.common_tags
tags = measure.id.tags
len = tags.length + 1 + common_tags.length
payload.push(len)
common_tags.each do |k, v|
payload.push(table[k])
payload.push(table[v])
end
tags.each do |k, v|
payload.push(table[k])
payload.push(table[v])
end
payload.push(table[:name])
payload.push(table[measure.id.name])
payload.push(op)
payload.push(measure.value)
end
# Generate a payload from the list of measurements
# The payload is an array, with the number of elements in the string table
# The string table, and measurements
def payload_for_measurements(measurements)
table = build_string_table(measurements)
payload = []
payload.push(table.length)
strings = table.keys.sort
payload.concat(strings)
measurements.each { |m| append_measurement(payload, table, m) }
payload
end
# Get a list of measurements that should be sent
def registry_measurements
@registry.measurements.select { |m| should_send(m) }
end
# Send the current measurements to our aggregator service
def send_metrics_now
ms = registry_measurements
if ms.empty?
Spectator.logger.debug 'No measurements to send'
else
payload = payload_for_measurements(ms)
uri = @registry.config[:uri]
Spectator.logger.info "Sending #{ms.length} measurements to #{uri}"
@http.post_json(uri, payload)
end
end
# Publish loop:
# send measurements to the aggregator endpoint ':uri',
# every ':frequency' seconds
def publish
clock = @registry.clock
until @should_stop
start = clock.wall_time
Spectator.logger.info 'Publishing'
send_metrics_now
elapsed = clock.wall_time - start
sleep @frequency - elapsed if elapsed < @frequency
end
Spectator.logger.info 'Stopping publishing thread'
end
end
end
| 28.608696 | 80 | 0.638649 |
5dd8b1ac04d0a024409c28d6be85784accb4e1cf | 556 | require 'spec_helper'
describe 'classroom::course::virtual::intro' do
context "applied to master" do
let(:pre_condition) {
"service { 'pe-puppetserver':
ensure => running,
}" + GLOBAL_PRE
}
let(:node) { 'master.puppetlabs.vm' }
let(:facts) { {
:servername => 'master.puppetlabs.vm'
} }
it { should compile }
end
context "applied to agent" do
let(:node) { 'agent.puppetlabs.vm' }
let(:facts) { {
:servername => 'master.puppetlabs.vm'
} }
it { should compile }
end
end
| 19.857143 | 47 | 0.579137 |
183c87af7f067dcad5276c489e41c36fa8fdd18b | 7,641 | # This file is a tool extension to simply stored. Its intention is to not
# change anything of an existing implementation, but to hugely speed up existing
# implementations. I will illustrate this given the example relation types:
# Person has_many posts and belongs to a group, Post has many comments, Comment belongs to Writer
# If for some reason you have one page where you want to display all of these objects
# (Person, Post, Comment) you can of course create a view returning all these
# objects with a smart key for some handy selection. This will probably end up
# in a controller implementation:
# @persons = view_result.select{|r| r.is_a?(Person)}
# @posts = view_result.select{|r| r.is_a?(Post)}
# @comments = view_result.select{|r| r.is_a?(Comment)}
# @writers = view_result.select{|r| r.is_a?(Writer)}
# This probably is the recommended way of solving problems in most cases, but sometimes
# because you are lazy or for some other obscure reason, you want to use the standard
# SimplyStored behaviour, but not wait too long. For example, if I have a list of 40 persons,
# all having 5 posts that all have 10 comments belonging to a writer, getting all these
# through their standard relations:
# @persons.each{ |person| person.posts.each{ |post| post.comments.each{ |comment| puts person.group.name + comment.writer.name } } }
# This will result in 40 * 5 * 10 + 40 = 2040 queries to the database. Doing exactly the same thing using this script
# will look like:
# @persons = Person.all.include_relation( :group, posts: { comments: :writer } )
# The useless script above will not take 2040 queries but:
# 1 (persons) + 1 (group) + 1 (posts) + 1 (comments) + 1 (writer) = 5 queries
# This makes a difference.
# Issues:
# * Supported relation types:
# * has_many
# * belongs_to
# * belongs_to relations, that have no value (nil) will be queried again.
# That would make the calculation above: 5 + number of persons without a group + number of comments without a writer
# * Little test coverage
class Array
def include_relation(*relations_arg)
return self if empty?
relations = {}
database = nil
database = relations_arg.last.delete(:database) if relations_arg.last.is_a?(Hash) and relations_arg.last.has_key?(:database)
database ||= CouchPotato.database
# Make sure relations is a Hash, process up to two levels for recursion
# keys with value nil will not have a followup
relations_arg.each do |arg|
if arg.is_a?(Symbol)
relations[arg] = nil
elsif arg.is_a?(Hash)
arg.each{|k, v| relations[k] = v}
elsif arg.is_a?(Array)
arg.each do |v|
if v.is_a?(Symbol)
relations[v] = nil
elsif v.is_a?(Hash)
v.each{|k, val| relations[k] = val}
end
end
end
end
# For now, assume an array of only one datatype
klass = first.class
relations.each do |relation, followup|
property = klass.properties.find{|p| p.name == relation}
unless property
warn "Attempt to include_relations #{relation} on #{klass.name} but does not have supporting relation", uplevel: 1
next
end
case property
when SimplyStored::Couch::HasMany::Property then
other_class = property.options[:class_name].constantize
other_property = other_class.properties.find{|p| p.is_a?(SimplyStored::Couch::BelongsTo::Property) && p.options[:class_name] == klass.name}
# TODO: raise when soft_delete is enabled
view_name = "by_#{other_property.name}_id"
raise "Cannot include has_many relation #{other_class.name.underscore.pluralize} on #{klass.name} when view :#{view_name}, key: :#{other_property.name}_id is not defined on #{other_class.name}" unless other_class.views[view_name].present?
relation_objects = other_class.database.view(other_class.send(view_name, keys: collect(&:id))) #not working yet
if followup # deeper nested including
case followup
when Hash
then relation_objects.include_relation(followup.merge(database: database))
else
relation_objects.include_relation(*(Array.wrap(followup) + [{database: database}]))
end
end
for obj in self
found_relation_objects = relation_objects.select{|r| r.send("#{other_property.name}_id") == obj.id}
# Make sure every object has a cached value, no more loading is done
obj.instance_variable_set("@#{relation}", {all: []}) unless obj.instance_variable_get("@#{relation}").try('[]', :all)
if found_relation_objects.any?
obj.instance_variable_get("@#{relation}")[:all] |= found_relation_objects
if reverse_property_name = other_class.properties.find{|p| p.is_a?(SimplyStored::Couch::BelongsTo::Property) && p.options[:class_name] == klass.name }.try(:name)
found_relation_objects.each{|relation_object| relation_object.instance_variable_set("@#{reverse_property_name}", obj)}
end
end
end
when SimplyStored::Couch::BelongsTo::Property then
key = "#{relation}_id"
# Collect keys for all objects
keys = []
each do |obj|
next unless obj.is_a?(SimplyStored::Couch) && obj.respond_to?(key)
keys << obj.send(key)
end
# Get from the database
relation_objects = database.couchrest_database.bulk_load(keys.compact.uniq)
relation_objects = Array.wrap(relation_objects['rows']).map{|r| r['doc']}.compact if relation_objects.is_a?(Hash)
relation_objects ||= [] # Ensure array datatype
if followup # deeper nested including
case followup
when Hash
then relation_objects.include_relation(followup.merge(database: database))
else
relation_objects.include_relation(*(Array.wrap(followup) + [{database: database}]))
end
end
# Set to attributes
each do |obj|
obj.instance_variable_set("@#{relation}", relation_objects.find{|o| o.id == obj.send(key)})
end
when SimplyStored::Couch::HasAndBelongsToMany::Property
if property.options[:storing_keys]
key = "#{relation.to_s.singularize}_ids"
# Collect relation ids for all objects
relation_ids = []
each do |obj|
next unless obj.is_a?(SimplyStored::Couch) && obj.respond_to?(key) && obj.send(key).present?
relation_ids += obj.send(key)
end
# Create unique list of ids, this will optimize stuff and synchronize the object ids
relation_ids = relation_ids.flatten.compact.uniq
# Get from the database
relation_objects = database.couchrest_database.bulk_load(relation_ids)
relation_objects = Array.wrap(relation_objects['rows']).map{|r| r['doc']}.compact if relation_objects.is_a?(Hash)
relation_objects ||= [] # Ensure array datatype
each do |obj|
obj.instance_variable_set("@#{relation}", {all: relation_objects.select{|o| Array.wrap(obj.send(key)).include?(o.id)}})
end
if followup # deeper nested including
case followup
when Hash
then relation_objects.include_relation(followup.merge(database: database))
else
relation_objects.include_relation(*(Array.wrap(followup) + [{database: database}]))
end
end
end
end
end
self
end
# Alias method as plural form
def include_relations(*args)
include_relation(*args)
end
end
| 47.75625 | 246 | 0.664573 |
33b3f7eca00eccc56c1454090bef2df03d62ed01 | 1,251 | require 'rails_helper'
describe Reports::SpUserQuotasReport do
subject { described_class.new }
let(:issuer) { 'foo' }
let(:app_id) { 'app_id' }
it 'is empty' do
expect(subject.call).to eq('[]')
end
it 'runs correctly if the current month is before the fiscal start month of October' do
expect_report_to_run_correctly_for_fiscal_start_year_month_day(2019, 9, 1)
end
it 'runs correctly if the current month is after the fiscal start month of October' do
expect_report_to_run_correctly_for_fiscal_start_year_month_day(2019, 11, 1)
end
def expect_report_to_run_correctly_for_fiscal_start_year_month_day(year, month, day)
ServiceProvider.create(issuer: issuer, friendly_name: issuer, app_id: app_id)
ServiceProviderIdentity.create(user_id: 1, service_provider: issuer, uuid: 'foo1')
ServiceProviderIdentity.create(user_id: 2, service_provider: issuer, uuid: 'foo2')
ServiceProviderIdentity.create(
user_id: 3, service_provider: issuer, uuid: 'foo3',
verified_at: Time.zone.now
)
results = [{ issuer: issuer, app_id: app_id, ial2_total: 1, percent_ial2_quota: 0 }].to_json
Timecop.travel Date.new(year, month, day) do
expect(subject.call).to eq(results)
end
end
end
| 34.75 | 96 | 0.740208 |
7aa5c29c4a56043e5d2a2b23261742c1053de44a | 1,156 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/apis/healthcare_v1/service.rb'
require 'google/apis/healthcare_v1/classes.rb'
require 'google/apis/healthcare_v1/representations.rb'
module Google
module Apis
# Cloud Healthcare API
#
# Manage, store, and access healthcare data in Google Cloud Platform.
#
# @see https://cloud.google.com/healthcare
module HealthcareV1
VERSION = 'V1'
REVISION = '20200515'
# View and manage your data across Google Cloud Platform services
AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform'
end
end
end
| 33.028571 | 76 | 0.737889 |
384ec83a4af6d3637eda326e4c7eff714016be69 | 11,843 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/spanner/admin/instance/v1/spanner_instance_admin_pb"
require "google/spanner/admin/instance/v1/spanner_instance_admin_services_pb"
require "google/cloud/spanner/admin/instance/v1/instance_admin"
class ::Google::Cloud::Spanner::Admin::Instance::V1::InstanceAdmin::OperationsTest < Minitest::Test
class ClientStub
attr_accessor :call_rpc_count, :requests
def initialize response, operation, &block
@response = response
@operation = operation
@block = block
@call_rpc_count = 0
@requests = []
end
def call_rpc *args, **kwargs
@call_rpc_count += 1
@requests << @block&.call(*args, **kwargs)
yield @response, @operation if block_given?
@response
end
end
def test_list_operations
# Create GRPC objects.
grpc_response = ::Google::Longrunning::ListOperationsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
filter = "hello world"
page_size = 42
page_token = "hello world"
list_operations_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :list_operations, name
assert_kind_of ::Google::Longrunning::ListOperationsRequest, request
assert_equal "hello world", request["name"]
assert_equal "hello world", request["filter"]
assert_equal 42, request["page_size"]
assert_equal "hello world", request["page_token"]
refute_nil options
end
Gapic::ServiceStub.stub :new, list_operations_client_stub do
# Create client
client = ::Google::Cloud::Spanner::Admin::Instance::V1::InstanceAdmin::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.list_operations({ name: name, filter: filter, page_size: page_size, page_token: page_token }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.list_operations name: name, filter: filter, page_size: page_size, page_token: page_token do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.list_operations ::Google::Longrunning::ListOperationsRequest.new(name: name, filter: filter, page_size: page_size, page_token: page_token) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.list_operations({ name: name, filter: filter, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.list_operations ::Google::Longrunning::ListOperationsRequest.new(name: name, filter: filter, page_size: page_size, page_token: page_token), grpc_options do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, list_operations_client_stub.call_rpc_count
end
end
def test_get_operation
# Create GRPC objects.
grpc_response = ::Google::Longrunning::Operation.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
get_operation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_operation, name
assert_kind_of ::Google::Longrunning::GetOperationRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, get_operation_client_stub do
# Create client
client = ::Google::Cloud::Spanner::Admin::Instance::V1::InstanceAdmin::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_operation({ name: name }) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_operation name: name do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_operation ::Google::Longrunning::GetOperationRequest.new(name: name) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_operation({ name: name }, grpc_options) do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_operation ::Google::Longrunning::GetOperationRequest.new(name: name), grpc_options do |response, operation|
assert_kind_of Gapic::Operation, response
assert_equal grpc_response, response.grpc_op
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_operation_client_stub.call_rpc_count
end
end
def test_delete_operation
# Create GRPC objects.
grpc_response = ::Google::Protobuf::Empty.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
delete_operation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :delete_operation, name
assert_kind_of ::Google::Longrunning::DeleteOperationRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, delete_operation_client_stub do
# Create client
client = ::Google::Cloud::Spanner::Admin::Instance::V1::InstanceAdmin::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.delete_operation({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.delete_operation name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.delete_operation ::Google::Longrunning::DeleteOperationRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.delete_operation({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.delete_operation ::Google::Longrunning::DeleteOperationRequest.new(name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, delete_operation_client_stub.call_rpc_count
end
end
def test_cancel_operation
# Create GRPC objects.
grpc_response = ::Google::Protobuf::Empty.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
cancel_operation_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :cancel_operation, name
assert_kind_of ::Google::Longrunning::CancelOperationRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, cancel_operation_client_stub do
# Create client
client = ::Google::Cloud::Spanner::Admin::Instance::V1::InstanceAdmin::Operations.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.cancel_operation({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.cancel_operation name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.cancel_operation ::Google::Longrunning::CancelOperationRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.cancel_operation({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.cancel_operation ::Google::Longrunning::CancelOperationRequest.new(name: name), grpc_options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, cancel_operation_client_stub.call_rpc_count
end
end
def test_configure
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
client = block_config = config = nil
Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::Spanner::Admin::Instance::V1::InstanceAdmin::Operations.new do |config|
config.credentials = grpc_channel
end
end
config = client.configure do |c|
block_config = c
end
assert_same block_config, config
assert_kind_of ::Google::Cloud::Spanner::Admin::Instance::V1::InstanceAdmin::Operations::Configuration, config
end
end
| 37.477848 | 190 | 0.717132 |
f8a0d7206217dc0ca6c9ff23f108de7e7d53e030 | 193 | Rails.application.routes.draw do
root "home#index"
resources :reviews
devise_for :users, controllers: {
registrations: "users/registrations",
sessions: "users/sessions"
}
end
| 17.545455 | 41 | 0.715026 |
11614ce4045c937bc06b3560820e8acbb8775915 | 50 | require "log_format/engine"
module LogFormat
end
| 10 | 27 | 0.82 |
ed41ff7a0fae08cc8ea216a64bb20d3fdd2781e5 | 20,193 | require 'spec_helper'
describe Commit do
let(:project) { create(:project, :public, :repository) }
let(:commit) { project.commit }
describe 'modules' do
subject { described_class }
it { is_expected.to include_module(Mentionable) }
it { is_expected.to include_module(Participable) }
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(StaticModel) }
end
describe '.lazy' do
set(:project) { create(:project, :repository) }
context 'when the commits are found' do
let(:oids) do
%w(
498214de67004b1da3d820901307bed2a68a8ef6
c642fe9b8b9f28f9225d7ea953fe14e74748d53b
6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9
048721d90c449b244b7b4c53a9186b04330174ec
281d3a76f31c812dbf48abce82ccf6860adedd81
)
end
subject { oids.map { |oid| described_class.lazy(project, oid) } }
it 'batches requests for commits' do
expect(project.repository).to receive(:commits_by).once.and_call_original
subject.first.title
subject.last.title
end
it 'maintains ordering' do
subject.each_with_index do |commit, i|
expect(commit.id).to eq(oids[i])
end
end
end
context 'when not found' do
it 'returns nil as commit' do
commit = described_class.lazy(project, 'deadbeef').__sync
expect(commit).to be_nil
end
end
end
describe '#author', :request_store do
it 'looks up the author in a case-insensitive way' do
user = create(:user, email: commit.author_email.upcase)
expect(commit.author).to eq(user)
end
it 'caches the author' do
user = create(:user, email: commit.author_email)
expect(commit.author).to eq(user)
key = "Commit:author:#{commit.author_email.downcase}"
expect(Gitlab::SafeRequestStore[key]).to eq(user)
expect(commit.author).to eq(user)
end
context 'using eager loading' do
let!(:alice) { create(:user, email: '[email protected]') }
let!(:bob) { create(:user, email: '[email protected]') }
let(:alice_commit) do
described_class.new(RepoHelpers.sample_commit, project).tap do |c|
c.author_email = '[email protected]'
end
end
let(:bob_commit) do
# The commit for Bob uses one of his alternative Emails, instead of the
# primary one.
described_class.new(RepoHelpers.sample_commit, project).tap do |c|
c.author_email = '[email protected]'
end
end
let(:eve_commit) do
described_class.new(RepoHelpers.sample_commit, project).tap do |c|
c.author_email = '[email protected]'
end
end
let!(:commits) { [alice_commit, bob_commit, eve_commit] }
before do
create(:email, user: bob, email: '[email protected]')
end
it 'executes only two SQL queries' do
recorder = ActiveRecord::QueryRecorder.new do
# Running this first ensures we don't run one query for every
# commit.
commits.each(&:lazy_author)
# This forces the execution of the SQL queries necessary to load the
# data.
commits.each { |c| c.author.try(:id) }
end
expect(recorder.count).to eq(2)
end
it "preloads the authors for Commits matching a user's primary Email" do
commits.each(&:lazy_author)
expect(alice_commit.author).to eq(alice)
end
it "preloads the authors for Commits using a User's alternative Email" do
commits.each(&:lazy_author)
expect(bob_commit.author).to eq(bob)
end
it 'sets the author to Nil if an author could not be found for a Commit' do
commits.each(&:lazy_author)
expect(eve_commit.author).to be_nil
end
it 'does not execute SQL queries once the authors are preloaded' do
commits.each(&:lazy_author)
commits.each { |c| c.author.try(:id) }
recorder = ActiveRecord::QueryRecorder.new do
alice_commit.author
bob_commit.author
eve_commit.author
end
expect(recorder.count).to be_zero
end
end
end
describe '#to_reference' do
let(:project) { create(:project, :repository, path: 'sample-project') }
it 'returns a String reference to the object' do
expect(commit.to_reference).to eq commit.id
end
it 'supports a cross-project reference' do
another_project = build(:project, :repository, name: 'another-project', namespace: project.namespace)
expect(commit.to_reference(another_project)).to eq "sample-project@#{commit.id}"
end
end
describe '#reference_link_text' do
let(:project) { create(:project, :repository, path: 'sample-project') }
it 'returns a String reference to the object' do
expect(commit.reference_link_text).to eq commit.short_id
end
it 'supports a cross-project reference' do
another_project = build(:project, :repository, name: 'another-project', namespace: project.namespace)
expect(commit.reference_link_text(another_project)).to eq "sample-project@#{commit.short_id}"
end
end
describe '#title' do
it "returns no_commit_message when safe_message is blank" do
allow(commit).to receive(:safe_message).and_return('')
expect(commit.title).to eq("--no commit message")
end
it 'truncates a message without a newline at natural break to 80 characters' do
message = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec sodales id felis id blandit. Vivamus egestas lacinia lacus, sed rutrum mauris.'
allow(commit).to receive(:safe_message).and_return(message)
expect(commit.title).to eq('Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec sodales id felis…')
end
it "truncates a message with a newline before 80 characters at the newline" do
message = commit.safe_message.split(" ").first
allow(commit).to receive(:safe_message).and_return(message + "\n" + message)
expect(commit.title).to eq(message)
end
it "does not truncates a message with a newline after 80 but less 100 characters" do
message = <<eos
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec sodales id felis id blandit.
Vivamus egestas lacinia lacus, sed rutrum mauris.
eos
allow(commit).to receive(:safe_message).and_return(message)
expect(commit.title).to eq(message.split("\n").first)
end
end
describe '#full_title' do
it "returns no_commit_message when safe_message is blank" do
allow(commit).to receive(:safe_message).and_return('')
expect(commit.full_title).to eq("--no commit message")
end
it "returns entire message if there is no newline" do
message = 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec sodales id felis id blandit. Vivamus egestas lacinia lacus, sed rutrum mauris.'
allow(commit).to receive(:safe_message).and_return(message)
expect(commit.full_title).to eq(message)
end
it "returns first line of message if there is a newLine" do
message = commit.safe_message.split(" ").first
allow(commit).to receive(:safe_message).and_return(message + "\n" + message)
expect(commit.full_title).to eq(message)
end
end
describe 'description' do
it 'returns no_commit_message when safe_message is blank' do
allow(commit).to receive(:safe_message).and_return(nil)
expect(commit.description).to eq('--no commit message')
end
    it 'returns the description of the commit message if the title is less than 100 characters' do
message = <<eos
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec sodales id felis id blandit.
Vivamus egestas lacinia lacus, sed rutrum mauris.
eos
allow(commit).to receive(:safe_message).and_return(message)
expect(commit.description).to eq('Vivamus egestas lacinia lacus, sed rutrum mauris.')
end
    it 'returns the full commit message if the commit title is more than 100 characters' do
message = <<eos
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec sodales id felis id blandit. Vivamus egestas lacinia lacus, sed rutrum mauris.
Vivamus egestas lacinia lacus, sed rutrum mauris.
eos
allow(commit).to receive(:safe_message).and_return(message)
expect(commit.description).to eq(message)
end
end
describe "delegation" do
subject { commit }
it { is_expected.to respond_to(:message) }
it { is_expected.to respond_to(:authored_date) }
it { is_expected.to respond_to(:committed_date) }
it { is_expected.to respond_to(:committer_email) }
it { is_expected.to respond_to(:author_email) }
it { is_expected.to respond_to(:parents) }
it { is_expected.to respond_to(:date) }
it { is_expected.to respond_to(:diffs) }
it { is_expected.to respond_to(:id) }
end
describe '#closes_issues' do
let(:issue) { create :issue, project: project }
let(:other_project) { create(:project, :public) }
let(:other_issue) { create :issue, project: other_project }
let(:committer) { create :user }
before do
project.add_developer(committer)
other_project.add_developer(committer)
end
it 'detects issues that this commit is marked as closing' do
ext_ref = "#{other_project.full_path}##{other_issue.iid}"
allow(commit).to receive_messages(
safe_message: "Fixes ##{issue.iid} and #{ext_ref}",
committer_email: committer.email
)
expect(commit.closes_issues).to include(issue)
expect(commit.closes_issues).to include(other_issue)
end
end
it_behaves_like 'a mentionable' do
subject { create(:project, :repository).commit }
let(:author) { create(:user, email: subject.author_email) }
let(:backref_text) { "commit #{subject.id}" }
let(:set_mentionable_text) do
->(txt) { allow(subject).to receive(:safe_message).and_return(txt) }
end
# Include the subject in the repository stub.
let(:extra_commits) { [subject] }
end
describe '#hook_attrs' do
let(:data) { commit.hook_attrs(with_changed_files: true) }
it { expect(data).to be_a(Hash) }
it { expect(data[:message]).to include('adds bar folder and branch-test text file to check Repository merged_to_root_ref method') }
it { expect(data[:timestamp]).to eq('2016-09-27T14:37:46Z') }
it { expect(data[:added]).to contain_exactly("bar/branch-test.txt") }
it { expect(data[:modified]).to eq([]) }
it { expect(data[:removed]).to eq([]) }
end
describe '#cherry_pick_message' do
let(:user) { create(:user) }
context 'of a regular commit' do
let(:commit) { project.commit('video') }
it { expect(commit.cherry_pick_message(user)).to include("\n\n(cherry picked from commit 88790590ed1337ab189bccaa355f068481c90bec)") }
end
context 'of a merge commit' do
let(:repository) { project.repository }
let(:merge_request) do
create(:merge_request,
source_branch: 'video',
target_branch: 'master',
source_project: project,
author: user)
end
let(:merge_commit) do
merge_commit_id = repository.merge(user,
merge_request.diff_head_sha,
merge_request,
'Test message')
repository.commit(merge_commit_id)
end
context 'that is found' do
before do
# Artificially mark as completed.
merge_request.update(merge_commit_sha: merge_commit.id)
end
it do
expected_appended_text = <<~STR.rstrip
(cherry picked from commit #{merge_commit.sha})
467dc98f Add new 'videos' directory
88790590 Upload new video file
STR
expect(merge_commit.cherry_pick_message(user)).to include(expected_appended_text)
end
end
context "that is existing but not found" do
it 'does not include details of the merged commits' do
expect(merge_commit.cherry_pick_message(user)).to end_with("(cherry picked from commit #{merge_commit.sha})")
end
end
end
end
describe '#reverts_commit?' do
let(:another_commit) { double(:commit, revert_description: "This reverts commit #{commit.sha}") }
let(:user) { commit.author }
it { expect(commit.reverts_commit?(another_commit, user)).to be_falsy }
context 'commit has no description' do
before do
allow(commit).to receive(:description?).and_return(false)
end
it { expect(commit.reverts_commit?(another_commit, user)).to be_falsy }
end
context "another_commit's description does not revert commit" do
before do
allow(commit).to receive(:description).and_return("Foo Bar")
end
it { expect(commit.reverts_commit?(another_commit, user)).to be_falsy }
end
context "another_commit's description reverts commit" do
before do
allow(commit).to receive(:description).and_return("Foo #{another_commit.revert_description} Bar")
end
it { expect(commit.reverts_commit?(another_commit, user)).to be_truthy }
end
context "another_commit's description reverts merged merge request" do
before do
revert_description = "This reverts merge request !foo123"
allow(another_commit).to receive(:revert_description).and_return(revert_description)
allow(commit).to receive(:description).and_return("Foo #{another_commit.revert_description} Bar")
end
it { expect(commit.reverts_commit?(another_commit, user)).to be_truthy }
end
end
describe '#last_pipeline' do
let!(:first_pipeline) do
create(:ci_empty_pipeline,
project: project,
sha: commit.sha,
status: 'success')
end
let!(:second_pipeline) do
create(:ci_empty_pipeline,
project: project,
sha: commit.sha,
status: 'success')
end
it 'returns last pipeline' do
expect(commit.last_pipeline).to eq second_pipeline
end
end
describe '#status' do
context 'without ref argument' do
before do
%w[success failed created pending].each do |status|
create(:ci_empty_pipeline,
project: project,
sha: commit.sha,
status: status)
end
end
it 'gives compound status from latest pipelines' do
expect(commit.status).to eq(Ci::Pipeline.latest_status)
expect(commit.status).to eq('pending')
end
end
context 'when a particular ref is specified' do
let!(:pipeline_from_master) do
create(:ci_empty_pipeline,
project: project,
sha: commit.sha,
ref: 'master',
status: 'failed')
end
let!(:pipeline_from_fix) do
create(:ci_empty_pipeline,
project: project,
sha: commit.sha,
ref: 'fix',
status: 'success')
end
it 'gives pipelines from a particular branch' do
expect(commit.status('master')).to eq(pipeline_from_master.status)
expect(commit.status('fix')).to eq(pipeline_from_fix.status)
end
it 'gives compound status from latest pipelines if ref is nil' do
expect(commit.status(nil)).to eq(pipeline_from_fix.status)
end
end
end
describe '#set_status_for_ref' do
it 'sets the status for a given reference' do
commit.set_status_for_ref('master', 'failed')
expect(commit.status('master')).to eq('failed')
end
end
describe '#participants' do
let(:user1) { build(:user) }
let(:user2) { build(:user) }
let!(:note1) do
create(:note_on_commit,
commit_id: commit.id,
project: project,
note: 'foo')
end
let!(:note2) do
create(:note_on_commit,
commit_id: commit.id,
project: project,
note: 'bar')
end
before do
allow(commit).to receive(:author).and_return(user1)
allow(commit).to receive(:committer).and_return(user2)
end
it 'includes the commit author' do
expect(commit.participants).to include(commit.author)
end
it 'includes the committer' do
expect(commit.participants).to include(commit.committer)
end
it 'includes the authors of the commit notes' do
expect(commit.participants).to include(note1.author, note2.author)
end
end
describe '#uri_type' do
it 'returns the URI type at the given path' do
expect(commit.uri_type('files/html')).to be(:tree)
expect(commit.uri_type('files/images/logo-black.png')).to be(:raw)
expect(project.commit('video').uri_type('files/videos/intro.mp4')).to be(:raw)
expect(commit.uri_type('files/js/application.js')).to be(:blob)
end
it "returns nil if the path doesn't exists" do
expect(commit.uri_type('this/path/doesnt/exist')).to be_nil
expect(commit.uri_type('../path/doesnt/exist')).to be_nil
end
it 'is nil if the path is nil or empty' do
expect(commit.uri_type(nil)).to be_nil
expect(commit.uri_type("")).to be_nil
end
end
describe '.from_hash' do
let(:new_commit) { described_class.from_hash(commit.to_hash, project) }
it 'returns a Commit' do
expect(new_commit).to be_an_instance_of(described_class)
end
it 'wraps a Gitlab::Git::Commit' do
expect(new_commit.raw).to be_an_instance_of(Gitlab::Git::Commit)
end
it 'stores the correct commit fields' do
expect(new_commit.id).to eq(commit.id)
expect(new_commit.message).to eq(commit.message)
end
end
describe '#work_in_progress?' do
['squash! ', 'fixup! ', 'wip: ', 'WIP: ', '[WIP] '].each do |wip_prefix|
it "detects the '#{wip_prefix}' prefix" do
commit.message = "#{wip_prefix}#{commit.message}"
expect(commit).to be_work_in_progress
end
end
it "detects WIP for a commit just saying 'wip'" do
commit.message = "wip"
expect(commit).to be_work_in_progress
end
it "doesn't detect WIP for a commit that begins with 'FIXUP! '" do
commit.message = "FIXUP! #{commit.message}"
expect(commit).not_to be_work_in_progress
end
it "doesn't detect WIP for words starting with WIP" do
commit.message = "Wipout #{commit.message}"
expect(commit).not_to be_work_in_progress
end
end
describe '.valid_hash?' do
it 'checks hash contents' do
expect(described_class.valid_hash?('abcdef01239ABCDEF')).to be true
expect(described_class.valid_hash?("abcdef01239ABCD\nEF")).to be false
expect(described_class.valid_hash?(' abcdef01239ABCDEF ')).to be false
expect(described_class.valid_hash?('Gabcdef01239ABCDEF')).to be false
expect(described_class.valid_hash?('gabcdef01239ABCDEF')).to be false
expect(described_class.valid_hash?('-abcdef01239ABCDEF')).to be false
end
it 'checks hash length' do
expect(described_class.valid_hash?('a' * 6)).to be false
expect(described_class.valid_hash?('a' * 7)).to be true
expect(described_class.valid_hash?('a' * 40)).to be true
expect(described_class.valid_hash?('a' * 41)).to be false
end
end
describe '#merge_requests' do
let!(:project) { create(:project, :repository) }
let!(:merge_request1) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'feature') }
let!(:merge_request2) { create(:merge_request, source_project: project, source_branch: 'merged-target', target_branch: 'feature') }
let(:commit1) { merge_request1.merge_request_diff.commits.last }
let(:commit2) { merge_request1.merge_request_diff.commits.first }
it 'returns merge_requests that introduced that commit' do
expect(commit1.merge_requests).to contain_exactly(merge_request1, merge_request2)
expect(commit2.merge_requests).to contain_exactly(merge_request1)
end
end
end
| 32.727715 | 159 | 0.660377 |
e8fa394aed26c883930f0f51eda00869b47e9531 | 1,479 | # frozen_string_literal: false
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file in README.md and
# CONTRIBUTING.md located at the root of this package.
#
# ----------------------------------------------------------------------------
module GoogleInSpec
module SQL
module Property
class DatabaseInstanceSettingsDatabaseFlags
attr_reader :name
attr_reader :value
def initialize(args = nil, parent_identifier = nil)
return if args.nil?
@parent_identifier = parent_identifier
@name = args['name']
@value = args['value']
end
def to_s
"#{@parent_identifier} DatabaseInstanceSettingsDatabaseFlags"
end
end
class DatabaseInstanceSettingsDatabaseFlagsArray
def self.parse(value, parent_identifier)
return if value.nil?
return DatabaseInstanceSettingsDatabaseFlags.new(value, parent_identifier) unless value.is_a?(::Array)
value.map { |v| DatabaseInstanceSettingsDatabaseFlags.new(v, parent_identifier) }
end
end
end
end
end
| 32.152174 | 112 | 0.555105 |
918e8a4e7b5eaf54925b33b1782a667446425577 | 4,910 | require 'spec_helper'
shared_examples_for 'the first before max_id page' do
it 'returns a default sized array' do
expect(subject.size).to eq(25)
end
it 'returns the appropriate first object' do
expect(subject.first.name).to eq('user100')
end
end
shared_examples_for 'blank page' do
it 'returns an array with size 0' do
expect(subject.size).to eq(0)
end
end
shared_examples_for 'before max_id pagination' do
  it 'includes a hash with keys :next_url and :next_max_id' do
expect(subject[:next_url]).to include('max_id=76')
expect(subject[:next_url].scan('max_id').length).to eq(1)
expect(subject[:next_max_id]).to eq(76)
end
end
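# Illustrative usage sketch (not part of the original spec; `params` and
# `request` below are hypothetical controller objects):
#
#   users = User.page(max_id: params[:max_id]).per(25)
#   meta  = users.pagination(request.original_url)
#   # meta is a Hash carrying :next_url and :next_max_id, as covered by the
#   # shared examples above.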
describe Divination::ActiveRecordExtension do
  it 'returns no next_url or next_max_id when there are no records' do
params = User.page(max_id: 0).pagination('http://example.com')
expect(params.has_key?(:next_url)).to be_falsey
expect(params[:next_max_id]).to be_nil
end
end
describe Divination::ActiveRecordExtension do
before do
1.upto(100) {|i| User.create! :name => "user#{'%03d' % i}", :age => (i / 10)}
1.upto(100) {|i| GemDefinedModel.create! :name => "user#{'%03d' % i}", :age => (i / 10)}
1.upto(100) {|i| Device.create! :name => "user#{'%03d' % i}", :age => (i / 10)}
end
[User, Admin, GemDefinedModel, Device].each do |model_class|
context "for #{model_class}" do
describe '#page' do
context 'page 1 <= max_id' do
subject { model_class.page(max_id: 101) }
it_should_behave_like 'the first before max_id page'
end
context 'page 2 <= max_id' do
subject { model_class.page(max_id: 75) }
it 'returns a default sized array' do
expect(subject.size).to eq 25
end
it 'returns the correctly sorted first object' do
expect(subject.first.name).to eq('user075')
end
end
context 'page without an argument' do
subject { model_class.page() }
it_should_behave_like 'the first before max_id page'
end
context 'max_id is empty string' do
subject { model_class.page(max_id: '') }
it_should_behave_like 'the first before max_id page'
end
context 'before max_id page < 0' do
subject { model_class.page(max_id: 0) }
it_should_behave_like 'blank page'
end
context 'max_id is equal to last item in last page' do
subject { model_class.page(max_id: 1) }
it 'returns nil for next_max_id' do
expect(subject.next_max_id).to be_nil
end
end
context 'before max_id page > max page' do
          subject { model_class.page(max_id: 1000) }
it_should_behave_like 'the first before max_id page'
end
describe 'ensure #order_values is preserved' do
subject { model_class.order('id').page() }
it 'ensures scope is preserved' do
expect(subject.order_values.uniq).to eq(["#{model_class.table_name}.id desc"])
end
end
end
describe '#per' do
context 'default page per 5' do
subject { model_class.page.per(5) }
it 'returns array with size 5' do
expect(subject.size).to eq(5)
end
it 'returns the appropriate first object' do
expect(subject.first.name).to eq('user100')
end
end
context "default page per nil (using default)" do
subject { model_class.page.per(nil) }
it 'uses the default per page' do
expect(subject.size).to eq(25)
end
end
end
describe '#next_max_id' do
context 'before max_id 1st page' do
subject { model_class.page }
it 'returns the appropriate next_max_id' do
expect(subject.next_max_id).to eq 76
end
end
context 'before max_id middle page' do
subject { model_class.page(max_id: 50) }
it 'returns the appropriate next_max_id' do
expect(subject.next_max_id).to eq 26
end
end
end
describe '#pagination' do
context 'before max_id' do
subject { model_class.page.pagination('http://example.com') }
it_should_behave_like 'before max_id pagination'
end
context 'before with existing before query param' do
subject { model_class.page(max_id: 101).pagination('http://example.com?max_id=10') }
it_should_behave_like 'before max_id pagination'
end
context 'before with query params' do
subject { model_class.page.pagination('http://example.com?a[]=one&a[]=two') }
it_should_behave_like 'before max_id pagination'
specify { expect(subject[:next_url]).to include('a[]=one&a[]=two') }
end
end
end
end
end
| 30.308642 | 94 | 0.611202 |
ffa71582cac18dc17af7ee8bd0757764c5d76e07 | 1,073 | class Site::ArticlesController < Site::BaseController
def index
query = Article.published.latest
if params.key?( :category )
@category = Category.find_by_slug( params[:category] )
return not_found if @category.nil?
query = query.in( @category )
end
@articles = query.paginate( page: params[:page], per_page: 10 )
end
def show
@article = params.key?( :url ) ? Article.published.find_by_url( params[:url] ) : Article.find( params[:id] )
# for stats module
@primary_content = @article
rescue
not_found and return
end
def update
# if something’s not quite right, pretend not to be here
return not_found unless user_signed_in? && !!request.xhr?
# load the article
article = Article.find( params[:id] )
# attempt to save
if article.update_attributes( params.except( :id ).permit( Article.cms_assignable_attributes ))
render json: article
else
render json: { messages: article.errors, status: 400 }
end
rescue
not_found and return
end
end
| 17.883333 | 112 | 0.657968 |
28c14d537569ea57955eaec750d1985a8670f6a7 | 505 | json.code @code
json.message @message
if @posts.length != 0
json.post @posts.each do |p|
json.pid p.id
json.ptitle p.title
json.pleixing p.leixing
json.passets p.postimgs, :asset
time = p.created_at.strftime("%Y-%m-%d %T").to_s
json.updatetime time
json.uid p.user.id
json.uname p.user.wename
json.uasset p.user.weno
@all_comments = Comment.where(commentable: p).order(updated_at: :desc)
json.sumcomment @all_comments.length
end
end
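# Rough shape of the rendered payload (illustrative only; actual values depend
# on the assigned @code, @message and @posts):
#   { "code": ..., "message": ...,
#     "post": [ { "pid": ..., "ptitle": ..., "pleixing": ..., "passets": [...],
#                 "updatetime": "YYYY-MM-DD HH:MM:SS", "uid": ..., "uname": ...,
#                 "uasset": ..., "sumcomment": ... } ] }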
| 26.578947 | 76 | 0.653465 |
e28672417f9ba515770f3351bd0baf8affaf4a16 | 7,707 | =begin
#MailSlurp API
#MailSlurp is an API for sending and receiving emails from dynamically allocated email addresses. It's designed for developers and QA teams to test applications, process inbound emails, send templated notifications, attachments, and more. ## Resources - [Homepage](https://www.mailslurp.com) - Get an [API KEY](https://app.mailslurp.com/sign-up/) - Generated [SDK Clients](https://www.mailslurp.com/docs/) - [Examples](https://github.com/mailslurp/examples) repository
The version of the OpenAPI document: 6.5.2
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1
=end
require 'date'
module MailSlurpClient
class TestNewInboxRulesetOptions
attr_accessor :inbox_ruleset_test_options
attr_accessor :create_inbox_ruleset_options
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'inbox_ruleset_test_options' => :'inboxRulesetTestOptions',
:'create_inbox_ruleset_options' => :'createInboxRulesetOptions'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'inbox_ruleset_test_options' => :'InboxRulesetTestOptions',
:'create_inbox_ruleset_options' => :'CreateInboxRulesetOptions'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `MailSlurpClient::TestNewInboxRulesetOptions` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `MailSlurpClient::TestNewInboxRulesetOptions`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'inbox_ruleset_test_options')
self.inbox_ruleset_test_options = attributes[:'inbox_ruleset_test_options']
end
if attributes.key?(:'create_inbox_ruleset_options')
self.create_inbox_ruleset_options = attributes[:'create_inbox_ruleset_options']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @inbox_ruleset_test_options.nil?
invalid_properties.push('invalid value for "inbox_ruleset_test_options", inbox_ruleset_test_options cannot be nil.')
end
if @create_inbox_ruleset_options.nil?
invalid_properties.push('invalid value for "create_inbox_ruleset_options", create_inbox_ruleset_options cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @inbox_ruleset_test_options.nil?
return false if @create_inbox_ruleset_options.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
inbox_ruleset_test_options == o.inbox_ruleset_test_options &&
create_inbox_ruleset_options == o.create_inbox_ruleset_options
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[inbox_ruleset_test_options, create_inbox_ruleset_options].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
MailSlurpClient.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 34.10177 | 470 | 0.65966 |
387015e602cb616b43ab4fa2956b2118d7bc7159 | 4,921 | require "simplecov"
SimpleCov.start
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
# This allows you to limit a spec run to individual examples or groups
# you care about by tagging them with `:focus` metadata. When nothing
# is tagged with `:focus`, all examples get run. RSpec also provides
# aliases for `it`, `describe`, and `context` that include `:focus`
# metadata: `fit`, `fdescribe` and `fcontext`, respectively.
config.filter_run_when_matching :focus
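  # For example (illustrative, not part of the generated template):
  #   it "does something important", :focus do
  #     # ...
  #   end
  # or, equivalently, use the `fit` alias mentioned above.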
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# This setting enables warnings. It's recommended, but in some cases may
# be too noisy due to issues in dependencies.
config.warnings = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = "doc"
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
end
| 47.317308 | 92 | 0.743142 |
39c21467c09db5ceb8d44b50d7718d5012d7a6a4 | 1,143 | #
# Cookbook Name:: nagios
# Recipe:: default
#
# Copyright (C) 2015 Wanelo, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
| 43.961538 | 72 | 0.769904 |
e840a7895cf3fba484711078d515f595d3d92605 | 5,530 | # Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# Log entries related to a specific work request.
class ContainerEngine::Models::WorkRequestLogEntry
# The description of an action that occurred.
# @return [String]
attr_accessor :message
# The date and time the log entry occurred.
# @return [String]
attr_accessor :timestamp
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'message': :'message',
'timestamp': :'timestamp'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'message': :'String',
'timestamp': :'String'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :message The value to assign to the {#message} property
# @option attributes [String] :timestamp The value to assign to the {#timestamp} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.message = attributes[:'message'] if attributes[:'message']
self.timestamp = attributes[:'timestamp'] if attributes[:'timestamp']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
message == other.message &&
timestamp == other.timestamp
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[message, timestamp].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 34.347826 | 245 | 0.667812 |
623f53b2746b1363a35d226d451a3c3351557536 | 23,344 | require "rbs"
module TypeProf
class RBSReader
def initialize
@env, @builtin_env_json = RBSReader.get_builtin_env
end
@builtin_env = @builtin_env_json = nil
def self.get_builtin_env
unless @builtin_env
@builtin_env = RBS::Environment.new
loader = RBS::EnvironmentLoader.new
new_decls = loader.load(env: @builtin_env).map {|decl,| decl }
@builtin_env_json = load_rbs(@builtin_env, new_decls)
end
return @builtin_env.dup, @builtin_env_json
end
def load_builtin
@builtin_env_json
end
def load_library(lib)
loader = RBS::EnvironmentLoader.new(core_root: nil)
loader.add(library: lib)
case lib
when "yaml"
loader.add(library: "pstore")
loader.add(library: "dbm")
end
new_decls = loader.load(env: @env).map {|decl,| decl }
RBSReader.load_rbs(@env, new_decls)
end
def load_path(path)
loader = RBS::EnvironmentLoader.new(core_root: nil)
loader.add(path: path)
new_decls = loader.load(env: @env).map {|decl,| decl }
RBSReader.load_rbs(@env, new_decls)
end
def load_rbs_string(name, content)
buffer = RBS::Buffer.new(name: name, content: content)
new_decls = []
RBS::Parser.parse_signature(buffer).each do |decl|
@env << decl
new_decls << decl
end
RBSReader.load_rbs(@env, new_decls)
end
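    # Note (added for clarity): resolves type names against the accumulated
    # environment, then converts only the newly loaded declarations into the
    # JSON-like structure consumed by Import below.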
def self.load_rbs(env, new_decls)
all_env = env.resolve_type_names
resolver = RBS::TypeNameResolver.from_env(all_env)
cur_env = RBS::Environment.new
new_decls.each do |decl|
cur_env << env.resolve_declaration(resolver, decl, outer: [], prefix: RBS::Namespace.root)
end
RBS2JSON.new(all_env, cur_env).dump_json
end
end
class RBS2JSON
def initialize(all_env, cur_env)
@all_env, @cur_env = all_env, cur_env
end
def dump_json
{
classes: conv_classes,
constants: conv_constants,
globals: conv_globals,
}
end
# constant_name = [Symbol]
#
# { constant_name => type }
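    # e.g. (illustrative) a constant `::Foo::BAR` declared as `Integer` becomes
    #   { [:Foo, :BAR] => [:instance, [:Integer]] }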
def conv_constants
constants = {}
@cur_env.constant_decls.each do |name, decl|
klass = conv_type_name(name)
constants[klass] = conv_type(decl.decl.type)
end
constants
end
# gvar_name = Symbol (:$gvar)
#
# { gvar_name => type }
def conv_globals
gvars = {}
@cur_env.global_decls.each do |name, decl|
decl = decl.decl
gvars[name] = conv_type(decl.type)
end
gvars
end
def conv_classes
json = {}
each_class_decl do |name, decls|
klass = conv_type_name(name)
super_class_name, super_class_args = get_super_class(name, decls)
if super_class_name
name = conv_type_name(super_class_name)
type_args = super_class_args.map {|type| conv_type(type) }
superclass = [name, type_args]
end
type_params = nil
modules = { include: [], extend: [], prepend: [] }
methods = {}
attr_methods = {}
ivars = {}
cvars = {}
rbs_sources = {}
visibility = true
decls.each do |decl|
decl = decl.decl
type_params2 = decl.type_params.params.map {|param| [param.name, param.variance] }
raise "inconsistent type parameter declaration" if type_params && type_params != type_params2
type_params = type_params2
decl.members.each do |member|
case member
when RBS::AST::Members::MethodDefinition
name = member.name
method_types = member.types.map do |method_type|
case method_type
when RBS::MethodType then method_type
when :super then raise NotImplementedError
end
end
method_def = conv_method_def(method_types, visibility)
rbs_source = [(member.kind == :singleton ? "self." : "") + member.name.to_s, member.types.map {|type| type.location.source }]
if member.instance?
methods[[false, name]] = method_def
rbs_sources[[false, name]] = rbs_source
end
if member.singleton?
methods[[true, name]] = method_def
rbs_sources[[true, name]] = rbs_source
end
when RBS::AST::Members::AttrReader
ty = conv_type(member.type)
attr_methods[[false, member.name]] = attr_method_def(:reader, member.name, ty, visibility)
when RBS::AST::Members::AttrWriter
ty = conv_type(member.type)
attr_methods[[false, member.name]] = attr_method_def(:writer, member.name, ty, visibility)
when RBS::AST::Members::AttrAccessor
ty = conv_type(member.type)
attr_methods[[false, member.name]] = attr_method_def(:accessor, member.name, ty, visibility)
when RBS::AST::Members::Alias
# XXX: an alias to attr methods?
if member.instance?
method_def = methods[[false, member.old_name]]
methods[[false, member.new_name]] = method_def if method_def
end
if member.singleton?
method_def = methods[[true, member.old_name]]
methods[[true, member.new_name]] = method_def if method_def
end
when RBS::AST::Members::Include
name = member.name
if name.kind == :class
mod = conv_type_name(name)
type_args = member.args.map {|type| conv_type(type) }
modules[:include] << [mod, type_args]
else
# including an interface is not supported yet
end
when RBS::AST::Members::Extend
name = member.name
if name.kind == :class
mod = conv_type_name(name)
type_args = member.args.map {|type| conv_type(type) }
modules[:extend] << [mod, type_args]
else
# extending a module with an interface is not supported yet
end
when RBS::AST::Members::Prepend
name = member.name
if name.kind == :class
mod = conv_type_name(name)
type_args = member.args.map {|type| conv_type(type) }
modules[:prepend] << [mod, type_args]
else
                # prepending an interface is not supported yet
end
when RBS::AST::Members::InstanceVariable
ivars[member.name] = conv_type(member.type)
when RBS::AST::Members::ClassVariable
cvars[member.name] = conv_type(member.type)
when RBS::AST::Members::Public
visibility = true
when RBS::AST::Members::Private
visibility = false
            # The following declarations are ignorable because they are handled at another level
when RBS::AST::Declarations::Constant
when RBS::AST::Declarations::Alias # type alias
when RBS::AST::Declarations::Class, RBS::AST::Declarations::Module
when RBS::AST::Declarations::Interface
else
warn "Importing #{ member.class.name } is not supported yet"
end
end
end
json[klass] = {
type_params: type_params,
superclass: superclass,
members: {
modules: modules,
methods: methods,
attr_methods: attr_methods,
ivars: ivars,
cvars: cvars,
rbs_sources: rbs_sources,
},
}
end
json
end
def each_class_decl
# topological sort
# * superclasses and modules appear earlier than their subclasses (Object is earlier than String)
      # * a namespace module appears earlier than its children (Process is earlier than Process::Status)
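      # (Note added for clarity: a :visit event pushes :visit events for the
      # name's superclass chain and enclosing namespace on top of its own :new
      # event, so those references are yielded first; that is what produces the
      # ordering described above.)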
visited = {}
queue = @cur_env.class_decls.keys.map {|name| [:visit, name] }.reverse
until queue.empty?
event, name = queue.pop
case event
when :visit
if !visited[name]
visited[name] = true
queue << [:new, name]
@all_env.class_decls[name].decls.each do |decl|
decl = decl.decl
next if decl.is_a?(RBS::AST::Declarations::Module)
each_reference(decl) {|name| queue << [:visit, name] }
end
queue << [:visit, name.namespace.to_type_name] if !name.namespace.empty?
end
when :new
decls = @cur_env.class_decls[name]
yield name, decls.decls if decls
end
end
@cur_env.interface_decls.each do |name, decl|
yield name, [decl]
end
end
def each_reference(decl, &blk)
yield decl.name
if decl.super_class
name = decl.super_class.name
else
name = RBS::BuiltinNames::Object.name
end
return if decl.name == RBS::BuiltinNames::BasicObject.name
return if decl.name == name
decls = @all_env.class_decls[name]
if decls
decls.decls.each do |decl|
each_reference(decl.decl, &blk)
end
end
end
def get_super_class(name, decls)
return nil if name == RBS::BuiltinNames::BasicObject.name
decls.each do |decl|
decl = decl.decl
case decl
when RBS::AST::Declarations::Class
super_class = decl.super_class
return super_class.name, super_class.args if super_class
when RBS::AST::Declarations::Module, RBS::AST::Declarations::Interface
return nil
else
raise "unknown declaration: %p" % decl.class
end
end
return RBS::BuiltinNames::Object.name, []
end
def conv_method_def(rbs_method_types, visibility)
sig_rets = rbs_method_types.map do |method_type|
conv_func(method_type.type_params, method_type.type, method_type.block)
end
{
sig_rets: sig_rets,
visibility: visibility,
}
end
def conv_func(type_params, func, block)
blk = block ? conv_block(block) : nil
lead_tys = func.required_positionals.map {|type| conv_type(type.type) }
opt_tys = func.optional_positionals.map {|type| conv_type(type.type) }
rest_ty = func.rest_positionals
rest_ty = conv_type(rest_ty.type) if rest_ty
opt_kw_tys = func.optional_keywords.to_h {|key, type| [key, conv_type(type.type)] }
req_kw_tys = func.required_keywords.to_h {|key, type| [key, conv_type(type.type)] }
rest_kw_ty = func.rest_keywords
rest_kw_ty = conv_type(rest_kw_ty.type) if rest_kw_ty
ret_ty = conv_type(func.return_type)
{
type_params: type_params,
lead_tys: lead_tys,
opt_tys: opt_tys,
rest_ty: rest_ty,
req_kw_tys: req_kw_tys,
opt_kw_tys: opt_kw_tys,
rest_kw_ty: rest_kw_ty,
blk: blk,
ret_ty: ret_ty,
}
end
def attr_method_def(kind, name, ty, visibility)
{
kind: kind,
ivar: name,
ty: ty,
visibility: visibility,
}
end
def conv_block(rbs_block)
type = rbs_block.type
# XXX
raise NotImplementedError unless type.optional_keywords.empty?
raise NotImplementedError unless type.required_keywords.empty?
raise NotImplementedError if type.rest_keywords
req = rbs_block.required
lead_tys = type.required_positionals.map do |type|
conv_type(type.type)
end
opt_tys = type.optional_positionals.map do |type|
conv_type(type.type)
end
ret_ty = conv_type(type.return_type)
[req, lead_tys, opt_tys, ret_ty]
end
def conv_type(ty)
case ty
when RBS::Types::ClassSingleton
[:class, conv_type_name(ty.name)]
when RBS::Types::ClassInstance
klass = conv_type_name(ty.name)
case klass
when [:Array]
raise if ty.args.size != 1
[:array, [:Array], [], conv_type(ty.args.first)]
when [:Hash]
raise if ty.args.size != 2
key, val = ty.args
[:hash, [:Hash], [conv_type(key), conv_type(val)]]
when [:Enumerator]
raise if ty.args.size != 2
[:array, [:Enumerator], [], conv_type(ty.args.first)]
else
if ty.args.empty?
[:instance, klass]
else
[:cell, [:instance, klass], ty.args.map {|ty| conv_type(ty) }]
end
end
when RBS::Types::Bases::Bool then [:bool]
when RBS::Types::Bases::Any then [:any]
when RBS::Types::Bases::Top then [:any]
when RBS::Types::Bases::Void then [:void]
when RBS::Types::Bases::Self then [:self]
when RBS::Types::Bases::Nil then [:nil]
when RBS::Types::Bases::Bottom then [:union, []]
when RBS::Types::Variable then [:var, ty.name]
when RBS::Types::Tuple
tys = ty.types.map {|ty2| conv_type(ty2) }
[:array, [:Array], tys, [:union, []]]
when RBS::Types::Literal
case ty.literal
when Integer then [:int]
when String then [:str]
when true then [:true]
when false then [:false]
when Symbol then [:sym, ty.literal]
else
p ty.literal
raise NotImplementedError
end
when RBS::Types::Alias
alias_decl = @all_env.alias_decls[ty.name]
alias_decl ? conv_type(alias_decl.decl.type) : [:any]
when RBS::Types::Union
[:union, ty.types.map {|ty2| conv_type(ty2) }.compact]
when RBS::Types::Optional
[:optional, conv_type(ty.type)]
when RBS::Types::Interface
# XXX: Currently, only a few builtin interfaces are supported
case ty.to_s
when "::_ToStr" then [:str]
when "::_ToInt" then [:int]
when "::_ToAry[U]" then [:array, [:Array], [], [:var, :U]]
else
[:instance, conv_type_name(ty.name)]
end
when RBS::Types::Bases::Instance then [:any] # XXX: not implemented yet
when RBS::Types::Record
[:hash_record, [:Hash], ty.fields.map {|key, ty| [key, conv_type(ty)] }]
when RBS::Types::Proc
[:proc, conv_func(nil, ty.type, nil)]
else
warn "unknown RBS type: %p" % ty.class
[:any]
end
end
def conv_type_name(name)
name.namespace.path + [name.name]
end
end
class Import
def self.import_builtin(scratch)
Import.new(scratch, scratch.rbs_reader.load_builtin).import
end
def self.import_library(scratch, feature)
begin
json = scratch.rbs_reader.load_library(feature)
rescue RBS::EnvironmentLoader::UnknownLibraryError
return nil
rescue RBS::DuplicatedDeclarationError
return true
end
# need cache?
Import.new(scratch, json).import
end
def self.import_rbs_file(scratch, rbs_path)
rbs_path = Pathname(rbs_path) unless rbs_path.is_a?(Pathname)
Import.new(scratch, scratch.rbs_reader.load_path(rbs_path)).import(true)
end
def self.import_rbs_code(scratch, rbs_name, rbs_code)
Import.new(scratch, scratch.rbs_reader.load_rbs_string(rbs_name, rbs_code)).import(true)
end
def initialize(scratch, json)
@scratch = scratch
@json = json
end
def import(explicit = false)
classes = @json[:classes].map do |classpath, cdef|
type_params = cdef[:type_params]
superclass, superclass_type_args = cdef[:superclass]
members = cdef[:members]
name = classpath.last
superclass = path_to_klass(superclass) if superclass
base_klass = path_to_klass(classpath[0..-2])
klass = @scratch.get_constant(base_klass, name)
if klass.is_a?(Type::Any)
klass = @scratch.new_class(base_klass, name, type_params, superclass, nil)
          # These builtin classes are needed to interpret RBS declarations
case classpath
when [:NilClass] then Type::Builtin[:nil] = klass
when [:TrueClass] then Type::Builtin[:true] = klass
when [:FalseClass] then Type::Builtin[:false] = klass
when [:Integer] then Type::Builtin[:int] = klass
when [:String] then Type::Builtin[:str] = klass
when [:Symbol] then Type::Builtin[:sym] = klass
when [:Array] then Type::Builtin[:ary] = klass
when [:Hash] then Type::Builtin[:hash] = klass
when [:Proc] then Type::Builtin[:proc] = klass
end
end
[klass, superclass_type_args, members]
end
classes.each do |klass, superclass_type_args, members|
@scratch.add_superclass_type_args!(klass, superclass_type_args&.map {|ty| conv_type(ty) })
modules = members[:modules]
methods = members[:methods]
attr_methods = members[:attr_methods]
ivars = members[:ivars]
cvars = members[:cvars]
rbs_sources = members[:rbs_sources]
modules.each do |kind, mods|
mods.each do |mod, type_args|
type_args = type_args&.map {|ty| conv_type(ty) }
case kind
when :include
@scratch.mix_module(:after, klass, path_to_klass(mod), type_args, false, nil)
when :extend
@scratch.mix_module(:after, klass, path_to_klass(mod), type_args, true, nil)
when :prepend
@scratch.mix_module(:before, klass, path_to_klass(mod), type_args, false, nil)
end
end
end
methods.each do |(singleton, method_name), mdef|
rbs_source = explicit ? rbs_sources[[singleton, method_name]] : nil
mdef = conv_method_def(method_name, mdef, rbs_source)
@scratch.add_method(klass, method_name, singleton, mdef)
end
attr_methods.each do |(singleton, method_name), mdef|
kind = mdef[:kind]
ivar = mdef[:ivar]
ty = conv_type(mdef[:ty]).remove_type_vars
@scratch.add_attr_method(klass, ivar, :"@#{ ivar }", kind, mdef[:visibility], nil)
@scratch.add_ivar_write!(Type::Instance.new(klass), :"@#{ ivar }", ty, nil)
end
ivars.each do |ivar_name, ty|
ty = conv_type(ty).remove_type_vars
@scratch.add_ivar_write!(Type::Instance.new(klass), ivar_name, ty, nil)
end
cvars.each do |ivar_name, ty|
ty = conv_type(ty).remove_type_vars
@scratch.add_cvar_write!(klass, ivar_name, ty, nil)
end
end
@json[:constants].each do |classpath, value|
base_klass = path_to_klass(classpath[0..-2])
value = conv_type(value).remove_type_vars
@scratch.add_constant(base_klass, classpath[-1], value, nil)
end
@json[:globals].each do |name, ty|
ty = conv_type(ty).remove_type_vars
@scratch.add_gvar_write!(name, ty, nil)
end
true
end
def conv_method_def(method_name, mdef, rbs_source)
sig_rets = mdef[:sig_rets].flat_map do |sig_ret|
conv_func(sig_ret)
end
TypedMethodDef.new(sig_rets, rbs_source, mdef[:visibility])
end
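    # Build [MethodSignature, return type] pairs from one intermediate signature
    # tuple. A single signature may expand into two pairs when the block is
    # optional (conv_block then yields both a Proc type and nil).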
def conv_func(sig_ret)
#type_params = sig_ret[:type_params] # XXX
lead_tys = sig_ret[:lead_tys]
opt_tys = sig_ret[:opt_tys]
rest_ty = sig_ret[:rest_ty]
req_kw_tys = sig_ret[:req_kw_tys]
opt_kw_tys = sig_ret[:opt_kw_tys]
rest_kw_ty = sig_ret[:rest_kw_ty]
blk = sig_ret[:blk]
ret_ty = sig_ret[:ret_ty]
lead_tys = lead_tys.map {|ty| conv_type(ty) }
opt_tys = opt_tys.map {|ty| conv_type(ty) }
rest_ty = conv_type(rest_ty) if rest_ty
kw_tys = []
req_kw_tys.each {|key, ty| kw_tys << [true, key, conv_type(ty)] }
opt_kw_tys.each {|key, ty| kw_tys << [false, key, conv_type(ty)] }
kw_rest_ty = conv_type(rest_kw_ty) if rest_kw_ty
blks = conv_block(blk)
ret_ty = conv_type(ret_ty)
blks.map do |blk|
[MethodSignature.new(lead_tys, opt_tys, rest_ty, [], kw_tys, kw_rest_ty, blk), ret_ty]
end
end
def conv_block(blk)
return [Type.nil] unless blk
req, lead_tys, opt_tys, ret_ty = blk
lead_tys = lead_tys.map {|ty| conv_type(ty) }
opt_tys = opt_tys.map {|ty| conv_type(ty) }
msig = MethodSignature.new(lead_tys, opt_tys, nil, [], {}, nil, Type.nil)
ret_ty = conv_type(ret_ty)
ret = [Type::Proc.new(TypedBlock.new(msig, ret_ty), Type::Builtin[:proc])]
ret << Type.nil unless req
ret
end
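    # Convert an intermediate type tuple back into a Type object. The tuple
    # shapes (e.g. [:instance, [:Integer]], [:optional, [:str]],
    # [:union, [[:int], [:nil]]]) mirror those produced by the RBS converter
    # earlier in this file.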
def conv_type(ty)
case ty.first
when :class then path_to_klass(ty[1])
when :instance then Type::Instance.new(path_to_klass(ty[1]))
when :cell
Type::Cell.new(Type::Cell::Elements.new(ty[2].map {|ty| conv_type(ty) }), conv_type(ty[1]))
when :any then Type.any
when :void then Type::Void.new
when :nil then Type.nil
when :optional then Type.optional(conv_type(ty[1]))
when :bool then Type.bool
when :self then Type::Var.new(:self)
when :int then Type::Instance.new(Type::Builtin[:int])
when :str then Type::Instance.new(Type::Builtin[:str])
when :sym then Type::Symbol.new(ty.last, Type::Instance.new(Type::Builtin[:sym]))
when :true then Type::Instance.new(Type::Builtin[:true])
when :false then Type::Instance.new(Type::Builtin[:false])
when :array
_, path, lead_tys, rest_ty = ty
lead_tys = lead_tys.map {|ty| conv_type(ty) }
rest_ty = conv_type(rest_ty)
base_type = Type::Instance.new(path_to_klass(path))
Type::Array.new(Type::Array::Elements.new(lead_tys, rest_ty), base_type)
when :hash
_, path, (k, v) = ty
Type.gen_hash(Type::Instance.new(path_to_klass(path))) do |h|
k_ty = conv_type(k)
v_ty = conv_type(v)
h[k_ty] = v_ty
end
when :hash_record
_, path, key_tys = ty
Type.gen_hash(Type::Instance.new(path_to_klass(path))) do |h|
key_tys.each do |key, ty|
k_ty = Type::Symbol.new(key, Type::Instance.new(Type::Builtin[:sym]))
v_ty = conv_type(ty)
h[k_ty] = v_ty
end
end
when :union
tys = ty[1]
Type::Union.new(Utils::Set[*tys.map {|ty2| conv_type(ty2) }], nil).normalize # XXX: Array and Hash support
when :var
Type::Var.new(ty[1])
when :proc
        msig, ret_ty = conv_func(ty[1]).first # Currently, RBS Proc does not accept a block, so the size should always be one
Type::Proc.new(TypedBlock.new(msig, ret_ty), Type::Instance.new(Type::Builtin[:proc]))
else
pp ty
raise NotImplementedError
end
end
def path_to_klass(path)
klass = Type::Builtin[:obj]
path.each do |name|
klass = @scratch.get_constant(klass, name)
if klass == Type.any
raise TypeProfError.new("A constant `#{ path.join("::") }' is used but not defined in RBS")
end
end
klass
end
end
end
| 33.588489 | 139 | 0.589273 |
d501764f9f27cd199fafb87d762a419979bfb969 | 529 | class Api::V1::RoomsController < ApplicationController
def create
@room = Room.create(room_params)
if @room.valid?
render json: { room: RoomSerializer.new(@room) }, status: :created
else
render json: { error: 'failed to create room' }, status: :not_acceptable
end
end
def destroy
@room = Room.find(params[:id])
@room.destroy
render json: @room
end
private
def room_params
params.require(:room).permit(:name, :user_id, :temp_F, :light, :humidity, :pet_access)
end
end
| 22.041667 | 90 | 0.661626 |
e906d19517c2c3d4d369b4c3005b7a64c3531e0a | 659 | Pod::Spec.new do |s|
s.name = "POP+SnapKit"
s.version = "3.0.2"
s.summary = "Use SnapKit with the Facebook Pop animation framework."
s.homepage = "https://github.com/wxxsw/POP-SnapKit"
s.license = "MIT"
s.author = { "GeSen" => "[email protected]" }
  # Swift code is built as a dynamic framework by CocoaPods, and dynamic frameworks work on iOS 8+
s.platform = :ios, "8.0"
s.source = { :git => "https://github.com/wxxsw/POP-SnapKit.git", :tag => "#{s.version}" }
s.source_files = "Source/**/*"
s.requires_arc = true
s.dependency "pop", "~> 1.0.9"
s.dependency "SnapKit", "~> 3.0.2"
end
| 29.954545 | 98 | 0.582701 |
384ecc3b580abdaf66095e9bf0548cedf2ceecd5 | 47 | # typed: true
case foo; in A[] then true; end
| 11.75 | 31 | 0.638298 |
3967a5956f796d62d2ce1c698fbd08c8220036c1 | 2,851 | module Mandrill
class API
# Blank Slate
instance_methods.each do |m|
      undef_method m unless m.to_s =~ /^__|object_id|method_missing|respond_to\?|respond_to_missing\?|to_s|inspect|kind_of\?|should|should_not/
end
# Mandrill API Documentation: http://mandrillapp.com/api/docs
API_VERSION = "1.0"
API_URL = "https://mandrillapp.com/api"
AUTH_URL = "https://mandrillapp.com/api-auth/"
# Generate a Mandrill +authorization_url+.
# Returns a URL to redirect users to so that they will be prompted
# to enter their Mandrill username and password to authorize a
# connection between your application and their Mandrill account.
#
# If authorized successfully, a POST request will be sent to the
# +redirect_url+ with a "key" parameter containing the API key for
# that user's Mandrill account. Be sure to store this key somewhere,
# as you will need it to run API requests later.
#
# If authorization fails for some reason, an "error" parameter will
# be present in the POST request, containing an error message.
#
# == Example
#
# redirect_to Mandrill::API.authorization_url("12345","https://example.com/callback")
#
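    # A minimal sketch of handling that callback (the action and attribute
    # names below are illustrative, not part of this gem):
    #
    #   def mandrill_callback
    #     if params[:key]
    #       current_account.update_attribute(:mandrill_api_key, params[:key])
    #     else
    #       flash[:error] = params[:error]
    #     end
    #   end
    #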
def self.authorization_url(app_id, redirect_url)
"#{AUTH_URL}?id=#{app_id}&redirect_url=#{URI.escape(redirect_url, Regexp.new("[^#{URI::PATTERN::UNRESERVED}]"))}"
end
# Initialize
def initialize(api_key, config = {})
defaults = {
:api_version => API_VERSION,
:format => 'json'
}
@config = defaults.merge(config).freeze
@api_key = api_key
end
# Dynamically find API methods
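    # e.g. api.users('ping') issues a POST to
    # https://mandrillapp.com/api/1.0/users/ping.json with the API key in the
    # JSON body (see #call below); the endpoint path is just an illustration.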
def method_missing(api_method, *args) # :nodoc:
call(api_method, *args)
if @response.code.to_i == 200
return "PONG!" if @response.body == "\"PONG!\""
@config[:format] == 'json' ? JSON.parse(@response.body) : @response.body
else
raise(API::Error.new(JSON.parse(@response.body)["code"], JSON.parse(@response.body)["message"]))
end
end
# Check the API to see if a method is supported
def respond_to?(api_method, *args) # :nodoc:
call(api_method, *args)
@response.code == 500 ? false : true
end
# Display the supported methods
def public_methods # :nodoc:
[:messages, :senders, :tags, :templates, :urls, :users]
end
# Call the API
def call(api_method, *args)
req_endpoint = "#{API_URL}/#{@config[:api_version]}/#{api_method.to_s}/#{args.first.to_s}.#{@config[:format]}"
req_body = {:key => @api_key}
req_body.merge!(args.last) if args.last.is_a?(Hash)
@response = HTTPI.post(req_endpoint, req_body.to_json.to_s)
end
class Error < StandardError
def initialize(code, message)
super "(#{code}) #{message}"
end
end
end
end | 35.6375 | 120 | 0.641179 |
e9dc2d0aa539a8997e56ab07a713eab48eaddf8a | 1,589 | if ENV["UI_TEST"]
require 'selenium-webdriver'
require 'page-object'
require 'csv'
require 'json'
require_relative '../ui_selenium/pages/cal_central_pages'
Dir[Rails.root.join('spec', 'ui_selenium', 'util', "**.rb")].each { |f| require f }
require_relative '../ui_selenium/pages/api_my_academics_page'
Dir[Rails.root.join('spec', 'ui_selenium', 'pages', "api**.rb")].each { |f| require f }
require_relative '../ui_selenium/pages/splash_page'
require_relative '../ui_selenium/pages/my_dashboard_page'
require_relative '../ui_selenium/pages/my_academics_page'
require_relative '../ui_selenium/pages/my_academics_class_page'
require_relative '../ui_selenium/pages/my_academics_book_list_page'
require_relative '../ui_selenium/pages/my_campus_page'
require_relative '../ui_selenium/pages/my_finances_pages'
require_relative '../ui_selenium/pages/my_finances_landing_page'
require_relative '../ui_selenium/pages/my_finances_details_page'
require_relative '../ui_selenium/pages/my_finances_billing_page'
require_relative '../ui_selenium/pages/my_finances_financial_aid_page'
require_relative '../ui_selenium/pages/my_profile_page'
require_relative '../ui_selenium/pages/my_toolbox_page'
require_relative '../ui_selenium/pages/cal_net_auth_page'
require_relative '../ui_selenium/pages/google_page'
require_relative '../ui_selenium/pages/canvas_page'
Dir[Rails.root.join('spec', 'ui_selenium', 'pages', "canvas**.rb")].each { |f| require f }
Dir[Rails.root.join('spec', 'ui_selenium', 'pages', "**card.rb")].each { |f| require f }
end
| 40.74359 | 92 | 0.757709 |
f8b6daea5a3b90e395bc73a6cd6d8cf6c82aa1e1 | 21,267 | require 'date'
require 'set'
require 'bigdecimal'
require 'bigdecimal/util'
module ActiveRecord
module ConnectionAdapters #:nodoc:
# Abstract representation of an index definition on a table. Instances of
# this type are typically created and returned by methods in database
# adapters. e.g. ActiveRecord::ConnectionAdapters::AbstractMysqlAdapter#indexes
class IndexDefinition < Struct.new(:table, :name, :unique, :columns, :lengths, :orders, :where, :type, :using) #:nodoc:
end
# Abstract representation of a column definition. Instances of this type
# are typically created by methods in TableDefinition, and added to the
# +columns+ attribute of said TableDefinition object, in order to be used
# for generating a number of table creation or table changing SQL statements.
class ColumnDefinition < Struct.new(:name, :type, :limit, :precision, :scale, :default, :null, :first, :after, :primary_key, :sql_type, :cast_type) #:nodoc:
def primary_key?
primary_key || type.to_sym == :primary_key
end
end
class ChangeColumnDefinition < Struct.new(:column, :type, :options) #:nodoc:
end
class ForeignKeyDefinition < Struct.new(:from_table, :to_table, :options) #:nodoc:
def name
options[:name]
end
def column
options[:column]
end
def primary_key
options[:primary_key] || default_primary_key
end
def on_delete
options[:on_delete]
end
def on_update
options[:on_update]
end
def custom_primary_key?
options[:primary_key] != default_primary_key
end
private
def default_primary_key
"id"
end
end
module TimestampDefaultDeprecation # :nodoc:
def emit_warning_if_null_unspecified(options)
return if options.key?(:null)
ActiveSupport::Deprecation.warn(<<-MSG.squish)
`#timestamp` was called without specifying an option for `null`. In Rails 5,
this behavior will change to `null: false`. You should manually specify
`null: true` to prevent the behavior of your existing migrations from changing.
MSG
end
end
# Represents the schema of an SQL table in an abstract way. This class
# provides methods for manipulating the schema representation.
#
# Inside migration files, the +t+ object in +create_table+
# is actually of this type:
#
# class SomeMigration < ActiveRecord::Migration
# def up
# create_table :foo do |t|
# puts t.class # => "ActiveRecord::ConnectionAdapters::TableDefinition"
# end
# end
#
# def down
# ...
# end
# end
#
    # The table definition's columns are stored as ColumnDefinition objects in
    # the +columns+ attribute.
class TableDefinition
include TimestampDefaultDeprecation
# An array of ColumnDefinition objects, representing the column changes
# that have been defined.
attr_accessor :indexes
attr_reader :name, :temporary, :options, :as
def initialize(types, name, temporary, options, as = nil)
@columns_hash = {}
@indexes = {}
@native = types
@temporary = temporary
@options = options
@as = as
@name = name
end
def columns; @columns_hash.values; end
# Appends a primary key definition to the table definition.
# Can be called multiple times, but this is probably not a good idea.
def primary_key(name, type = :primary_key, options = {})
column(name, type, options.merge(:primary_key => true))
end
# Returns a ColumnDefinition for the column with name +name+.
def [](name)
@columns_hash[name.to_s]
end
# Instantiates a new column for the table.
# The +type+ parameter is normally one of the migrations native types,
# which is one of the following:
# <tt>:primary_key</tt>, <tt>:string</tt>, <tt>:text</tt>,
# <tt>:integer</tt>, <tt>:float</tt>, <tt>:decimal</tt>,
# <tt>:datetime</tt>, <tt>:timestamp</tt>, <tt>:time</tt>,
# <tt>:date</tt>, <tt>:binary</tt>, <tt>:boolean</tt>.
#
# You may use a type not in this list as long as it is supported by your
# database (for example, "polygon" in MySQL), but this will not be database
# agnostic and should usually be avoided.
#
# Available options are (none of these exists by default):
# * <tt>:limit</tt> -
# Requests a maximum column length. This is number of characters for <tt>:string</tt> and
# <tt>:text</tt> columns and number of bytes for <tt>:binary</tt> and <tt>:integer</tt> columns.
# * <tt>:default</tt> -
# The column's default value. Use nil for NULL.
# * <tt>:null</tt> -
# Allows or disallows +NULL+ values in the column. This option could
# have been named <tt>:null_allowed</tt>.
# * <tt>:precision</tt> -
# Specifies the precision for a <tt>:decimal</tt> column.
# * <tt>:scale</tt> -
# Specifies the scale for a <tt>:decimal</tt> column.
# * <tt>:index</tt> -
# Create an index for the column. Can be either <tt>true</tt> or an options hash.
#
# Note: The precision is the total number of significant digits
# and the scale is the number of digits that can be stored following
# the decimal point. For example, the number 123.45 has a precision of 5
# and a scale of 2. A decimal with a precision of 5 and a scale of 2 can
# range from -999.99 to 999.99.
#
      # Please be aware of different RDBMS implementations' behavior with
# <tt>:decimal</tt> columns:
# * The SQL standard says the default scale should be 0, <tt>:scale</tt> <=
# <tt>:precision</tt>, and makes no comments about the requirements of
# <tt>:precision</tt>.
# * MySQL: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..30].
# Default is (10,0).
# * PostgreSQL: <tt>:precision</tt> [1..infinity],
# <tt>:scale</tt> [0..infinity]. No default.
# * SQLite2: Any <tt>:precision</tt> and <tt>:scale</tt> may be used.
# Internal storage as strings. No default.
# * SQLite3: No restrictions on <tt>:precision</tt> and <tt>:scale</tt>,
# but the maximum supported <tt>:precision</tt> is 16. No default.
# * Oracle: <tt>:precision</tt> [1..38], <tt>:scale</tt> [-84..127].
# Default is (38,0).
# * DB2: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..62].
# Default unknown.
      # * SQL Server: <tt>:precision</tt> [1..38], <tt>:scale</tt> [0..38].
# Default (38,0).
#
# This method returns <tt>self</tt>.
#
# == Examples
# # Assuming +td+ is an instance of TableDefinition
# td.column(:granted, :boolean)
# # granted BOOLEAN
#
# td.column(:picture, :binary, limit: 2.megabytes)
# # => picture BLOB(2097152)
#
# td.column(:sales_stage, :string, limit: 20, default: 'new', null: false)
# # => sales_stage VARCHAR(20) DEFAULT 'new' NOT NULL
#
# td.column(:bill_gates_money, :decimal, precision: 15, scale: 2)
# # => bill_gates_money DECIMAL(15,2)
#
# td.column(:sensor_reading, :decimal, precision: 30, scale: 20)
# # => sensor_reading DECIMAL(30,20)
#
# # While <tt>:scale</tt> defaults to zero on most databases, it
# # probably wouldn't hurt to include it.
# td.column(:huge_integer, :decimal, precision: 30)
# # => huge_integer DECIMAL(30)
#
# # Defines a column with a database-specific type.
# td.column(:foo, 'polygon')
# # => foo polygon
#
# == Short-hand examples
#
# Instead of calling +column+ directly, you can also work with the short-hand definitions for the default types.
# They use the type as the method name instead of as a parameter and allow for multiple columns to be defined
# in a single statement.
#
# What can be written like this with the regular calls to column:
#
# create_table :products do |t|
# t.column :shop_id, :integer
# t.column :creator_id, :integer
# t.column :item_number, :string
# t.column :name, :string, default: "Untitled"
# t.column :value, :string, default: "Untitled"
# t.column :created_at, :datetime
# t.column :updated_at, :datetime
# end
# add_index :products, :item_number
#
# can also be written as follows using the short-hand:
#
# create_table :products do |t|
# t.integer :shop_id, :creator_id
# t.string :item_number, index: true
# t.string :name, :value, default: "Untitled"
# t.timestamps
# end
#
# There's a short-hand method for each of the type values declared at the top. And then there's
# TableDefinition#timestamps that'll add +created_at+ and +updated_at+ as datetimes.
#
# TableDefinition#references will add an appropriately-named _id column, plus a corresponding _type
# column if the <tt>:polymorphic</tt> option is supplied. If <tt>:polymorphic</tt> is a hash of
# options, these will be used when creating the <tt>_type</tt> column. The <tt>:index</tt> option
# will also create an index, similar to calling <tt>add_index</tt>. So what can be written like this:
#
# create_table :taggings do |t|
# t.integer :tag_id, :tagger_id, :taggable_id
# t.string :tagger_type
# t.string :taggable_type, default: 'Photo'
# end
# add_index :taggings, :tag_id, name: 'index_taggings_on_tag_id'
# add_index :taggings, [:tagger_id, :tagger_type]
#
# Can also be written as follows using references:
#
# create_table :taggings do |t|
# t.references :tag, index: { name: 'index_taggings_on_tag_id' }
# t.references :tagger, polymorphic: true, index: true
# t.references :taggable, polymorphic: { default: 'Photo' }
# end
def column(name, type, options = {})
name = name.to_s
type = type.to_sym
if @columns_hash[name] && @columns_hash[name].primary_key?
raise ArgumentError, "you can't redefine the primary key column '#{name}'. To define a custom primary key, pass { id: false } to create_table."
end
index_options = options.delete(:index)
index(name, index_options.is_a?(Hash) ? index_options : {}) if index_options
@columns_hash[name] = new_column_definition(name, type, options)
self
end
def remove_column(name)
@columns_hash.delete name.to_s
end
[:string, :text, :integer, :bigint, :float, :decimal, :datetime, :timestamp, :time, :date, :binary, :boolean].each do |column_type|
define_method column_type do |*args|
options = args.extract_options!
column_names = args
column_names.each { |name| column(name, column_type, options) }
end
end
# Adds index options to the indexes hash, keyed by column name
      # This is primarily used to track indexes that need to be created after the table is created.
#
# index(:account_id, name: 'index_projects_on_account_id')
def index(column_name, options = {})
indexes[column_name] = options
end
# Appends <tt>:datetime</tt> columns <tt>:created_at</tt> and
# <tt>:updated_at</tt> to the table.
def timestamps(*args)
options = args.extract_options!
emit_warning_if_null_unspecified(options)
column(:created_at, :datetime, options)
column(:updated_at, :datetime, options)
end
# Adds a reference. Optionally adds a +type+ column, if <tt>:polymorphic</tt> option is provided.
# <tt>references</tt> and <tt>belongs_to</tt> are acceptable. The reference column will be an +integer+
# by default, the <tt>:type</tt> option can be used to specify a different type.
#
# t.references(:user)
# t.references(:user, type: "string")
# t.belongs_to(:supplier, polymorphic: true)
#
# See SchemaStatements#add_reference
def references(*args)
options = args.extract_options!
polymorphic = options.delete(:polymorphic)
index_options = options.delete(:index)
type = options.delete(:type) || :integer
args.each do |col|
column("#{col}_id", type, options)
column("#{col}_type", :string, polymorphic.is_a?(Hash) ? polymorphic : options) if polymorphic
index(polymorphic ? %w(type id).map { |t| "#{col}_#{t}" } : "#{col}_id", index_options.is_a?(Hash) ? index_options : {}) if index_options
end
end
alias :belongs_to :references
def new_column_definition(name, type, options) # :nodoc:
type = aliased_types(type.to_s, type)
column = create_column_definition name, type
limit = options.fetch(:limit) do
native[type][:limit] if native[type].is_a?(Hash)
end
column.limit = limit
column.precision = options[:precision]
column.scale = options[:scale]
column.default = options[:default]
column.null = options[:null]
column.first = options[:first]
column.after = options[:after]
column.primary_key = type == :primary_key || options[:primary_key]
column
end
private
def create_column_definition(name, type)
ColumnDefinition.new name, type
end
def native
@native
end
def aliased_types(name, fallback)
'timestamp' == name ? :datetime : fallback
end
end
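    # Collects the column additions and foreign key changes requested during a
    # bulk change_table so an adapter can emit them as a single ALTER TABLE.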
class AlterTable # :nodoc:
attr_reader :adds
attr_reader :foreign_key_adds
attr_reader :foreign_key_drops
def initialize(td)
@td = td
@adds = []
@foreign_key_adds = []
@foreign_key_drops = []
end
def name; @td.name; end
def add_foreign_key(to_table, options)
@foreign_key_adds << ForeignKeyDefinition.new(name, to_table, options)
end
def drop_foreign_key(name)
@foreign_key_drops << name
end
def add_column(name, type, options)
name = name.to_s
type = type.to_sym
@adds << @td.new_column_definition(name, type, options)
end
end
# Represents an SQL table in an abstract way for updating a table.
# Also see TableDefinition and SchemaStatements#create_table
#
# Available transformations are:
#
# change_table :table do |t|
# t.column
# t.index
# t.rename_index
# t.timestamps
# t.change
# t.change_default
# t.rename
# t.references
# t.belongs_to
# t.string
# t.text
# t.integer
# t.float
# t.decimal
# t.datetime
# t.timestamp
# t.time
# t.date
# t.binary
# t.boolean
# t.remove
# t.remove_references
# t.remove_belongs_to
# t.remove_index
# t.remove_timestamps
# end
#
class Table
include TimestampDefaultDeprecation
def initialize(table_name, base)
@table_name = table_name
@base = base
end
# Adds a new column to the named table.
# See TableDefinition#column for details of the options you can use.
#
# ====== Creating a simple column
# t.column(:name, :string)
def column(column_name, type, options = {})
@base.add_column(@table_name, column_name, type, options)
end
# Checks to see if a column exists. See SchemaStatements#column_exists?
def column_exists?(column_name, type = nil, options = {})
@base.column_exists?(@table_name, column_name, type, options)
end
# Adds a new index to the table. +column_name+ can be a single Symbol, or
# an Array of Symbols. See SchemaStatements#add_index
#
# ====== Creating a simple index
# t.index(:name)
# ====== Creating a unique index
# t.index([:branch_id, :party_id], unique: true)
# ====== Creating a named index
# t.index([:branch_id, :party_id], unique: true, name: 'by_branch_party')
def index(column_name, options = {})
@base.add_index(@table_name, column_name, options)
end
# Checks to see if an index exists. See SchemaStatements#index_exists?
def index_exists?(column_name, options = {})
@base.index_exists?(@table_name, column_name, options)
end
# Renames the given index on the table.
#
# t.rename_index(:user_id, :account_id)
def rename_index(index_name, new_index_name)
@base.rename_index(@table_name, index_name, new_index_name)
end
# Adds timestamps (+created_at+ and +updated_at+) columns to the table. See SchemaStatements#add_timestamps
#
# t.timestamps
def timestamps(options = {})
emit_warning_if_null_unspecified(options)
@base.add_timestamps(@table_name, options)
end
# Changes the column's definition according to the new options.
# See TableDefinition#column for details of the options you can use.
#
# t.change(:name, :string, limit: 80)
# t.change(:description, :text)
def change(column_name, type, options = {})
@base.change_column(@table_name, column_name, type, options)
end
# Sets a new default value for a column. See SchemaStatements#change_column_default
#
# t.change_default(:qualification, 'new')
# t.change_default(:authorized, 1)
def change_default(column_name, default)
@base.change_column_default(@table_name, column_name, default)
end
# Removes the column(s) from the table definition.
#
# t.remove(:qualification)
# t.remove(:qualification, :experience)
def remove(*column_names)
@base.remove_columns(@table_name, *column_names)
end
# Removes the given index from the table.
#
# ====== Remove the index_table_name_on_column in the table_name table
# t.remove_index :column
# ====== Remove the index named index_table_name_on_branch_id in the table_name table
# t.remove_index column: :branch_id
# ====== Remove the index named index_table_name_on_branch_id_and_party_id in the table_name table
# t.remove_index column: [:branch_id, :party_id]
# ====== Remove the index named by_branch_party in the table_name table
# t.remove_index name: :by_branch_party
def remove_index(options = {})
@base.remove_index(@table_name, options)
end
# Removes the timestamp columns (+created_at+ and +updated_at+) from the table.
#
# t.remove_timestamps
def remove_timestamps
@base.remove_timestamps(@table_name)
end
# Renames a column.
#
# t.rename(:description, :name)
def rename(column_name, new_column_name)
@base.rename_column(@table_name, column_name, new_column_name)
end
# Adds a reference. Optionally adds a +type+ column, if <tt>:polymorphic</tt> option is provided.
# <tt>references</tt> and <tt>belongs_to</tt> are acceptable. The reference column will be an +integer+
# by default, the <tt>:type</tt> option can be used to specify a different type.
#
# t.references(:user)
# t.references(:user, type: "string")
# t.belongs_to(:supplier, polymorphic: true)
#
# See SchemaStatements#add_reference
def references(*args)
options = args.extract_options!
args.each do |ref_name|
@base.add_reference(@table_name, ref_name, options)
end
end
alias :belongs_to :references
# Removes a reference. Optionally removes a +type+ column.
# <tt>remove_references</tt> and <tt>remove_belongs_to</tt> are acceptable.
#
# t.remove_references(:user)
# t.remove_belongs_to(:supplier, polymorphic: true)
#
# See SchemaStatements#remove_reference
def remove_references(*args)
options = args.extract_options!
args.each do |ref_name|
@base.remove_reference(@table_name, ref_name, options)
end
end
alias :remove_belongs_to :remove_references
# Adds a column or columns of a specified type
#
# t.string(:goat)
# t.string(:goat, :sheep)
[:string, :text, :integer, :float, :decimal, :datetime, :timestamp, :time, :date, :binary, :boolean].each do |column_type|
define_method column_type do |*args|
options = args.extract_options!
args.each do |name|
@base.add_column(@table_name, name, column_type, options)
end
end
end
private
def native
@base.native_database_types
end
end
end
end
| 37.050523 | 160 | 0.614849 |
1d273c2a49f36a1d5af97df1e51c02c7e02e3f30 | 790 | require "spec_helper"
describe HarvesterCore::Modifiers::Truncator do
let(:klass) { HarvesterCore::Modifiers::Truncator }
describe "#initialize" do
it "assigns the original value and the length" do
truncator = klass.new(["Value"], 300)
truncator.original_value.should eq ["Value"]
truncator.length.should eq 300
end
end
describe "modify" do
it "truncates the text to 30 charachters" do
truncator = klass.new(["A string longer than 30 charachters"], 30, "")
truncator.modify.should eq ["A string longer than 30 charac"]
end
it "adds a ommission at the end" do
truncator = klass.new(["A string longer than 30 charachters"], 30, "...")
truncator.modify.should eq ["A string longer than 30 cha..."]
end
end
end | 29.259259 | 79 | 0.670886 |
7a2c2b4ae90c44425afc175dcf00921b9a70b21d | 2,831 | # frozen_string_literal: true
module QA
RSpec.describe 'Verify', :runner, quarantine: {
type: :flaky,
issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/351994'
} do
describe 'Run pipeline with manual jobs' do
let(:project) do
Resource::Project.fabricate_via_api! do |project|
project.name = 'pipeline-with-manual-job'
project.description = 'Project for pipeline with manual job'
end
end
let!(:runner) do
Resource::Runner.fabricate! do |runner|
runner.project = project
runner.name = "qa-runner-#{SecureRandom.hex(3)}"
end
end
let!(:ci_file) do
Resource::Repository::Commit.fabricate_via_api! do |commit|
commit.project = project
commit.commit_message = 'Add .gitlab-ci.yml'
commit.add_files(
[
{
file_path: '.gitlab-ci.yml',
content: <<~YAML
stages:
- Stage1
- Stage2
- Stage3
Prep:
stage: Stage1
script: exit 0
when: manual
Build:
stage: Stage2
needs: ['Prep']
script: exit 0
parallel: 6
Test:
stage: Stage3
needs: ['Build']
script: exit 0
Deploy:
stage: Stage3
needs: ['Test']
script: exit 0
parallel: 6
YAML
}
]
)
end
end
before do
Flow::Login.sign_in
project.visit!
Flow::Pipeline.visit_latest_pipeline(pipeline_condition: 'skipped')
end
after do
runner&.remove_via_api!
project&.remove_via_api!
end
it 'does not leave any job in skipped state', testcase: 'https://gitlab.com/gitlab-org/gitlab/-/quality/test_cases/349158' do
Page::Project::Pipeline::Show.perform do |show|
show.click_job_action('Prep') # Trigger pipeline manually
show.wait_until(max_duration: 300, sleep_interval: 2, reload: false) do
project.pipelines.last[:status] == 'success'
end
aggregate_failures do
expect(show).to have_build('Test', status: :success)
show.click_job_dropdown('Build')
expect(show).not_to have_skipped_job_in_group
show.click_job_dropdown('Build') # Close Build dropdown
show.click_job_dropdown('Deploy')
expect(show).not_to have_skipped_job_in_group
end
end
end
end
end
end
| 28.59596 | 131 | 0.508301 |
26a59fa349035cb6281abedae045e6006dafe5cd | 1,179 | #!/usr/bin/env ruby
#
module MIDIMessage
# this is a helper for converting nibbles and bytes
module TypeConversion
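    # Example usages (values derived from the methods below):
    #   hex_chars_to_numeric_byte_array(%w(9 0 4 0 7 f)) #=> [144, 64, 127]
    #   numeric_byte_array_to_hex_string([144, 64, 127]) #=> "90407F"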
def self.hex_chars_to_numeric_byte_array(nibbles)
nibbles = nibbles.dup
# get rid of last nibble if there's an odd number
# it will be processed later anyway
nibbles.slice!(nibbles.length-2, 1) if nibbles.length.odd?
bytes = []
while !(nibs = nibbles.slice!(0,2)).empty?
byte = (nibs[0].hex << 4) + nibs[1].hex
bytes << byte
end
bytes
end
# convert byte str to byte array
def self.hex_string_to_numeric_byte_array(str)
str = str.dup
bytes = []
until str.eql?("")
bytes << str.slice!(0, 2).hex
end
bytes
end
# converts a string of hex digits to bytes
def self.hex_str_to_hex_chars(str)
str.split(//)
end
def self.numeric_byte_array_to_hex_string(bytes)
bytes.map { |b| s = b.to_s(16); s.length.eql?(1) ? "0#{s}" : s }.join.upcase
end
def self.numeric_byte_to_hex_chars(num)
[((num & 0xF0) >> 4), (num & 0x0F)].map { |n| n.to_s(16) }
end
end
end | 25.085106 | 82 | 0.585242 |
33e4964e33a370f6f23c54592de70b07cb5ffad4 | 645 | User.create!(name: "Example User",
email: "[email protected]",
password: "foobar",
password_confirmation: "foobar",
admin: true)
# Original (custom) user
User.create!(name: "Toshi",
email: "[email protected]",
password: "foobar2",
password_confirmation: "foobar2")
99.times do |n|
name = Faker::Name.name
email = "example-#{n+1}@railstutorial.org"
password = "password"
User.create!(name: name,
email: email,
password: password,
password_confirmation: password)
end | 32.25 | 48 | 0.527132 |
017e5cb2cea26c666a8be64f8e110c64e5460125 | 180 | # coding: utf-8
module ONIX; module CodeLists
LIST_14 = {
"00" => "Undefined",
"01" => "Sentence case",
"02" => "Title case",
"03" => "All capitals"
}
end; end | 18 | 29 | 0.538889 |
617b3b77e1e8df993f9ca3b395d0d6a203aefcc7 | 188 | class AddAuthSourcesFilter < ActiveRecord::Migration
def self.up
add_column :auth_sources, :filter, :string
end
def self.down
remove_column :auth_sources, :filter
end
end
| 18.8 | 52 | 0.744681 |
33721b9ffe0da7966f640486467bc8e8968edf88 | 36 | module I18n
VERSION = "0.4.2"
end
| 9 | 19 | 0.638889 |
b9f90fc4c62a19c9e6b8f864f171e26bf78178d5 | 1,631 | module BackendClient
class Provider
module Transactions
Transaction = Struct.new(:cinstance, :usage, :timestamp)
def latest_transactions
transactions = account.services.flat_map { |service| ThreeScale::Core::Transaction.load_all(service.backend_id).to_a }
process_transactions(transactions.sort_by(&:timestamp).reverse)
end
private
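      # Preload cinstances, services and metrics for all involved applications
      # up front (avoiding per-transaction lookups), then map each raw backend
      # transaction into a Transaction struct.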
def process_transactions(transactions)
application_ids = transactions.map(&:application_id)
cinstances = preload_cinstances(application_ids)
services = preload_services(application_ids)
metrics = preload_metrics(services)
transactions.map do |transaction|
Transaction.new(cinstances[transaction.application_id],
process_usage(transaction.usage, metrics),
parse_timestamp(transaction.timestamp))
end
end
def preload_cinstances(application_ids)
account.provided_cinstances.where(application_id: application_ids).index_by(&:application_id)
end
def preload_services(application_ids)
account.services.joins(:application_plans => [:cinstances]).includes(:metrics).where(:cinstances => {:application_id => application_ids})
end
def preload_metrics(services)
services.map(&:metrics).flatten.index_by(&:id)
end
def process_usage(usage, metrics)
usage.map_keys do |metric_id|
metrics[metric_id.to_s.to_i] # usage keys (the metric ids) are actually provided as Symbol by ThreeScale::Core
end
end
end
end
end
| 33.979167 | 145 | 0.676885 |
79684c7b28dbcb6e7342a99d067e8ef7bab6cd6a | 3,793 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "microblogging-app_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.228261 | 102 | 0.757712 |
ed9aa80b7356e7f0dacd38b7a28a4e7010b0da76 | 516 | # frozen_string_literal: true
require 'vcr'
require 'webmock'
VCR.configure do |config|
config.ignore_localhost = true
config.cassette_library_dir = 'spec/fixtures/vcr_cassettes'
config.hook_into :webmock
config.configure_rspec_metadata!
config.allow_http_connections_when_no_cassette = true
config.filter_sensitive_data('<ENCODED_AUTH_HEADER>') do
Base64.strict_encode64("#{ENV.fetch('KLARNA_API_KEY', 'KLARNA_DEFAULT_KEY')}:#{ENV.fetch('KLARNA_API_SECRET', 'KLARNA_DEFAULT_SECRET')}")
end
end
| 30.352941 | 141 | 0.790698 |
289033c71d876b037bc53504b9b2a55e573ad93f | 2,226 | describe AwayCommand do
it "should set IRC connection's AWAY message" do
irc_connection = double(:irc_connection, :authenticated? => true, :registered? => true,
:nick => 'Otto')
expect(irc_connection).to receive(:send_reply).with(/You have been marked as being away/)
expect(irc_connection).to receive(:set_away).with("gone somewhere, brb")
cmd = AwayCommand.new(irc_connection)
cmd.set_data(["gone somewhere, brb"])
expect(cmd).to be_valid
cmd.execute!
end
it "should unset IRC connection's AWAY message" do
irc_connection = double(:irc_connection, :authenticated? => true, :registered? => true,
:nick => 'Otto')
expect(irc_connection).to receive(:send_reply).with(/You are no longer marked as being away/)
expect(irc_connection).to receive(:set_away).with(nil)
cmd = AwayCommand.new(irc_connection)
cmd.set_data([])
expect(cmd).to be_valid
cmd.execute!
end
it "should unset AWAY message with an empty string as well" do
irc_connection = double(:irc_connection, :authenticated? => true, :registered? => true,
:nick => 'Otto')
expect(irc_connection).to receive(:send_reply).with(/You are no longer marked as being away/)
expect(irc_connection).to receive(:set_away).with(nil)
cmd = AwayCommand.new(irc_connection)
cmd.set_data([""]) # empty string instead of empty array
expect(cmd).to be_valid
cmd.execute!
end
it "should set AWAY message even when not authenticated" do
irc_connection = double(:irc_connection, :authenticated? => false, :registered? => true,
:nick => 'Otto')
expect(irc_connection).to receive(:send_reply).with(/You have been marked as being away/)
expect(irc_connection).to receive(:set_away).with("gone somewhere, brb")
cmd = AwayCommand.new(irc_connection)
cmd.set_data(["gone somewhere, brb"])
expect(cmd).to be_valid
cmd.execute!
end
it "shouldn't do anything when not registered" do
irc_connection = double(:irc_connection, :registered? => false,
:nick => 'Otto')
expect(irc_connection).not_to receive(:set_away)
cmd = AwayCommand.new(irc_connection)
cmd.set_data(["gone"])
expect(cmd).not_to be_valid
end
end
| 37.1 | 97 | 0.696765 |
e29115987fd6b40e1c7430782d9b5423a578b2bc | 889 | require 'spec_helper'
describe Weather::Cache do
context 'when cache is implemented' do
before(:all) do
Weather.reset_configuration!
Weather.configuration.set_cache_store(:file_store, 'tmp/cache')
@key = SecureRandom.hex
@timestamp = Time.now
end
it '#write' do
expect( Weather.cache.write(@key, @timestamp, expire: 5.minutes) )
.to be true
end
it '#read' do
expect( Weather.cache.read(@key) )
.to eq @timestamp
end
end
context 'when cache is not implemented' do
before(:all) do
Weather.reset_configuration!
@key = SecureRandom.hex
@timestamp = Time.now
end
it '#write' do
expect( Weather.cache.write(@key, @timestamp, expire: 5.minutes) )
.to be_nil
end
it '#read' do
expect( Weather.cache.read(@key) )
.to be_nil
end
end
end
| 20.674419 | 72 | 0.614173 |
f8a6ca53c0d13f86a91c9562a2fc9b6e1af383c6 | 106 | RSpec.describe Jscop do
it 'has a version number' do
expect(Jscop::VERSION).not_to be nil
end
end
| 17.666667 | 40 | 0.716981 |
5d907e66c42552581b5f63deff42e2591e2aaf80 | 795 | cask 'aliwangwang' do
version '7.03.01-7790'
sha256 '48f400006987ecb3b9f46eaa3add78a4e22304ff15c684ce637a1c087f08f0bd'
# dbison.alicdn.com was verified as official when first introduced to the cask
url "https://dbison.alicdn.com/updates/macww-nosandbox-#{version}.dmg"
name 'Ali Wangwang'
homepage 'https://wangwang.taobao.com'
app 'AliWangwang.app'
uninstall quit: 'com.taobao.aliwangwang'
zap delete: [
'~/Library/Caches/com.taobao.aliwangwang',
'~/Library/Containers/com.taobao.aliwangwang',
'~/Library/Internet Plug-Ins/WangwangPlugin.plugin',
'~/Library/Preferences/com.taobao.aliwangwang.plist',
'~/Library/Saved Application State/com.taobao.aliwangwang.savedState',
]
end
| 36.136364 | 86 | 0.680503 |
08235013eabdf108a59b39ab0ee946851cd8d00a | 1,006 | # frozen_string_literal: true
require 'rails_helper'
require 'mvi/responses/find_candidate'
describe MVI::Responses::Base do
let(:klass) do
Class.new(MVI::Responses::Base) do
mvi_endpoint :PRPA_IN201306UV02
end
end
let(:faraday_response) { instance_double('Faraday::Response') }
let(:body) { File.read('spec/support/mvi/find_candidate_response.xml') }
before(:each) do
allow(faraday_response).to receive(:body) { body }
end
describe '#intialize' do
it 'should be initialized with the correct attrs' do
response = klass.new(faraday_response)
expect(response.code).to eq('AA')
expect(response.query).to_not be_nil
expect(response.original_response).to eq(body)
end
end
describe '#body' do
it 'should invoke the subclass body' do
allow(faraday_response).to receive(:body) { body }
response = klass.new(faraday_response)
expect { response.body }.to raise_error(MVI::Responses::NotImplementedError)
end
end
end
| 28.742857 | 82 | 0.705765 |
79c7baf3b4dd6725a656730c442e1e6a854e5ba0 | 78 | class HomeController < ApplicationController
def index
end
end
| 6.5 | 44 | 0.692308 |
33eb691b173dedc94078e8ecf6a6b90c318b9e1c | 4,162 | # Phusion Passenger - https://www.phusionpassenger.com/
# Copyright (c) 2010-2017 Phusion Holding B.V.
#
# "Passenger", "Phusion Passenger" and "Union Station" are registered
# trademarks of Phusion Holding B.V.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
TEST_BOOST_OXT_LIBRARY = LIBBOOST_OXT
TEST_COMMON_LIBRARY = COMMON_LIBRARY
TEST_COMMON_CFLAGS = "-DTESTING_APPLICATION_POOL"
desc "Run all unit tests and integration tests"
task :test => ['test:oxt', 'test:cxx', 'test:ruby', 'test:node', 'test:integration']
desc "Clean all compiled test files"
task 'test:clean' do
sh("rm -rf #{TEST_OUTPUT_DIR}")
sh("rm -f test/cxx/*.gch")
end
task :clean => 'test:clean'
file "#{TEST_OUTPUT_DIR}allocate_memory" => 'test/support/allocate_memory.c' do
compile_c("#{TEST_OUTPUT_DIR}allocate_memory.o", 'test/support/allocate_memory.c')
create_c_executable("#{TEST_OUTPUT_DIR}allocate_memory", "#{TEST_OUTPUT_DIR}allocate_memory.o")
end
desc "Install developer dependencies"
task 'test:install_deps' do
gem_install = PlatformInfo.gem_command + " install --no-rdoc --no-ri"
gem_install = "#{PlatformInfo.ruby_sudo_command} #{gem_install}" if boolean_option('SUDO')
default = boolean_option('DEVDEPS_DEFAULT', true)
install_base_deps = boolean_option('BASE_DEPS', default)
install_doctools = boolean_option('DOCTOOLS', default)
if deps_target = string_option('DEPS_TARGET')
bundle_args = "--path #{Shellwords.escape deps_target} #{ENV['BUNDLE_ARGS']}".strip
else
bundle_args = ENV['BUNDLE_ARGS'].to_s
end
yarn_args = ENV['YARN_ARGS'].to_s
if !PlatformInfo.locate_ruby_tool('bundle') || bundler_too_old?
sh "#{gem_install} bundler"
end
if install_base_deps && install_doctools
sh "bundle install #{bundle_args} --without="
else
if install_base_deps
sh "bundle install #{bundle_args} --without doc release"
end
if install_doctools
sh "bundle install #{bundle_args} --without base"
end
end
if install_doctools
# workaround for issue "bluecloth not found" when using 1.12.x
sh "#{gem_install} bundler --version 1.11.2"
sh "rvm list"
end
if boolean_option('USH_BUNDLES', default)
# see what is available for Submodule tests just in case Travis CI environment changes
# || true to avoid missing rvm command triggering a failure on Jenkins CI
sh "rvm list || true"
sh "cd src/ruby_supportlib/phusion_passenger/vendor/union_station_hooks_core" \
" && bundle install #{bundle_args} --with travis --without doc notravis"
sh "cd src/ruby_supportlib/phusion_passenger/vendor/union_station_hooks_rails" \
" && bundle install #{bundle_args} --without doc notravis"
sh "cd src/ruby_supportlib/phusion_passenger/vendor/union_station_hooks_rails" \
" && bundle exec rake install_test_app_bundles" \
" BUNDLE_ARGS='#{bundle_args}'"
end
if boolean_option('NODE_MODULES', default)
sh "yarn install #{yarn_args}"
end
end
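# True when the locally installed Bundler is older than 1.10.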
def bundler_too_old?
`bundle --version` =~ /version (.+)/
version = $1.split('.').map { |x| x.to_i }
version[0] < 1 || version[0] == 1 && version[1] < 10
end
| 39.264151 | 97 | 0.730899 |
6adf2050a8f3c8fe0bb15c60480febd113303d59 | 2,927 | RSpec.shared_examples 'rack examples' do
include Warden::Test::Helpers
let(:endpoint) { 'https://api.airbrake.io/api/v3/projects/113743/notices' }
before do
stub_request(:post, endpoint).to_return(status: 200, body: '')
Airbrake::Config.instance.merge(performance_stats: false)
end
after { Warden.test_reset! }
describe "application routes" do
describe "/index" do
it "successfully returns 200 and body" do
expect(Airbrake).not_to receive(:notify)
get '/'
expect(last_response.status).to eq(200)
expect(last_response.body).to eq('Hello from index')
end
end
describe "/crash" do
it "returns 500 and sends a notice to Airbrake" do
expect(Airbrake).to receive(:notify).with(
an_instance_of(Airbrake::Notice)
) do |notice|
expect(notice[:errors].first[:type]).to eq('AirbrakeTestError')
end
get '/crash'
end
end
end
describe "user payload" do
let(:user) do
OpenStruct.new(
id: 1,
email: '[email protected]',
username: 'qa-dept',
first_name: 'John',
last_name: 'Doe'
)
end
before { login_as(user) }
it "reports user info" do
get '/crash'
sleep 2
body = /
"context":{.*
"user":{
"id":"1",
"name":"John\sDoe",
"username":"qa-dept",
"email":"[email protected]"}
/x
expect(a_request(:post, endpoint).with(body: body))
.to have_been_made.at_least_once
end
end
context "when additional parameters are present" do
before do
get '/crash', nil, 'HTTP_USER_AGENT' => 'Bot', 'HTTP_REFERER' => 'bingo.com'
sleep 2
end
it "contains url" do
body = %r("context":{.*"url":"http://example\.org/crash".*})
expect(a_request(:post, endpoint).with(body: body))
.to have_been_made.at_least_once
end
it "contains hostname" do
body = /"context":{.*"hostname":".+".*}/
expect(a_request(:post, endpoint).with(body: body))
.to have_been_made.at_least_once
end
it "contains userAgent" do
body = /"context":{.*"userAgent":"Bot".*}/
expect(a_request(:post, endpoint).with(body: body))
.to have_been_made.at_least_once
end
it "contains referer" do
body = /"context":{.*"referer":"bingo.com".*}/
expect(a_request(:post, endpoint).with(body: body))
.to have_been_made.at_least_once
end
it "contains HTTP headers" do
body = /"context":{.*"headers":{.*"CONTENT_LENGTH":"0".*}/
expect(a_request(:post, endpoint).with(body: body))
.to have_been_made.at_least_once
end
it "contains HTTP method" do
body = /"context":{.*"httpMethod":"GET".*}/
expect(a_request(:post, endpoint).with(body: body))
.to have_been_made.at_least_once
end
end
end
| 26.369369 | 82 | 0.592415 |
acd24a5a86d68a51874fc63aebb0d54925e633de | 477 | #!C:/Ruby/bin/ruby
require File.dirname(__FILE__) + "/../config/environment" unless defined?(RAILS_ROOT)
# If you're using RubyGems and mod_ruby, this require should be changed to an absolute path one, like:
# "/usr/local/lib/ruby/gems/1.8/gems/rails-0.8.0/lib/dispatcher" -- otherwise performance is severely impaired
require "dispatcher"
ADDITIONAL_LOAD_PATHS.reverse.each { |dir| $:.unshift(dir) if File.directory?(dir) } if defined?(Apache::RubyRun)
Dispatcher.dispatch
| 43.363636 | 113 | 0.75891 |
7aeabb39304b55e982e520249aafa55fed5ba4de | 144 | class Location < ActiveRecord::Base
belongs_to :region
has_many :people
def self.ordered_by_name
Location.all.order(:name)
end
end
| 16 | 35 | 0.743056 |
6afab51529249f4cb25fd34e44b2bae92af0d084 | 2,675 | require 'rails_helper'
describe InternalAttribute, type: :model do
let(:internal_attribute) { InternalAttribute.new }
let(:otu) { FactoryBot.build(:valid_otu) }
let(:predicate) { FactoryBot.create(:valid_controlled_vocabulary_term_predicate) }
context 'validation' do
before(:each) {
internal_attribute.valid?
}
context 'requires' do
specify 'predicate' do
expect(internal_attribute.errors.include?(:predicate)).to be_truthy
end
end
context 'uniqueness' do
let!(:da1) { InternalAttribute.create!(predicate: predicate, value: '1234', attribute_subject: otu) }
specify 'same predicate, same value, is not allowed' do
expect( InternalAttribute.new(predicate: predicate, value: '1234', attribute_subject: otu).valid?).to be_falsey
end
specify 'same predicate, different values are allowed' do
expect( InternalAttribute.create!(predicate: predicate, value: '4567', attribute_subject: otu)).to be_truthy
end
specify 'different predicate, same value are allowed' do
expect( InternalAttribute.create!(predicate: FactoryBot.create(:valid_predicate), value: '1234', attribute_subject: otu)).to be_truthy
end
end
end
specify 'a valid record can be created' do
expect(InternalAttribute.create(predicate: predicate, value: '1234', attribute_subject: otu)).to be_truthy
end
specify 'non-persisted data attribute with non-persisted predicate' do
internal_attribute.value = '1234'
otu
internal_attribute.attribute_subject = otu
new_predicate = FactoryBot.build(:valid_predicate)
internal_attribute.predicate = new_predicate
[new_predicate, internal_attribute].each {|o| o.save!}
end
specify '#predicate returns' do
i = InternalAttribute.create(predicate: predicate, value: '1234', attribute_subject: otu)
expect(i.predicate).to eq(predicate)
i.reload
expect(i.predicate).to eq(predicate)
end
specify 'a valid record can be created with reference to superclass' do
i = DataAttribute.create(
controlled_vocabulary_term_id: predicate.id,
type: 'InternalAttribute',
value: '1234', attribute_subject: otu)
expect(i.valid?).to be(true)
end
  specify 'a record created via the superclass can be reloaded with its predicate' do
i = DataAttribute.create!(
controlled_vocabulary_term_id: predicate.id,
type: 'InternalAttribute',
value: '1234', attribute_subject: otu
)
expect(i.valid?).to be(true)
o = DataAttribute.find(i.id)
expect(o.predicate).to eq(i.predicate)
expect(o.predicate).to eq(predicate)
end
end
| 33.860759 | 142 | 0.703925 |
7957943b7d463741392cbc4cd5a6a3a73061508b | 1,991 | class Mosquitto < Formula
desc "Message broker implementing the MQTT protocol"
homepage "https://mosquitto.org/"
url "https://mosquitto.org/files/source/mosquitto-1.5.2.tar.gz"
sha256 "7e90ccfe95179cfe6bf7d7f725281dd83041f241a8f093c9d3883b926584de9c"
bottle do
sha256 "9f8a48f1841e761956a31628e8aff3ea38524756f7ddf7d916d5a4f340bfc4e4" => :mojave
sha256 "5bc4c7b72153aeeb107e4af15620ad99b5cc553e35ae77b640fa9c21082a1b7a" => :high_sierra
sha256 "cf5c2831ebe5680dc875f42864c805dbc8b2d7b5de35f6f7f626f9af09e50953" => :sierra
sha256 "d746ae9e41a0c61e1dbaddf0eb96215c9587add7a7913d8ec08669ce118ff2f8" => :el_capitan
end
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "libwebsockets"
depends_on "openssl"
def install
system "cmake", ".", *std_cmake_args, "-DWITH_WEBSOCKETS=ON"
system "make", "install"
end
def post_install
(var/"mosquitto").mkpath
end
def caveats; <<~EOS
mosquitto has been installed with a default configuration file.
You can make changes to the configuration by editing:
#{etc}/mosquitto/mosquitto.conf
EOS
end
plist_options :manual => "mosquitto -c #{HOMEBREW_PREFIX}/etc/mosquitto/mosquitto.conf"
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_sbin}/mosquitto</string>
<string>-c</string>
<string>#{etc}/mosquitto/mosquitto.conf</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<false/>
<key>WorkingDirectory</key>
<string>#{var}/mosquitto</string>
</dict>
</plist>
EOS
end
test do
quiet_system "#{sbin}/mosquitto", "-h"
assert_equal 3, $CHILD_STATUS.exitstatus
end
end
| 30.166667 | 106 | 0.691612 |
bb215d59c45f24f0808259d46fbe4490f7925fa1 | 12,914 | require 'puppet/file_serving/content'
require 'puppet/file_serving/metadata'
require 'puppet/file_serving/terminus_helper'
require 'puppet/http'
module Puppet
# Copy files from a local or remote source. This state *only* does any work
# when the remote file is an actual file; in that case, this state copies
# the file down. If the remote file is a dir or a link or whatever, then
# this state, during retrieval, modifies the appropriate other states
# so that things get taken care of appropriately.
Puppet::Type.type(:file).newparam(:source) do
attr_accessor :source, :local
desc <<-'EOT'
A source file, which will be copied into place on the local system. This
attribute is mutually exclusive with `content` and `target`. Allowed
values are:
* `puppet:` URIs, which point to files in modules or Puppet file server
mount points.
* Fully qualified paths to locally available files (including files on NFS
shares or Windows mapped drives).
* `file:` URIs, which behave the same as local file paths.
* `http:` URIs, which point to files served by common web servers.
The normal form of a `puppet:` URI is:
`puppet:///modules/<MODULE NAME>/<FILE PATH>`
This will fetch a file from a module on the Puppet master (or from a
local module when using Puppet apply). Given a `modulepath` of
`/etc/puppetlabs/code/modules`, the example above would resolve to
`/etc/puppetlabs/code/modules/<MODULE NAME>/files/<FILE PATH>`.
Unlike `content`, the `source` attribute can be used to recursively copy
directories if the `recurse` attribute is set to `true` or `remote`. If
a source directory contains symlinks, use the `links` attribute to
specify whether to recreate links or follow them.
_HTTP_ URIs cannot be used to recursively synchronize whole directory
trees. You cannot use `source_permissions` values other than `ignore`
because HTTP servers do not transfer any metadata that translates to
ownership or permission details.
The `http` source uses the server `Content-MD5` header as a checksum to
determine if the remote file has changed. If the server response does not
include that header, Puppet defaults to using the `Last-Modified` header.
Puppet will update the local file if the header is newer than the modified
time (mtime) of the local file.
Multiple `source` values can be specified as an array, and Puppet will
use the first source that exists. This can be used to serve different
files to different system types:
file { '/etc/nfs.conf':
source => [
"puppet:///modules/nfs/conf.${host}",
"puppet:///modules/nfs/conf.${operatingsystem}",
'puppet:///modules/nfs/conf'
]
}
Alternately, when serving directories recursively, multiple sources can
be combined by setting the `sourceselect` attribute to `all`.
EOT
validate do |sources|
sources = [sources] unless sources.is_a?(Array)
sources.each do |source|
next if Puppet::Util.absolute_path?(source)
begin
uri = URI.parse(Puppet::Util.uri_encode(source))
rescue => detail
self.fail Puppet::Error, "Could not understand source #{source}: #{detail}", detail
end
self.fail "Cannot use relative URLs '#{source}'" unless uri.absolute?
self.fail "Cannot use opaque URLs '#{source}'" unless uri.hierarchical?
unless %w{file puppet http https}.include?(uri.scheme)
self.fail "Cannot use URLs of type '#{uri.scheme}' as source for fileserving"
end
end
end
SEPARATOR_REGEX = [Regexp.escape(File::SEPARATOR.to_s), Regexp.escape(File::ALT_SEPARATOR.to_s)].join
munge do |sources|
sources = [sources] unless sources.is_a?(Array)
sources.map do |source|
source = self.class.normalize(source)
if Puppet::Util.absolute_path?(source)
# CGI.unescape will butcher properly escaped URIs
uri_string = Puppet::Util.path_to_uri(source).to_s
# Ruby 1.9.3 and earlier have a bug in URI:
# to_s returns an ASCII string despite UTF-8 fragments;
# since it's escaped, it's safe to universally call encode.
# URI.unescape always returns strings in the original encoding
URI.unescape(uri_string.encode(Encoding::UTF_8))
else
source
end
end
end
def self.normalize(source)
source.sub(/[#{SEPARATOR_REGEX}]+$/, '')
end
def change_to_s(currentvalue, newvalue)
# newvalue = "{md5}#{@metadata.checksum}"
if resource.property(:ensure).retrieve == :absent
return "creating from source #{metadata.source} with contents #{metadata.checksum}"
else
return "replacing from source #{metadata.source} with contents #{metadata.checksum}"
end
end
def checksum
metadata && metadata.checksum
end
# Copy the values from the source to the resource. Yay.
def copy_source_values
devfail "Somehow got asked to copy source values without any metadata" unless metadata
# conditionally copy :checksum
if metadata.ftype != "directory" && !(metadata.ftype == "link" && metadata.links == :manage)
copy_source_value(:checksum)
end
# Take each of the stats and set them as states on the local file
# if a value has not already been provided.
[:owner, :mode, :group].each do |metadata_method|
next if metadata_method == :owner and !Puppet.features.root?
next if metadata_method == :group and !Puppet.features.root?
case resource[:source_permissions]
when :ignore, nil
next
when :use_when_creating
next if Puppet::FileSystem.exist?(resource[:path])
end
copy_source_value(metadata_method)
end
if resource[:ensure] == :absent
# We know all we need to
elsif metadata.ftype != "link"
resource[:ensure] = metadata.ftype
elsif resource[:links] == :follow
resource[:ensure] = :present
else
resource[:ensure] = "link"
resource[:target] = metadata.destination
end
end
attr_writer :metadata
# Provide, and retrieve if necessary, the metadata for this file. Fail
# if we can't find data about this host, and fail if there are any
# problems in our query.
def metadata
@metadata ||= resource.catalog.metadata[resource.title]
return @metadata if @metadata
return nil unless value
value.each do |source|
begin
options = {
:environment => resource.catalog.environment_instance,
:links => resource[:links],
:checksum_type => resource[:checksum],
:source_permissions => resource[:source_permissions]
}
data = Puppet::FileServing::Metadata.indirection.find(source, options)
if data
@metadata = data
@metadata.source = source
break
end
rescue => detail
self.fail Puppet::Error, "Could not retrieve file metadata for #{source}: #{detail}", detail
end
end
self.fail "Could not retrieve information from environment #{resource.catalog.environment} source(s) #{value.join(", ")}" unless @metadata
@metadata
end
def local?
found? and scheme == "file"
end
def full_path
Puppet::Util.uri_to_path(uri) if found?
end
def server?
uri && uri.host && !uri.host.empty?
end
def server
server? ? uri.host : Puppet.settings[:server]
end
def port
(uri and uri.port) or Puppet.settings[:masterport]
end
def uri
@uri ||= URI.parse(Puppet::Util.uri_encode(metadata.source))
end
def write(file)
resource.parameter(:checksum).sum_stream { |sum|
each_chunk_from { |chunk|
sum << chunk
file.print chunk
}
}
end
private
def scheme
(uri and uri.scheme)
end
def found?
! (metadata.nil? or metadata.ftype.nil?)
end
def copy_source_value(metadata_method)
param_name = (metadata_method == :checksum) ? :content : metadata_method
if resource[param_name].nil? or resource[param_name] == :absent
if Puppet::Util::Platform.windows? && [:owner, :group, :mode].include?(metadata_method)
devfail "Should not have tried to use source owner/mode/group on Windows"
end
value = metadata.send(metadata_method)
# Force the mode value in file resources to be a string containing octal.
value = value.to_s(8) if param_name == :mode && value.is_a?(Numeric)
resource[param_name] = value
if (metadata_method == :checksum)
# If copying checksum, also copy checksum_type
resource[:checksum] = metadata.checksum_type
end
end
end
def each_chunk_from(&block)
if Puppet[:default_file_terminus] == :file_server && scheme == 'puppet' && (uri.host.nil? || uri.host.empty?)
chunk_file_from_disk(metadata.path, &block)
elsif local?
chunk_file_from_disk(full_path, &block)
else
chunk_file_from_source(&block)
end
end
def chunk_file_from_disk(local_path)
File.open(local_path, "rb") do |src|
while chunk = src.read(8192) #rubocop:disable Lint/AssignmentInCondition
yield chunk
end
end
end
def get_from_content_uri_source(url, &block)
session = Puppet.lookup(:http_session)
api = session.route_to(:fileserver, url: url)
api.get_static_file_content(
path: URI.unescape(url.path),
environment: resource.catalog.environment_instance.to_s,
code_id: resource.catalog.code_id,
&block
)
end
def get_from_source_uri_source(url, &block)
session = Puppet.lookup(:http_session)
api = session.route_to(:fileserver, url: url)
api.get_file_content(
path: URI.unescape(url.path),
environment: resource.catalog.environment_instance.to_s,
&block
)
end
def get_from_http_source(url, &block)
client = Puppet.runtime['http']
client.get(url) do |response|
raise Puppet::HTTP::ResponseError.new(response) unless response.success?
response.read_body(&block)
end
end
def chunk_file_from_source(&block)
if uri.scheme =~ /^https?/
get_from_http_source(uri, &block)
elsif metadata.content_uri
content_url = URI.parse(Puppet::Util.uri_encode(metadata.content_uri))
get_from_content_uri_source(content_url, &block)
else
get_from_source_uri_source(uri, &block)
end
rescue Puppet::HTTP::ResponseError => e
handle_response_error(e.response)
end
def handle_response_error(response)
message = "Error #{response.code} on SERVER: #{response.body.empty? ? response.reason : response.body}"
raise Net::HTTPError.new(message, response.nethttp)
end
end
Puppet::Type.type(:file).newparam(:source_permissions) do
desc <<-'EOT'
Whether (and how) Puppet should copy owner, group, and mode permissions from
the `source` to `file` resources when the permissions are not explicitly
specified. (In all cases, explicit permissions will take precedence.)
Valid values are `use`, `use_when_creating`, and `ignore`:
* `ignore` (the default) will never apply the owner, group, or mode from
the `source` when managing a file. When creating new files without explicit
permissions, the permissions they receive will depend on platform-specific
behavior. On POSIX, Puppet will use the umask of the user it is running as.
On Windows, Puppet will use the default DACL associated with the user it is
running as.
* `use` will cause Puppet to apply the owner, group,
and mode from the `source` to any files it is managing.
* `use_when_creating` will only apply the owner, group, and mode from the
`source` when creating a file; existing files will not have their permissions
overwritten.
EOT
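# Illustrative only: this manifest snippet is an assumption added for clarity,
# not part of the original documentation above.
#   file { '/etc/app/app.conf':
#     source             => 'puppet:///modules/app/app.conf',
#     source_permissions => 'use_when_creating',
#   }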
defaultto :ignore
newvalues(:use, :use_when_creating, :ignore)
munge do |value|
value = value ? value.to_sym : :ignore
if @resource.file && @resource.line && value != :ignore
#TRANSLATORS "source_permissions" is a parameter name and should not be translated
Puppet.puppet_deprecation_warning(_("The `source_permissions` parameter is deprecated. Explicitly set `owner`, `group`, and `mode`."), file: @resource.file, line: @resource.line)
end
value
end
end
end
| 35.872222 | 186 | 0.651928 |
1cf6f85cc56b88306ef37377654e0428dfe5a0f7 | 507 | require 'spec_helper'
describe Nominatim::Polygon do
let(:polygon) { Nominatim::Polygon.new([[-1.816513, 52.5487566], [-1.8164913, 52.548824], [-1.8164685, 52.5488213]]) }
it 'sets coordinates correctly' do
polygon.coordinates.first.lat.should eq -1.816513
polygon.coordinates.first.lon.should eq 52.5487566
end
describe '#coordinates' do
it 'returns an array of coordinates' do
polygon.coordinates.each do |p|
p.should be_a Nominatim::Point
end
end
end
end | 26.684211 | 120 | 0.692308 |
bf91f0a773800c228e673bced9ede28cbf1403b3 | 381 | #
# hazel/collection_base.rb
# vr 1.0
module Hazel
class CollectionBase
def initialize
@objs = []
end
def add_obj(new_object)
@objs.push(new_object)
return self
end
def rem_obj(new_object)
@objs.delete(new_object)
return self
end
def objs
return @objs
end
def clear
@objs.clear
end
end
end | 12.7 | 30 | 0.593176 |
f7afa6f7466938e8eeaaaae54bf88643d709f41c | 2,713 | require "lib/tag_cloud"
module BlogHelpers
def _get_path(current_page)
"https://developer.mypurecloud.com/blog#{current_page.url}"
end
def email_share_link(current_page)
"mailto:?&subject=#{current_page.metadata[:page][:title]}&body=#{_get_path(current_page)}"
end
def twitter_share_link(current_page)
message = "#{current_page.metadata[:page][:title]} #{_get_path(current_page)} via @PureCloud_Dev"
"https://twitter.com/home?status=#{URI.escape(message)}"
end
def google_share_link(current_page)
"https://plus.google.com/share?url=#{_get_path(current_page)}"
end
def linkedin_share_link(current_page)
"https://www.linkedin.com/shareArticle?mini=true&url=#{_get_path(current_page)}&title=#{URI.escape(current_page.metadata[:page][:title])}&summary=&source="
end
def get_blog_author(email)
allAuthors = data.authors
allAuthors.each do |author|
author.imagename = "authorimages/" + email.gsub(/\W/, '') + ".png"
return author if author.email == email
end
return nil
end
def tag_cloud(options = {})
[].tap do |html|
TagCloud.new(options).render(blog.tags) do |tag, size, unit|
html << link_to(tag, "/blog" + tag_path(tag), style: "font-size: #{size}#{unit}")
end
end.join(" ")
end
def raise_error(message)
puts message.red
raise message
end
def lint_page(current_page)
#check page for required properties
[:title, :date, :tags, :author].each do |property|
raise_error("Blog page '#{current_page.metadata[:page][:title]}' is missing property #{property}. See 'Required Properties' section of the README") if current_page.metadata[:page][property] == nil
end
#validate author config
author_email = current_page.metadata[:page][:author]
#validate email address
raise_error("Author email address is in an incorrect format" ) unless author_email =~ /\A[\w+\-.]+@[a-z\d\-.]+\.[a-z]+\z/i
author_data = get_blog_author author_email
raise_error("Author data not found in data/authors.yml" ) if author_data == nil
[:email, :name, :bio].each do |property|
raise_error("Author profile is missing property #{property}. See 'Author Bios' section of the README") if author_data[property] == nil
end
raise_error("Author image '#{author_data.imagename}' not found in source/authors" ) unless File.exists?(File.join(File.dirname(__FILE__), "..", 'source/', author_data.imagename))
return
end
end
| 34.341772 | 209 | 0.632879 |
915ab64fa032e5764e174b88b6e03977335fa1ff | 3,375 | require 'msf/core'
###
#
# This class is here to implement advanced features for osx-based
# payloads. OSX payloads are expected to include this module if
# they want to support these features.
#
###
module Msf::Payload::Osx
#
# This mixin is chained within payloads that target the OSX platform.
# It provides special prepends, to support things like chroot and setuid.
#
def initialize(info = {})
ret = super(info)
register_advanced_options(
[
Msf::OptBool.new('PrependSetresuid',
[
false,
"Prepend a stub that executes the setresuid(0, 0, 0) system call",
"false"
]
),
Msf::OptBool.new('PrependSetreuid',
[
false,
"Prepend a stub that executes the setreuid(0, 0) system call",
"false"
]
),
Msf::OptBool.new('PrependSetuid',
[
false,
"Prepend a stub that executes the setuid(0) system call",
"false"
]
),
Msf::OptBool.new('AppendExit',
[
false,
"Append a stub that executes the exit(0) system call",
"false"
]
),
], Msf::Payload::Osx)
ret
end
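# Usage sketch (assumed, not from the original source): a payload module that
# includes this mixin would typically toggle the options registered above via
# the datastore before generation, e.g.
#   datastore['PrependSetuid'] = true   # prefixes the setuid(0) stub
#   datastore['AppendExit']    = true   # appends the exit(0) stub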
#
# Overload the generate() call to prefix our stubs
#
def generate(*args)
# Call the real generator to get the payload
buf = super(*args)
pre = ''
app = ''
test_arch = [ *(self.arch) ]
# Handle all x86 code here
if (test_arch.include?(ARCH_X86))
# Prepend
if (datastore['PrependSetresuid'])
# setresuid(0, 0, 0)
pre << "\x31\xc0" +# xorl %eax,%eax #
"\x50" +# pushl %eax #
"\x50" +# pushl %eax #
"\x50" +# pushl %eax #
"\x50" +# pushl %eax #
"\x66\xb8\x37\x01" +# movw $0x0137,%ax #
"\xcd\x80" # int $0x80 #
end
if (datastore['PrependSetreuid'])
# setreuid(0, 0)
pre << "\x31\xc0" +# xorl %eax,%eax #
"\x50" +# pushl %eax #
"\x50" +# pushl %eax #
"\x50" +# pushl %eax #
"\xb0\x7e" +# movb $0x7e,%al #
"\xcd\x80" # int $0x80 #
end
if (datastore['PrependSetuid'])
# setuid(0)
pre << "\x31\xc0" +# xorl %eax,%eax #
"\x50" +# pushl %eax #
"\x50" +# pushl %eax #
"\xb0\x17" +# movb $0x17,%al #
"\xcd\x80" # int $0x80 #
end
# Append
if (datastore['AppendExit'])
# exit(0)
app << "\x31\xc0" +# xorl %eax,%eax #
"\x50" +# pushl %eax #
"\xb0\x01" +# movb $0x01,%al #
"\xcd\x80" # int $0x80 #
end
end
return (pre + buf + app)
end
end
| 28.601695 | 75 | 0.397926 |
e98f0dad5c8afddc0040068f1daf5313d85eaf1b | 9,505 | # encoding: utf-8
require 'test_helper'
# All remote tests require Canada Post development environment credentials
class RemoteCanadaPostPWSPlatformTest < Minitest::Test
include ActiveShipping::Test::Credentials
include ActiveShipping::Test::Fixtures
def setup
@login = credentials(:canada_post_pws_platform).merge(endpoint: "https://ct.soa-gw.canadapost.ca/")
# 100 grams, 93 cm long, 10 cm diameter, cylinders have different volume calculations
# @pkg1 = Package.new(1000, [93,10], :value => 10.00)
@pkg1 = Package.new(10, nil, :value => 10.00)
@pkg2 = Package.new(10, [20.0, 10.0, 1.0], :value => 10.00)
@line_item1 = line_item_fixture
@shipping_opts1 = {:dc => true, :cod => true, :cod_amount => 500.00, :cov => true, :cov_amount => 100.00,
:so => true, :pa18 => true}
@home_params = {
:name => "John Smith",
:company => "test",
:phone => "613-555-1212",
:address1 => "123 Elm St.",
:city => 'Ottawa',
:province => 'ON',
:country => 'CA',
:postal_code => 'K1P 1J1'
}
@home = Location.new(@home_params)
@dom_params = {
:name => "John Smith Sr.",
:company => "",
:phone => '123-123-1234',
:address1 => "5500 Oak Ave",
:city => 'Vancouver',
:province => 'BC',
:country => 'CA',
:postal_code => 'V5J 2T4'
}
@dest_params = {
:name => "Frank White",
:phone => '123-123-1234',
:address1 => '999 Wiltshire Blvd',
:city => 'Beverly Hills',
:state => 'CA',
:country => 'US',
:zip => '90210'
}
@dest = Location.new(@dest_params)
@dom_params = {
:name => "Mrs. Smith",
:company => "",
:phone => "604-555-1212",
:address1 => "5000 Oak St.",
:address2 => "",
:city => 'Vancouver',
:province => 'BC',
:country => 'CA',
:postal_code => 'V5J 2N2'
}
@intl_params = {
:name => "Mrs. Yamamoto",
:company => "",
:phone => "011-123-123-1234",
:address1 => "123 Yokohama Road",
:address2 => "",
:city => 'Tokyo',
:province => '',
:country => 'JP'
}
@usa_params = {
:name => "John Smith",
:company => "",
:phone => "555-555-5555",
:address1 => "123 Fake Street",
:address2 => "",
:city => 'New York',
:province => 'NY',
:country => 'US',
:zip => '12345'
}
@cp = CanadaPostPWS.new(@login)
@cp.logger = Logger.new(StringIO.new)
rescue NoCredentialsFound => e
skip(e.message)
end
def build_options
{ :customer_number => @login[:customer_number] }
end
def test_rates
rate_response = @cp.find_rates(@home_params, @dom_params, [@pkg1], build_options)
assert_kind_of ActiveShipping::RateResponse, rate_response
assert_kind_of ActiveShipping::RateEstimate, rate_response.rates.first
end
def test_rates_with_insurance_changes_price
rates = @cp.find_rates(@home_params, @dom_params, [@pkg1], build_options)
insured_rates = @cp.find_rates(@home_params, @dom_params, [@pkg1], build_options.merge(@shipping_opts1))
refute_equal rates.rates.first.price, insured_rates.rates.first.price
end
def test_rates_with_invalid_customer_raises_exception
opts = {:customer_number => "0000000000", :service => "DOM.XP"}
assert_raises(ResponseError) do
@cp.find_rates(@home_params, @dom_params, [@pkg1], opts)
end
end
def test_rates_USA_returns_small_packet_rates
rates = @cp.find_rates(@home_params, @usa_params, [@pkg1], build_options, @pkg2, ['USA.SP.AIR'])
assert_equal CPPWSRateResponse, rates.class
assert_equal RateEstimate, rates.rates.first.class
assert rates.rates.map(&:service_code).include? "USA.SP.AIR"
end
def test_tracking
pin = "1371134583769923" # valid pin
response = @cp.find_tracking_info(pin, build_options)
assert_equal 'Xpresspost', response.service_name
assert response.expected_date.is_a?(Date)
assert response.customer_number
assert_equal 13, response.shipment_events.count
end
def test_tracking_invalid_pin_raises_exception
pin = "000000000000000"
exception = assert_raises(ResponseError) do
@cp.find_tracking_info(pin, build_options)
end
assert_equal "No Tracking", exception.message
end
def test_create_shipment_with_invalid_customer_raises_exception
opts = {:customer_number => "0000000000", :service => "DOM.XP"}
assert_raises(ResponseError) do
@cp.create_shipment(@home_params, @dom_params, @pkg1, @line_item1, opts)
end
end
def test_register_merchant_token_format
response = @cp.register_merchant
assert response.is_a?(CPPWSRegisterResponse)
assert_match(/^(\d|[a-f]){22}$/, response.token_id)
end
def test_merchant_details_empty_details
register_response = @cp.register_merchant
details_response = @cp.retrieve_merchant_details(:token_id => register_response.token_id)
assert_kind_of ActiveShipping::CPPWSMerchantDetailsResponse, details_response
assert_equal '0000000000', details_response.customer_number
assert_equal '1234567890', details_response.contract_number
assert_equal '0000000000000000', details_response.username
assert_equal '1a2b3c4d5e6f7a8b9c0d12', details_response.password
end
def test_find_services_no_country
response = @cp.find_services(nil, build_options)
assert response
end
def test_find_services_country_JP
response = @cp.find_services('JP', build_options)
assert response
end
def test_find_services_invalid_country
exception = assert_raises(ResponseError) do
@cp.find_services('XX', build_options)
end
assert_equal "A valid destination country must be supplied.", exception.message
end
def test_find_service_options_no_country
assert response = @cp.find_service_options("INT.XP", nil, build_options)
assert_equal "INT.XP", response[:service_code]
assert_equal "Xpresspost International", response[:service_name]
assert_equal 5, response[:options].size
assert_equal "COV", response[:options][0][:code]
assert_equal false, response[:options][0][:required]
assert_equal true, response[:options][0][:qualifier_required]
assert_equal 5000, response[:options][0][:qualifier_max]
assert_equal 0, response[:restrictions][:min_weight]
assert_equal 30000, response[:restrictions][:max_weight]
assert_equal 0.1, response[:restrictions][:min_length]
assert_equal 150, response[:restrictions][:max_length]
assert_equal 0.1, response[:restrictions][:min_height]
assert_equal 150, response[:restrictions][:max_height]
assert_equal 0.1, response[:restrictions][:min_width]
assert_equal 150, response[:restrictions][:max_width]
end
def test_find_service_options
assert response = @cp.find_service_options("INT.XP", "JP", build_options)
assert_equal "INT.XP", response[:service_code]
assert_equal "Xpresspost International", response[:service_name]
assert_equal 4, response[:options].size
assert_equal "COV", response[:options][0][:code]
assert_equal false, response[:options][0][:required]
assert_equal true, response[:options][0][:qualifier_required]
assert_equal 1000, response[:options][0][:qualifier_max]
assert_equal 0, response[:restrictions][:min_weight]
assert_equal 30000, response[:restrictions][:max_weight]
assert_equal 0.1, response[:restrictions][:min_length]
assert_equal 150, response[:restrictions][:max_length]
assert_equal 0.1, response[:restrictions][:min_height]
assert_equal 150, response[:restrictions][:max_height]
assert_equal 0.1, response[:restrictions][:min_width]
assert_equal 150, response[:restrictions][:max_width]
end
def test_find_option_details
assert response = @cp.find_option_details("SO", build_options)
assert_equal "SO", response[:code]
assert_equal "Signature option", response[:name]
assert_equal "FEAT", response[:class]
assert_equal true, response[:prints_on_label]
assert_equal false, response[:qualifier_required]
assert_equal 1, response[:conflicting_options].size
assert_equal "LAD", response[:conflicting_options][0]
assert_equal 1, response[:prerequisite_options].size
assert_equal "DC", response[:prerequisite_options][0]
end
def test_find_option_details_french
cp = CanadaPostPWS.new(@login.merge(:language => 'fr'))
assert response = cp.find_option_details("LAD", build_options)
assert_equal "LAD", response[:code]
assert_equal "Laisser à la porte (pas d'avis)", response[:name]
end
def test_register_merchant
response = @cp.register_merchant
assert response.is_a?(CPPWSRegisterResponse)
assert_equal "1111111111111111111111", response.token_id
end
def test_merchant_details
token_id = "1111111111111111111111"
response = @cp.retrieve_merchant_details(:token_id => token_id)
assert response.is_a?(CPPWSMerchantDetailsResponse)
assert_equal "0000000000", response.customer_number
assert_equal "1234567890", response.contract_number
assert_equal "0000000000000000", response.username
assert_equal "1a2b3c4d5e6f7a8b9c0d12", response.password
assert_equal true, response.has_default_credit_card
end
end
| 36.278626 | 109 | 0.678906 |
e2383ea337bfd29147904bda4fa78d61b4a26616 | 265 | # frozen_string_literal: true
require "faraday"
require "json"
require_relative "sentilink/client"
require_relative "sentilink/configuration"
require_relative "sentilink/version"
module Sentilink
class Error < StandardError; end
# Your code goes here...
end
| 18.928571 | 42 | 0.796226 |
b91dea53ea6e1de4ed39f5e339f4e3899b575ff1 | 696 | class NewRelic::MetricParser::Frontend < NewRelic::MetricParser::MetricParser
=begin
def action_name
if segments[-1] =~ /^\(other\)$/
'(template only)'
else
segments[-1]
end
end
=end
def developer_name
url
#"#{controller_name}##{action_name}"
end
def short_name
# standard controller actions
if segments.length > 1
url
else
'All Frontend Urls'
end
end
def url
'/' + segments[1..-1].join('/')
end
# this is used to match transaction traces to controller actions.
# TT's don't have a preceding slash :P
def tt_path
segments[1..-1].join('/')
end
def call_rate_suffix
'rpm'
end
end
| 16.97561 | 77 | 0.609195 |
5dc852c7948ac99857b5c53f939ec02201a4e308 | 151 | require 'spec_helper'
describe 'simplib::sudoers' do
it { is_expected.to compile.with_all_deps }
it { is_expected.to create_class('sudo') }
end
| 16.777778 | 45 | 0.735099 |
8741af18be8dd58b0eb62645b2310a506f3e811d | 704 | class PermissionRequest < ApplicationRecord
## PermissionRequests have...
## - user
## - level_requested (admin or editor)
## - reviewed (boolean)
## - granted (boolean)
## - reviewed_by (user)
## - reviewed_on (datetime)
## - created_at (datetime)
## - updated_at (datetime)
belongs_to :user
belongs_to :reviewed_by, class_name: "User", optional: true
def grant_permission!(reviewer)
  set_grant_status!(true, reviewer)
end
def decline_permission!(reviewer)
  set_grant_status!(false, reviewer)
end
def set_grant_status!(status, reviewer)
  # use self. so the ActiveRecord attributes are assigned instead of local variables
  self.reviewed_by = reviewer
  self.granted = status
  self.reviewed = true
  self.reviewed_on = Time.now
  save!
end
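# Minimal usage sketch (assumed, not part of the original model):
#   request = PermissionRequest.find(params[:id])
#   request.grant_permission!(current_user)   # marks the request reviewed and granted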
end | 22 | 61 | 0.677557 |
5d8d3c3b36d2943ec8cb7dd2cf37c35c777865ce | 1,743 | require 'abstract_unit'
class TestTestMailer < ActionMailer::Base
end
class ClearTestDeliveriesMixinTest < ActiveSupport::TestCase
include ActionMailer::TestCase::ClearTestDeliveries
def before_setup
ActionMailer::Base.delivery_method, @original_delivery_method = :test, ActionMailer::Base.delivery_method
ActionMailer::Base.deliveries << 'better clear me, setup'
super
end
def after_teardown
super
assert_equal [], ActionMailer::Base.deliveries
ActionMailer::Base.delivery_method = @original_delivery_method
end
def test_deliveries_are_cleared_on_setup_and_teardown
assert_equal [], ActionMailer::Base.deliveries
ActionMailer::Base.deliveries << 'better clear me, teardown'
end
end
class MailerDeliveriesClearingTest < ActionMailer::TestCase
def before_setup
ActionMailer::Base.deliveries << 'better clear me, setup'
super
end
def after_teardown
super
assert_equal [], ActionMailer::Base.deliveries
end
def test_deliveries_are_cleared_on_setup_and_teardown
assert_equal [], ActionMailer::Base.deliveries
ActionMailer::Base.deliveries << 'better clear me, teardown'
end
end
class CrazyNameMailerTest < ActionMailer::TestCase
tests TestTestMailer
def test_set_mailer_class_manual
assert_equal TestTestMailer, self.class.mailer_class
end
end
class CrazySymbolNameMailerTest < ActionMailer::TestCase
tests :test_test_mailer
def test_set_mailer_class_manual_using_symbol
assert_equal TestTestMailer, self.class.mailer_class
end
end
class CrazyStringNameMailerTest < ActionMailer::TestCase
tests 'test_test_mailer'
def test_set_mailer_class_manual_using_string
assert_equal TestTestMailer, self.class.mailer_class
end
end
| 26.014925 | 109 | 0.794607 |
26624842722c588b669fe2e775a3d1613f986879 | 997 | # Challenge name: Add Digits
#
# Given a non-negative integer num, repeatedly add all its digits until the result has only one digit.
#
# Example:
#
# Input: 38
# Output: 2
# Explanation: The process is like: 3 + 8 = 11, 1 + 1 = 2.
# Since 2 has only one digit, return it.
#
# Follow up:
# Could you do it without any loop/recursion in O(1) runtime?
# @param {Integer} num
# @return {Integer}
#
# Approach 1: Recursion
#
# Time complexity: O(n)
#
def add_digits(num)
if num.to_s.length < 2
return num
end
digits_to_sum = num.to_s.split('')
sum = 0
digits_to_sum.each do |num|
sum += num.to_i
end
add_digits(sum)
end
puts(add_digits(38))
# # => 2
puts(add_digits(284))
# # => 5
#
# Approach 2: Without recursion
#
def add_digits(num)
until num.to_s.length < 2
digits_to_sum = num.to_s.split('')
num = 0
digits_to_sum.each do |number|
num += number.to_i
end
end
num
end
puts(add_digits(38))
# => 2
puts(add_digits(284))
# => 5
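#
# Approach 3 (the follow-up): no loop/recursion, O(1) runtime.
# A sketch based on the standard digital-root identity (the digit sum of n is
# congruent to n mod 9); this approach is an addition to the notes above.
#
def add_digits(num)
  return 0 if num == 0
  1 + (num - 1) % 9
end

puts(add_digits(38))
# => 2
puts(add_digits(284))
# => 5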
| 16.080645 | 102 | 0.637914 |
1ce78a3705300c3c97c6e822e1e4bf4934723a7e | 46 | module LazopApiClient
VERSION = "1.2.6"
end
| 11.5 | 21 | 0.717391 |
01b68268d79de5dfc0fc2c3c9d60d78f4d52b7a7 | 2,733 | module Sorcery
module Controller
module Submodules
# The Remember Me submodule takes care of setting the user's cookie so that he will
# be automatically logged in to the site on every visit,
# until the cookie expires.
# See Sorcery::Model::Submodules::RememberMe for configuration options.
module RememberMe
def self.included(base)
base.send(:include, InstanceMethods)
Config.login_sources << :login_from_cookie
Config.after_login << :remember_me_if_asked_to
Config.after_logout << :forget_me!
end
module InstanceMethods
# This method sets the cookie and calls the user to save the token and the expiration to db.
def remember_me!
current_user.remember_me!
set_remember_me_cookie!(current_user)
end
# Clears the cookie and clears the token from the db.
def forget_me!
@current_user.forget_me!
cookies.delete(:remember_me_token, :domain => Config.cookie_domain)
end
# Override.
# logins a user instance, and optionally remembers him.
def auto_login(user, should_remember = false)
session[:user_id] = user.id
@current_user = user
remember_me! if should_remember
end
protected
# calls remember_me! if a third credential was passed to the login method.
# Runs as a hook after login.
def remember_me_if_asked_to(user, credentials)
remember_me! if ( credentials.size == 3 && credentials[2] && credentials[2] != "0" )
end
# Checks the cookie for a remember me token, tries to find a user with that token
# and logs the user in if found.
# Runs as a login source. See 'current_user' method for how it is used.
def login_from_cookie
user = cookies.signed[:remember_me_token] && user_class.find_by_remember_me_token(cookies.signed[:remember_me_token])
if user && user.remember_me_token?
set_remember_me_cookie!(user)
session[:user_id] = user.id
@current_user = user
else
@current_user = false
end
end
def set_remember_me_cookie!(user)
cookies.signed[:remember_me_token] = {
:value => user.send(user.sorcery_config.remember_me_token_attribute_name),
:expires => user.send(user.sorcery_config.remember_me_token_expires_at_attribute_name),
:httponly => true,
:domain => Config.cookie_domain
}
end
end
end
end
end
end | 37.958333 | 129 | 0.612514 |
e89a8849694589ca7f9153fe20744efc8692e1a0 | 1,425 | require 'test_helper'
module Enumerize
class AttributeMapTest < MiniTest::Spec
subject { AttributeMap.new }
def make_attr(name)
Attribute.new(nil, name, :in => %w[a b])
end
it 'empty when no attrs' do
subject.must_be_empty
end
it 'not empty when attr added' do
subject << make_attr(:a)
subject.wont_be_empty
end
it 'iterates over added attrs' do
attr_1 = make_attr(:a)
attr_2 = make_attr(:b)
subject << attr_1
subject << attr_2
count = 0
actual = []
subject.each do |element|
count += 1
actual << element
end
count.must_equal 2
actual.must_equal [attr_1, attr_2]
end
it 'reads attribute by name' do
attr = make_attr(:a)
subject << attr
subject[:a].must_equal attr
end
it 'reads attribute by name using string' do
attr = make_attr(:a)
subject << attr
subject['a'].must_equal attr
end
it 'updates dependants' do
attr = make_attr(:a)
dependant = MiniTest::Mock.new
dependant.expect(:<<, nil, [attr])
subject.add_dependant dependant
subject << attr
dependant.verify
end
it 'adds attrs to dependant' do
attr = make_attr(:a)
subject << attr
dependant = AttributeMap.new
subject.add_dependant dependant
dependant[:a].must_equal attr
end
end
end
| 20.652174 | 48 | 0.600702 |
d561ce34c7786e5e54fba8e3cb74b2361a2bfe95 | 531 | # Recall that hashes are collections of key-value pairs,LHS is key,RHS is value
breakfast = {
"bacon" => "tasty",
"eggs" => "tasty",
"oatmeal" => "healthy",
"OJ" => "juicy"
}
# Remember that keys must be unique, but values can repeat.
# That’s why we can have more than one key share the value “tasty.”
# We can create hashes several ways, but two of the most popular are:
# 1. Hash literal notation:
breakfast = {"bacon" => "tasty"}
# 2. Hash constructor notation:
dinner = Hash.new
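# Hash.new can also take a default value that is returned for missing keys.
# A quick illustrative sketch (these names are examples, not from the notes above):
#   snacks = Hash.new("not stocked")
#   snacks["crisps"]   # => "not stocked" (no such key yet)
# Entries are then added one key at a time: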
dinner["eggs"] = "healthy" | 29.5 | 80 | 0.664783 |
032a9e4d571a6a911c638ccffc2b65738aba7f6a | 1,734 | # frozen_string_literal: true
require_relative 'lib/rubocop/version'
Gem::Specification.new do |s|
s.name = 'rubocop'
s.version = RuboCop::Version::STRING
s.platform = Gem::Platform::RUBY
s.required_ruby_version = '>= 2.5.0'
s.authors = ['Bozhidar Batsov', 'Jonas Arvidsson', 'Yuji Nakayama']
s.description = <<-DESCRIPTION
RuboCop is a Ruby code style checking and code formatting tool.
It aims to enforce the community-driven Ruby Style Guide.
DESCRIPTION
s.email = '[email protected]'
s.files = Dir.glob('{assets,config,lib}/**/*', File::FNM_DOTMATCH)
s.bindir = 'exe'
s.executables = ['rubocop']
s.extra_rdoc_files = ['LICENSE.txt', 'README.md']
s.homepage = 'https://github.com/rubocop/rubocop'
s.licenses = ['MIT']
s.summary = 'Automatic Ruby code style checking tool.'
s.metadata = {
'homepage_uri' => 'https://rubocop.org/',
'changelog_uri' => 'https://github.com/rubocop/rubocop/blob/master/CHANGELOG.md',
'source_code_uri' => 'https://github.com/rubocop/rubocop/',
'documentation_uri' => "https://docs.rubocop.org/rubocop/#{RuboCop::Version.document_version}/",
'bug_tracker_uri' => 'https://github.com/rubocop/rubocop/issues'
}
s.add_runtime_dependency('parallel', '~> 1.10')
s.add_runtime_dependency('parser', '>= 3.0.0.0')
s.add_runtime_dependency('rainbow', '>= 2.2.2', '< 4.0')
s.add_runtime_dependency('regexp_parser', '>= 1.8', '< 3.0')
s.add_runtime_dependency('rexml')
s.add_runtime_dependency('rubocop-ast', '>= 1.7.0', '< 2.0')
s.add_runtime_dependency('ruby-progressbar', '~> 1.7')
s.add_runtime_dependency('unicode-display_width', '>= 1.4.0', '< 3.0')
s.add_development_dependency('bundler', '>= 1.15.0', '< 3.0')
end
| 39.409091 | 100 | 0.679931 |
ff3d56b5da52d0106eb33ffe54fd13e5612226fc | 1,033 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'tofulcrum/version'
Gem::Specification.new do |spec|
spec.name = "tofulcrum"
spec.version = Tofulcrum::VERSION
spec.authors = ["Zac McCormick"]
spec.email = ["[email protected]"]
spec.description = %q{Convert data to Fulcrum}
spec.summary = %q{Import data into Fulcrum from a CSV}
spec.homepage = "https://github.com/zhm/tofulcrum"
spec.license = "MIT"
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency "faraday", "~> 0.7.6"
spec.add_dependency "thor"
spec.add_dependency "fulcrum"
spec.add_dependency "axlsx"
spec.add_dependency "roo"
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
end
| 34.433333 | 74 | 0.663117 |
4a40ec0f421aa055f40e86ee1437822943d91ad5 | 17,591 | require 'puppet/util'
require 'puppet/util/cacher'
require 'monitor'
require 'puppet/parser/parser_factory'
# Just define it, so this class has fewer load dependencies.
class Puppet::Node
end
# Puppet::Node::Environment acts as a container for all configuration
# that is expected to vary between environments.
#
# ## Global variables
#
# The Puppet::Node::Environment uses a number of global variables.
#
# ### `$environment`
#
# The 'environment' global variable represents the current environment that's
# being used in the compiler.
#
# ### `$known_resource_types`
#
# The 'known_resource_types' global variable represents a singleton instance
# of the Puppet::Resource::TypeCollection class. The variable is discarded
# and regenerated if it is accessed by an environment that doesn't match the
# environment of the 'known_resource_types'
#
# This behavior of discarding the known_resource_types every time the
# environment changes is not ideal. In the best case this can cause valid data
# to be discarded and reloaded. If Puppet is being used with numerous
# environments then this penalty will be repeatedly incurred.
#
# In the worst case (#15106) demonstrates that if a different environment is
# accessed during catalog compilation, for whatever reason, the
# known_resource_types can be discarded which loses information that cannot
# be recovered and can cause a catalog compilation to completely fail.
#
# ## The root environment
#
# In addition to normal environments that are defined by the user, there is a
# special 'root' environment. It is defined as an instance variable on the
# Puppet::Node::Environment metaclass. The environment name is `*root*` and can
# be accessed by calling {Puppet::Node::Environment.root}.
#
# The primary purpose of the root environment is to contain parser functions
# that are not bound to a specific environment. The main case for this is for
# logging functions. Logging functions are attached to the 'root' environment
# when {Puppet::Parser::Functions.reset} is called.
class Puppet::Node::Environment
# This defines a mixin for classes that have an environment. It implements
# `environment` and `environment=` that respects the semantics of the
# Puppet::Node::Environment class
#
# @api public
module Helper
def environment
Puppet::Node::Environment.new(@environment)
end
def environment=(env)
if env.is_a?(String) or env.is_a?(Symbol)
@environment = env
else
@environment = env.name
end
end
end
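# A minimal sketch of the Helper mixin above (the class name here is made up
# for illustration):
#   class NodeLike
#     include Puppet::Node::Environment::Helper
#   end
#   node = NodeLike.new
#   node.environment = 'production'   # stores only the name
#   node.environment                  # => the memoized Environment for :production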
include Puppet::Util::Cacher
# @api private
def self.seen
@seen ||= {}
end
# Create a new environment with the given name, or return an existing one
#
# The environment class memoizes instances so that attempts to instantiate an
# environment with the same name as an existing environment will return the
# existing environment.
#
# @overload self.new(environment)
# @param environment [Puppet::Node::Environment]
# @return [Puppet::Node::Environment] the environment passed as the param,
# this is implemented so that a calling class can use strings or
# environments interchangeably.
#
# @overload self.new(string)
# @param string [String, Symbol]
# @return [Puppet::Node::Environment] An existing environment if it exists,
# else a new environment with that name
#
# @overload self.new()
# @return [Puppet::Node::Environment] The environment as set by
# Puppet.settings[:environment]
#
# @api public
def self.new(name = nil)
return name if name.is_a?(self)
name ||= Puppet.settings.value(:environment)
raise ArgumentError, "Environment name must be specified" unless name
symbol = name.to_sym
return seen[symbol] if seen[symbol]
obj = self.create(symbol,
split_path(Puppet.settings.value(:modulepath, symbol)),
Puppet.settings.value(:manifest, symbol))
seen[symbol] = obj
end
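# For illustration only (the environment name is hypothetical):
#   Puppet::Node::Environment.new('production').equal?(Puppet::Node::Environment.new(:production))
#   # => true, because both names normalize to :production and return the memoized instance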
# Create a new environment with the given name
#
# @param name [Symbol] the name of the environment
# @param modulepath [Array<String>] the list of paths from which to load modules
# @param manifest [String] the path to the manifest for the environment
# @return [Puppet::Node::Environment]
#
# @api public
def self.create(name, modulepath, manifest)
obj = self.allocate
obj.send(:initialize,
name,
expand_dirs(extralibs() + modulepath),
manifest)
obj
end
# Instantiate a new environment
#
# @note {Puppet::Node::Environment.new} is overridden to return memoized
# objects, so this will not be invoked with the normal Ruby initialization
# semantics.
#
# @param name [Symbol] The environment name
def initialize(name, modulepath, manifest)
@name = name
@modulepath = modulepath
@manifest = manifest
end
# Retrieve the environment for the current process.
#
# @note This should only be used when a catalog is being compiled.
#
# @api private
#
# @return [Puppet::Node::Environment] the currently set environment if one
# has been explicitly set, else it will return the '*root*' environment
def self.current
Puppet.deprecation_warning("Remove me.")
Puppet.lookup(:current_environment)
end
# @return [Puppet::Node::Environment] The `*root*` environment.
#
# This is only used for handling functions that are not attached to a
# specific environment.
#
# @api private
def self.root
@root ||= create(:'*root*', split_path(Puppet[:modulepath]), Puppet[:manifest])
end
# Clear all memoized environments and the 'current' environment
#
# @api private
def self.clear
seen.clear
$environment = nil
end
# @!attribute [r] name
# @api public
# @return [Symbol] the human readable environment name that serves as the
# environment identifier
attr_reader :name
# @api public
# @return [Array<String>] All directories present on disk in the modulepath
def modulepath
@modulepath.find_all do |p|
FileTest.directory?(p)
end
end
# @api public
# @return [Array<String>] All directories in the modulepath (even if they are not present on disk)
def full_modulepath
@modulepath
end
# @!attribute [r] manifest
# @api public
# @return [String] path to the manifest file or directory.
attr_reader :manifest
# Return an environment-specific Puppet setting.
#
# @api public
#
# @param param [String, Symbol] The environment setting to look up
# @return [Object] The resolved setting value
def [](param)
Puppet.settings.value(param, self.name)
end
# The current global TypeCollection
#
# @note The environment is loosely coupled with the {Puppet::Resource::TypeCollection}
# class. While there is a 1:1 relationship between an environment and a
# TypeCollection instance, there is only one TypeCollection instance
# available at any given time. It is stored in `$known_resource_types`.
# `$known_resource_types` is accessed as an instance method, but is global
# to all environment variables.
#
# @api public
# @return [Puppet::Resource::TypeCollection] The current global TypeCollection
def known_resource_types
# This makes use of short circuit evaluation to get the right thread-safe
# per environment semantics with an efficient most common case; we almost
# always just return our thread's known-resource types. Only at the start
# of a compilation (after our thread var has been set to nil) or when the
# environment has changed or when the known resource types have become stale
# do we delve deeper.
$known_resource_types = nil if $known_resource_types &&
($known_resource_types.environment != self || !@known_resource_types_being_imported && $known_resource_types.stale?)
$known_resource_types ||=
if @known_resource_types.nil? or @known_resource_types.require_reparse?
#set the global variable $known_resource_types immediately as it will be queried
#recursively from the parser which would set it anyway, just executing more code in vain
@known_resource_types = $known_resource_types = Puppet::Resource::TypeCollection.new(self)
#avoid an infinite recursion (called from the parser) if Puppet[:filetimeout] is set to -1 and
#$known_resource_types.stale? always returns true; let's set a flag that we're importing
#so if this method is called recursively we'll skip testing the stale status
begin
@known_resource_types_being_imported = true
@known_resource_types.import_ast(perform_initial_import, '')
ensure
@known_resource_types_being_imported = false
end
@known_resource_types
else
@known_resource_types
end
end
# Yields each modules' plugin directory if the plugin directory (modulename/lib)
# is present on the filesystem.
#
# @yield [String] Yields the plugin directory from each module to the block.
# @api public
def each_plugin_directory(&block)
modules.map(&:plugin_directory).each do |lib|
lib = Puppet::Util::Autoload.cleanpath(lib)
yield lib if File.directory?(lib)
end
end
# Locate a module instance by the module name alone.
#
# @api public
#
# @param name [String] The module name
# @return [Puppet::Module, nil] The module if found, else nil
def module(name)
modules.find {|mod| mod.name == name}
end
# Locate a module instance by the full forge name (EG authorname/module)
#
# @api public
#
# @param forge_name [String] The module name
# @return [Puppet::Module, nil] The module if found, else nil
def module_by_forge_name(forge_name)
author, modname = forge_name.split('/')
found_mod = self.module(modname)
found_mod and found_mod.forge_name == forge_name ?
found_mod :
nil
end
# @!attribute [r] modules
# Return all modules for this environment in the order they appear in the
# modulepath.
# @note If multiple modules with the same name are present they will
# both be added, but methods like {#module} and {#module_by_forge_name}
# will return the first matching entry in this list.
# @note This value is cached so that the filesystem doesn't have to be
# re-enumerated every time this method is invoked, since that
# enumeration could be a costly operation and this method is called
# frequently. The cache expiry is determined by `Puppet[:filetimeout]`.
# @see Puppet::Util::Cacher.cached_attr
# @api public
# @return [Array<Puppet::Module>] All modules for this environment
cached_attr(:modules, Puppet[:filetimeout]) do
module_references = []
seen_modules = {}
modulepath.each do |path|
Dir.entries(path).each do |name|
warn_about_mistaken_path(path, name)
next if module_references.include?(name)
if not seen_modules[name]
module_references << {:name => name, :path => File.join(path, name)}
seen_modules[name] = true
end
end
end
module_references.collect do |reference|
begin
Puppet::Module.new(reference[:name], reference[:path], self)
rescue Puppet::Module::Error
nil
end
end.compact
end
# Generate a warning if the given directory in a module path entry is named `lib`.
#
# @api private
#
# @param path [String] The module directory containing the given directory
# @param name [String] The directory name
def warn_about_mistaken_path(path, name)
if name == "lib"
Puppet.debug("Warning: Found directory named 'lib' in module path ('#{path}/lib'); unless " +
"you are expecting to load a module named 'lib', your module path may be set " +
"incorrectly.")
end
end
# Modules broken out by directory in the modulepath
#
# @note This method _changes_ the current working directory while enumerating
# the modules. This seems rather dangerous.
#
# @api public
#
# @return [Hash<String, Array<Puppet::Module>>] A hash whose keys are file
# paths, and whose values is an array of Puppet Modules for that path
def modules_by_path
modules_by_path = {}
modulepath.each do |path|
Dir.chdir(path) do
module_names = Dir.glob('*').select do |d|
FileTest.directory?(d) && (File.basename(d) =~ /\A\w+(-\w+)*\Z/)
end
modules_by_path[path] = module_names.sort.map do |name|
Puppet::Module.new(name, File.join(path, name), self)
end
end
end
modules_by_path
end
# All module requirements for all modules in the environment modulepath
#
# @api public
#
# @comment This has nothing to do with an environment. It seems like it was
# stuffed into the first convenient class that vaguely involved modules.
#
# @example
# environment.module_requirements
# # => {
# # 'username/amodule' => [
# # {
# # 'name' => 'username/moduledep',
# # 'version' => '1.2.3',
# # 'version_requirement' => '>= 1.0.0',
# # },
# # {
# # 'name' => 'username/anotherdep',
# # 'version' => '4.5.6',
# # 'version_requirement' => '>= 3.0.0',
# # }
# # ]
# # }
# #
#
# @return [Hash<String, Array<Hash<String, String>>>] See the method example
# for an explanation of the return value.
def module_requirements
deps = {}
modules.each do |mod|
next unless mod.forge_name
deps[mod.forge_name] ||= []
mod.dependencies and mod.dependencies.each do |mod_dep|
deps[mod_dep['name']] ||= []
dep_details = {
'name' => mod.forge_name,
'version' => mod.version,
'version_requirement' => mod_dep['version_requirement']
}
deps[mod_dep['name']] << dep_details
end
end
deps.each do |mod, mod_deps|
deps[mod] = mod_deps.sort_by {|d| d['name']}
end
deps
end
# Set a periodic watcher on the file, so we can tell if it has changed.
# @param file [File,String] File instance or filename
# @api private
def watch_file(file)
known_resource_types.watch_file(file.to_s)
end
# @return [String] The stringified value of the `name` instance variable
# @api public
def to_s
name.to_s
end
# @return [Symbol] The `name` value, cast to a string, then cast to a symbol.
#
# @api public
#
# @note the `name` instance variable is a Symbol, but this casts the value
# to a String and then converts it back into a Symbol which will needlessly
# create an object that needs to be garbage collected
def to_sym
to_s.to_sym
end
# Return only the environment name when serializing.
#
# The only thing we care about when serializing an environment is its
# identity; everything else is ephemeral and should not be stored or
# transmitted.
#
# @api public
def to_zaml(z)
self.to_s.to_zaml(z)
end
private
def self.split_path(path_string)
path_string.split(File::PATH_SEPARATOR)
end
def self.extralibs()
if ENV["PUPPETLIB"]
split_path(ENV["PUPPETLIB"])
else
[]
end
end
def self.expand_dirs(dirs)
dirs.collect do |dir|
File.expand_path(dir)
end
end
# Reparse the manifests for the given environment
#
# There are two sources that can be used for the initial parse:
#
# 1. The value of `Puppet.settings[:code]`: Puppet can take a string from
# its settings and parse that as a manifest. This is used by various
# Puppet applications to read in a manifest and pass it to the
# environment as a side effect. This is attempted first.
# 2. The contents of `Puppet.settings[:manifest]`: Puppet will try to load
# the environment manifest. By default this is `$manifestdir/site.pp`
#
# @note This method will return an empty hostclass if
# `Puppet.settings[:ignoreimport]` is set to true.
#
# @return [Puppet::Parser::AST::Hostclass] The AST hostclass object
# representing the 'main' hostclass
def perform_initial_import
return empty_parse_result if Puppet[:ignoreimport]
parser = Puppet::Parser::ParserFactory.parser(self)
if code = Puppet[:code] and code != ""
parser.string = code
parser.parse
else
file = self.manifest
# if the manifest file is a reference to a directory, parse and combine all .pp files in that
# directory
if File.directory?(file)
parse_results = Dir.entries(file).find_all { |f| f =~ /\.pp$/ }.sort.map do |pp_file|
parser.file = File.join(file, pp_file)
parser.parse
end
# Use a parser type specific merger to concatenate the results
Puppet::Parser::AST::Hostclass.new('', :code => Puppet::Parser::ParserFactory.code_merger.concatenate(parse_results))
else
parser.file = file
parser.parse
end
end
rescue => detail
known_resource_types.parse_failed = true
msg = "Could not parse for environment #{self}: #{detail}"
error = Puppet::Error.new(msg)
error.set_backtrace(detail.backtrace)
raise error
end
# Return an empty toplevel hostclass to indicate that no file was loaded
#
# This is used as the return value of {#perform_initial_import} when
# `Puppet.settings[:ignoreimport]` is true.
#
# @return [Puppet::Parser::AST::Hostclass]
def empty_parse_result
return Puppet::Parser::AST::Hostclass.new('')
end
end
| 33.570611 | 125 | 0.680575 |
1c71aa4ec7101f30b84564305447a92d9344b909 | 12,732 | require 'rails_helper'
RSpec.describe CandidateMailer, type: :mailer do
include TestHelpers::MailerSetupHelper
subject(:mailer) { described_class }
let(:application_form) do
build_stubbed(:application_form, first_name: 'Fred',
candidate: candidate,
support_reference: 'SUPPORT-REFERENCE',
application_choices: application_choices)
end
let(:candidate) { build_stubbed(:candidate) }
let(:application_choices) { [build_stubbed(:application_choice)] }
let(:dbd_application) { build_stubbed(:application_choice, :dbd) }
before do
magic_link_stubbing(candidate)
end
describe '.application_submitted' do
let(:email) { mailer.application_submitted(application_form) }
it_behaves_like(
'a mail with subject and content',
I18n.t!('candidate_mailer.application_submitted.subject'),
'heading' => 'Application submitted',
'support reference' => 'SUPPORT-REFERENCE',
'magic link to authenticate' => 'http://localhost:3000/candidate/sign-in/confirm?token=raw_token',
)
end
describe 'Candidate decision chaser email' do
let(:email) { mailer.chase_candidate_decision(application_form) }
let(:offer) do
build_stubbed(:application_choice, :with_offer,
sent_to_provider_at: Time.zone.today,
course_option: course_option)
end
let(:course_option) do
build_stubbed(:course_option, course: build_stubbed(:course,
name: 'Applied Science (Psychology)',
code: '3TT5', provider: provider))
end
let(:provider) { build_stubbed(:provider, name: 'Brighthurst Technical College') }
context 'when a candidate has one appication choice with offer' do
let(:application_choices) { [offer] }
it_behaves_like(
'a mail with subject and content',
I18n.t!('candidate_mailer.chase_candidate_decision.subject_singular'),
'heading' => 'Dear Fred',
'dbd date' => 'respond by 8 November',
'course name and code' => ' Applied Science (Psychology)',
'provider name' => 'Brighthurst Technical College',
)
end
context 'when a candidate has multiple application choices with offer' do
let(:second_offer) do
build_stubbed(:application_choice, :with_offer,
sent_to_provider_at: Time.zone.today,
course_option: second_course_option)
end
let(:second_course_option) do
build_stubbed(:course_option, course: build_stubbed(:course,
name: 'Code Refactoring',
code: 'CRF5',
provider: other_provider))
end
let(:other_provider) { build_stubbed(:provider, name: 'Ting University') }
let(:application_choices) { [offer, second_offer] }
it_behaves_like(
'a mail with subject and content',
I18n.t!('candidate_mailer.chase_candidate_decision.subject_plural'),
'first course with offer' => 'Applied Science (Psychology)',
'first course provider with offer' => 'Brighthurst Technical College',
'second course with offer' => 'Code Refactoring',
'second course provider with offer' => 'Ting University',
)
end
end
describe '.decline_by_default' do
let(:email) { mailer.declined_by_default(application_form) }
context 'when a candidate has 1 offer that was declined' do
let(:application_choices) { [dbd_application] }
it_behaves_like(
'a mail with subject and content',
'You did not respond to your offer: next steps',
'heading' => 'Dear Fred',
'days left to respond' => '10 working days',
)
end
context 'when a candidate has 2 or 3 offers that were declined' do
let(:application_choices) { [dbd_application, dbd_application] }
it_behaves_like 'a mail with subject and content', 'You did not respond to your offers: next steps', {}
end
context 'when a candidate has 1 offer that was declined by default and a rejection' do
let(:application_choices) { [dbd_application, build_stubbed(:application_choice, status: 'rejected')] }
it_behaves_like(
'a mail with subject and content',
'You did not respond to your offer: next steps',
'heading' => 'Dear Fred',
'DBD_days_they_had_to_respond' => '10 working days',
'still_interested' => 'If now’s the right time for you',
)
end
context 'when a candidate has 2 offers that were declined by default and a rejection' do
let(:application_choices) { [dbd_application, dbd_application, build_stubbed(:application_choice, status: 'rejected')] }
it_behaves_like(
'a mail with subject and content',
'You did not respond to your offers: next steps',
'heading' => 'Dear Fred',
'DBD_days_they_had_to_respond' => '10 working days',
'still_interested' => 'If now’s the right time for you',
)
end
context 'when a candidate has 1 offer that was declined and is awaiting another decision' do
let(:application_choices) { [dbd_application, build_stubbed(:application_choice, status: 'awaiting_provider_decision')] }
it_behaves_like(
'a mail with subject and content',
'Application withdrawn automatically',
'heading' => 'Dear Fred',
'days left to respond' => '10 working days',
)
end
context 'when a candidate has 2 offers that were declined and is awaiting another decision' do
let(:application_choices) { [dbd_application, dbd_application, build_stubbed(:application_choice, status: 'awaiting_provider_decision')] }
it_behaves_like(
'a mail with subject and content',
'Applications withdrawn automatically',
'heading' => 'Dear Fred',
'days left to respond' => '10 working days',
)
end
end
describe '.withdraw_last_application_choice' do
let(:email) { mailer.withdraw_last_application_choice(application_form) }
context 'when a candidate has 1 course choice that was withdrawn' do
let(:application_choices) { [build_stubbed(:application_choice, status: 'withdrawn')] }
it_behaves_like(
'a mail with subject and content',
'You’ve withdrawn your application: next steps',
'heading' => 'Dear Fred',
'application_withdrawn' => 'You’ve withdrawn your application',
)
end
context 'when a candidate has 2 or 3 course choices that were withdrawn' do
let(:application_choices) { [build_stubbed(:application_choice, :withdrawn), build_stubbed(:application_choice, :withdrawn)] }
it_behaves_like(
'a mail with subject and content',
'You’ve withdrawn your applications: next steps',
'application_withdrawn' => 'You’ve withdrawn your application',
)
end
end
describe '.decline_last_application_choice' do
let(:email) { described_class.decline_last_application_choice(application_form.application_choices.first) }
let(:application_choices) { [build_stubbed(:application_choice, status: :declined)] }
it_behaves_like(
'a mail with subject and content',
'You’ve declined an offer: next steps',
'greeting' => 'Dear Fred',
'content' => 'declined your offer to study',
)
end
describe '#apply_again_call_to_action' do
let(:email) { described_class.apply_again_call_to_action(application_form) }
let(:application_choices) { [build_stubbed(:application_choice, status: :rejected)] }
it_behaves_like(
'a mail with subject and content',
'You can still apply for teacher training',
'content' => 'You can apply for teacher training again if you have not got a place yet',
)
end
describe '.chase_reference_again' do
let(:email) { described_class.chase_reference_again(referee) }
let(:referee) { build_stubbed(:reference, name: 'Jolyne Doe', application_form: application_form) }
let(:application_choices) { [] }
it_behaves_like(
'a mail with subject and content',
'Jolyne Doe has not responded yet',
'magic_link' => '/candidate/sign-in/confirm?token=raw_token',
)
end
describe '.offer_accepted' do
let(:email) { described_class.offer_accepted(application_form.application_choices.first) }
let(:application_choices) do
[build_stubbed(
:application_choice,
status: 'pending_conditions',
course_option: build_stubbed(
:course_option,
course: build_stubbed(
:course,
name: 'Mathematics',
code: 'M101',
start_date: Time.zone.local(2021, 9, 6),
provider: build_stubbed(
:provider,
name: 'Arithmetic College',
),
),
),
)]
end
it_behaves_like(
'a mail with subject and content',
'You’ve accepted Arithmetic College’s offer to study Mathematics (M101)',
'greeting' => 'Dear Fred,',
'offer_details' => 'You’ve accepted Arithmetic College’s offer to study Mathematics (M101)',
'course start' => 'September 2021',
)
end
describe '.unconditional_offer_accepted' do
let(:email) { described_class.unconditional_offer_accepted(application_form.application_choices.first) }
let(:application_choices) do
[build_stubbed(
:application_choice,
status: 'pending_conditions',
course_option: build_stubbed(
:course_option,
course: build_stubbed(
:course,
name: 'Mathematics',
code: 'M101',
start_date: Time.zone.local(2021, 9, 6),
provider: build_stubbed(
:provider,
name: 'Arithmetic College',
),
),
),
)]
end
it_behaves_like(
'a mail with subject and content',
'You’ve accepted Arithmetic College’s offer to study Mathematics (M101)',
'greeting' => 'Dear Fred,',
'offer_details' => 'You’ve accepted Arithmetic College’s offer to study Mathematics (M101)',
'course start' => 'September 2021',
)
end
context 'Interview emails' do
let(:provider) { create(:provider, name: 'Hogwards') }
let(:interview) do
create(:interview,
date_and_time: Time.zone.local(2021, 1, 15, 9, 30),
location: 'Hogwarts Castle',
additional_details: 'Bring your magic wand for the spells test',
provider: provider)
end
let(:application_choice_with_interview) { interview.application_choice }
before do
build_stubbed(:application_form,
first_name: 'Fred',
candidate: candidate,
application_choices: [application_choice_with_interview])
end
describe '.new_interview' do
let(:email) { mailer.new_interview(application_choice_with_interview, interview) }
it_behaves_like(
'a mail with subject and content',
'Interview arranged - Hogwards',
'greeting' => 'Dear Fred,',
'details' => 'You have an interview with Hogwards',
'interview date and time' => '15 January 2021 at 9:30am',
'interview location' => 'Hogwarts Castle',
'additional interview details' => 'Bring your magic wand for the spells test',
)
end
describe '.interview_updated' do
let(:email) { mailer.interview_updated(application_choice_with_interview, interview) }
it_behaves_like(
'a mail with subject and content',
'Interview details updated - Hogwards',
'greeting' => 'Dear Fred,',
'details' => 'Hogwards has updated the details of the interview',
'interview date and time' => '15 January 2021 at 9:30am',
'interview location' => 'Hogwarts Castle',
'additional interview details' => 'Bring your magic wand for the spells test',
)
end
describe '.interview_cancelled' do
let(:email) { mailer.interview_cancelled(application_choice_with_interview, interview, 'We recruited someone else') }
it_behaves_like(
'a mail with subject and content',
'Interview cancelled - Hogwards',
'greeting' => 'Dear Fred,',
'details' => 'Hogwards has cancelled the interview on 15 January 2021 at 9:30am',
'cancellation reason' => 'We recruited someone else',
)
end
end
end
| 38.00597 | 144 | 0.636349 |
1a25777fc595c3b440a70ce406c17957a1281246 | 4,087 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Bibliography do
subject(:bibliography) { described_class.new(bibtex) }
context 'rendering bibliography as HTML' do
context 'phdthesis' do
let(:bibtex) { Pathname('spec/fixtures/bibliography/phdthesis.bib') }
it '#to_html' do
expect(bibliography.to_html).to include 'Wilson, E. A. 1968. “A Critic'\
'al Text, with Commentary of MS Eng. Theol. f. 39 in the Bodleian Li'\
'brary.” B.Litt., Oxford: University of Oxford.'
end
end
context 'incollection' do
let(:bibtex) { Pathname('spec/fixtures/bibliography/incollection.bib') }
it '#to_html' do
expect(bibliography.to_html).to include 'Whatley, E. G. 1986. “A ‘Symp'\
'le Wrecche’ at Work: the Life and Miracles of St. Erkenwald in the Gi'\
'lte Legende, BL Add. 35298.” In <i>Legenda Aurea. Sept Siècles De Dif'\
'fusion. Actes Du Colloque International Sur La Legenda Aurea, Univers'\
'ité Du Québec, Montréal, 11-12 Mai 1983</i>, edited by B. Dunn-Lardea'\
'u, 1st ed., 1:333–43. Textes Et Études Du Moyen Âge. Montréal/Paris: '\
'My Publisher.'
end
end
context 'book' do
let(:bibtex) { Pathname('spec/fixtures/bibliography/book.bib') }
it '#to_html' do
expect(bibliography.to_html).to include 'Azevedo, R. de. 1962. <i>A Car'\
'ta Ou Memória Do Cruzado Inglês R. Para Osberto De Bawdsey Sobre a Co'\
'nquista De Lisboa Em 1147</i>. 1st ed. Vol. I. Coimbra: Faculdade de '\
'Letras da Universidade de Coimbra.'
end
end
context 'article' do
let(:bibtex) { Pathname('spec/fixtures/bibliography/article.bib') }
it '#to_html' do
expect(bibliography.to_html).to include 'Wille, Clara. 2004. “Quelques'\
' Observations Sur Le Porc-Épic Et Le Hérisson Dans La Littérature Et '\
'l’Iconographie Médiévale.” <i>Reinardus. Yearbook of the Internationa'\
'l Reynard Society</i> 17 (1): 181–201. https://doi.org/10.1075/rein.17.14wil.'
end
end
end
context 'sorting bibliography' do
let(:bibtex) { `cat spec/fixtures/bibliography/*.bib` }
it '#bibliography (unsorted)' do # rubocop: disable RSpec/ExampleLength
expect(bibliography.bibliography.count).to eq 15
expect(bibliography.bibliography.collect(&:id)).to include(
'http://zotero.org/groups/1051392/items/QTWBAWKX',
'http://zotero.org/groups/1051392/items/TXXUJDG2',
'http://zotero.org/groups/1051392/items/EI8BRRXB',
'http://zotero.org/groups/1051392/items/SQA6T57X',
'http://zotero.org/groups/1051392/items/JMIMQVT6',
'http://zotero.org/groups/1051392/items/E9MZZKFV',
'http://zotero.org/groups/1051392/items/6Q6TF4HD',
'http://zotero.org/groups/1051392/items/E3MS2TQK',
'http://zotero.org/groups/1051392/items/MT4A9V2I'
)
end
it '#to_html (in sorted order)' do
expect(bibliography.to_html).to eq File.read('spec/fixtures/bibliography/rendered.html').strip
end
end
context 'initializer support for different forms' do
context 'a BibTeX::Bibliography' do
let(:bibtex) { BibTeX.open('spec/fixtures/bibliography/phdthesis.bib') }
it '#bibliography' do
expect(bibliography.bibliography).to be_a(BibTeX::Bibliography)
end
end
context 'a Pathname' do
let(:bibtex) { Pathname('spec/fixtures/bibliography/phdthesis.bib') }
it '#bibliography' do
expect(bibliography.bibliography).to be_a(BibTeX::Bibliography)
end
end
context 'a String (data)' do
let(:bibtex) { Pathname('spec/fixtures/bibliography/phdthesis.bib').read }
it '#bibliography' do
expect(bibliography.bibliography).to be_a(BibTeX::Bibliography)
end
end
context 'an unsupported form' do
let(:bibtex) { nil }
it '#bibliography' do
expect { bibliography.bibliography }.to raise_error(ArgumentError, /Unsupported type/)
end
end
end
end
| 36.168142 | 100 | 0.655738 |
91069e8641d1f6d2324ee5147c2eaac0720b64e1 | 3,495 | # vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
# Copyright 2013-present Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'mixlib/config'
require 'taste_tester/logging'
require 'between_meals/util'
module TasteTester
# Config file parser and config object
# Uses Mixlib::Config v1 syntax so it works in Chef10 omnibus...
# it's compatible with v2, so it should work in 11 too.
module Config
extend Mixlib::Config
extend TasteTester::Logging
extend BetweenMeals::Util
repo "#{ENV['HOME']}/ops"
repo_type 'auto'
base_dir 'chef'
cookbook_dirs ['cookbooks']
role_dir 'roles'
databag_dir 'databags'
role_type 'rb'
config_file '/etc/taste-tester-config.rb'
plugin_path nil
chef_zero_path nil
verbosity Logger::WARN
timestamp false
user 'root'
ref_file "#{ENV['HOME']}/.chef/taste-tester-ref.json"
knife_config "#{ENV['HOME']}/.chef/knife-#{ENV['USER']}-taste-tester.rb"
checksum_dir "#{ENV['HOME']}/.chef/checksums"
skip_repo_checks false
chef_client_command 'chef-client'
testing_time 3600
chef_port_range [5000, 5500]
tunnel_port 4001
timestamp_file '/etc/chef/test_timestamp'
use_ssh_tunnels false
ssh_command 'ssh'
use_ssl true
chef_zero_logging true
chef_config_path '/etc/chef'
chef_config 'client.rb'
my_hostname nil
track_symlinks false
transport 'ssh'
skip_pre_upload_hook false
skip_post_upload_hook false
skip_pre_test_hook false
skip_post_test_hook false
skip_repo_checks_hook false
def self.cookbooks
cookbook_dirs.map do |x|
File.join(repo, base_dir, x)
end
end
def self.relative_cookbook_dirs
cookbook_dirs.map do |x|
base_dir && !base_dir.empty? ? File.join(base_dir, x) : x
end
end
def self.roles
File.join(repo, base_dir, role_dir)
end
def self.relative_role_dir
base_dir && !base_dir.empty? ? File.join(base_dir, role_dir) : role_dir
end
def self.databags
File.join(repo, base_dir, databag_dir)
end
def self.relative_databag_dir
if base_dir && !base_dir.empty?
File.join(base_dir, databag_dir)
else
databag_dir
end
end
def self.chef_port
require 'taste_tester/state'
port_range = (
chef_port_range.first.to_i..chef_port_range.last.to_i
).to_a.shuffle
if TasteTester::State.port
port_range.unshift(TasteTester::State.port)
end
port_range.each do |port|
unless port_open?(port)
return port
end
end
logger.error 'Could not find a free port in range' +
" [#{chef_port_range.first}, #{chef_port_range.last}]"
exit 1
end
def self.testing_end_time
if TasteTester::Config.testing_until
TasteTester::Config.testing_until
else
Time.now + TasteTester::Config.testing_time
end
end
end
end
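# Minimal usage sketch (assumed calling code, not part of the original file):
# Mixlib::Config exposes each default above as a class-level accessor, and
# `from_file` (provided by Mixlib::Config) overrides them from the config file.
#
#   TasteTester::Config.from_file(TasteTester::Config.config_file)
#   TasteTester::Config.repo        # => "#{ENV['HOME']}/ops" unless overridden
#   TasteTester::Config.cookbooks   # => ["#{ENV['HOME']}/ops/chef/cookbooks"]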
| 27.519685 | 77 | 0.684692 |
6a61a707a1ec8b7cde9b12383f189e7d87ec9f2f | 17,966 | require 'cocoapods-core/specification/consumer'
require 'cocoapods-core/specification/dsl'
require 'cocoapods-core/specification/linter'
require 'cocoapods-core/specification/root_attribute_accessors'
require 'cocoapods-core/specification/set'
require 'cocoapods-core/specification/yaml'
module Pod
# The Specification provides a DSL to describe a Pod. A pod is defined as a
# library originating from a source. A specification can support detailed
# attributes for modules of code through subspecs.
#
# Usually it is stored in files with `podspec` extension.
#
class Specification
include Pod::Specification::DSL
include Pod::Specification::DSL::Deprecations
include Pod::Specification::RootAttributesAccessors
include Pod::Specification::YAMLSupport
# @return [Specification] the parent of the specification unless the
# specification is a root.
#
attr_reader :parent
# @param [Specification] parent @see parent
#
# @param [String] name
# the name of the specification.
#
def initialize(parent = nil, name = nil)
@attributes_hash = {}
@subspecs = []
@consumers = {}
@parent = parent
attributes_hash['name'] = name
yield self if block_given?
end
# @return [Hash] the hash that stores the information of the attributes of
# the specification.
#
attr_accessor :attributes_hash
# @return [Array<Specification>] The subspecs of the specification.
#
attr_accessor :subspecs
# Checks if a specification is equal to the given one according its name
# and to its version.
#
# @param [Specification] other
# the specification to compare with.
#
# @todo Not sure if comparing only the name and the version is the way to
# go. This is used by the installer to group specifications by root
# spec.
#
# @return [Bool] Whether the specifications are equal.
#
def ==(other)
# TODO
# self.class === other &&
# attributes_hash == other.attributes_hash &&
# subspecs == other.subspecs &&
# pre_install_callback == other.pre_install_callback &&
# post_install_callback == other.post_install_callback
self.to_s == other.to_s
end
# @see ==
#
def eql?(other)
self == other
end
# Return the hash value for this specification according to its attributes
# hash.
#
# @note This function must have the property that a.eql?(b) implies
# a.hash == b.hash.
#
# @note This method is used by the Hash class.
#
# @return [Fixnum] The hash value.
#
def hash
to_s.hash
end
# @return [String] A string suitable for representing the specification in
# clients.
#
def to_s
if name && !version.version.empty?
"#{name} (#{version})"
elsif name
name
else
"No-name"
end
end
# @return [String] A string suitable for debugging.
#
def inspect
"#<#{self.class.name} name=#{name.inspect}>"
end
# @param [String] string_representation
# the string that describes a {Specification} generated from
# {Specification#to_s}.
#
# @example Input examples
#
# "libPusher (1.0)"
# "libPusher (HEAD based on 1.0)"
# "RestKit/JSON (1.0)"
#
# @return [Array<String, Version>] the name and the version of a
# pod.
#
def self.name_and_version_from_string(string_representation)
match_data = string_representation.match(/(\S*) \((.*)\)/)
unless match_data
raise Informative, "Invalid string representation for a Specification: `#{string_representation}`."
end
name = match_data[1]
vers = Version.new(match_data[2])
[name, vers]
end
# Returns the root name of a specification.
#
# @param [String] full_name the name of a specification or of a subspec.
#
# @return [String] the root name
#
def self.root_name(full_name)
full_name.split('/').first
end
#-------------------------------------------------------------------------#
public
# @!group Hierarchy
# @return [Specification] The root specification or itself if it is root.
#
def root
parent ? parent.root : self
end
# @return [Bool] whether the specification is root.
#
def root?
parent.nil?
end
# @return [Bool] whether the specification is a subspec.
#
def subspec?
!parent.nil?
end
#-------------------------------------------------------------------------#
public
# @!group Dependencies & Subspecs
# @return [Array<Specifications>] the recursive list of all the subspecs of
# a specification.
#
def recursive_subspecs
mapper = lambda do |spec|
spec.subspecs.map do |subspec|
[subspec, *mapper.call(subspec)]
end.flatten
end
mapper.call(self)
end
# Returns the subspec with the given name or the receiver if the name is
# nil or equal to the name of the receiver.
#
# @param [String] relative_name
# the relative name of the subspecs starting from the receiver
# including the name of the receiver.
#
# @example Retrieving a subspec
#
# s.subspec_by_name('Pod/subspec').name #=> 'subspec'
#
# @return [Specification] the subspec with the given name or self.
#
def subspec_by_name(relative_name)
if relative_name.nil? || relative_name == base_name
self
else
remainder = relative_name[base_name.size+1..-1]
subspec_name = remainder.split('/').shift
subspec = subspecs.find { |s| s.name == "#{self.name}/#{subspec_name}" }
unless subspec
raise Informative, "Unable to find a specification named " \
"`#{relative_name}` in `#{self.name} (#{self.version})`."
end
subspec.subspec_by_name(remainder)
end
end
# @return [String] the name of the default subspec if provided.
#
def default_subspec
attributes_hash["default_subspec"]
end
# Returns the dependencies on subspecs.
#
# @note A specification has a dependency on either the
# {#default_subspec} or each of its children subspecs that are
# compatible with its platform.
#
# @return [Array<Dependency>] the dependencies on subspecs.
#
def subspec_dependencies(platform = nil)
if default_subspec
specs = [subspec_by_name("#{name}/#{default_subspec}")]
else
specs = subspecs.compact
end
if platform
specs = specs.select { |s| s.supported_on_platform?(platform) }
end
specs.map { |s| Dependency.new(s.name) }
end
# Returns the dependencies on other Pods or subspecs of other Pods.
#
# @param [Bool] all_platforms
# whether the dependencies should be returned for all platforms
# instead of the active one.
#
# @note External dependencies are inherited by subspecs
#
# @return [Array<Dependency>] the dependencies on other Pods.
#
def dependencies(platform = nil)
if platform
consumer(platform).dependencies || []
else
available_platforms.map do |spec_platform|
consumer(spec_platform).dependencies
end.flatten.uniq
end
end
# @return [Array<Dependency>] all the dependencies of the specification.
#
def all_dependencies(platform = nil)
dependencies(platform) + subspec_dependencies(platform)
end
# Returns a consumer to access the multi-platform attributes.
#
# @param [String, Symbol, Platform] platform
# the platform of the consumer
#
# @return [Specification::Consumer] the consumer for the given platform
#
def consumer(platform)
platform = platform.to_sym
@consumers[platform] ||= Consumer.new(self, platform)
end
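# @example Reading multi-platform attributes through a consumer
#   (illustrative only; `spec` stands for any loaded Specification and the
#   attribute readers shown are assumed from the Consumer class)
#
#   spec.consumer(:ios).dependencies
#   spec.consumer(Platform.new(:osx)).dependencies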
#-------------------------------------------------------------------------#
public
# @!group DSL helpers
# @return [Bool] whether the specification should use a directory as its
# source.
#
def local?
!!(source[:path] || source[:local]) rescue false
end
# @return [Bool] whether the specification is supported in the given
# platform.
#
# @overload supported_on_platform?(platform)
#
# @param [Platform] platform
# the platform which is checked for support.
#
# @overload supported_on_platform?(symbolic_name, deployment_target)
#
# @param [Symbol] symbolic_name
# the name of the platform which is checked for support.
#
# @param [String] deployment_target
# the deployment target which is checked for support.
#
def supported_on_platform?(*platform)
platform = Platform.new(*platform)
available_platforms.any? { |available| platform.supports?(available) }
end
# @return [Array<Platform>] The platforms that the Pod is supported on.
#
# @note If no platform is specified, this method returns all known
# platforms.
#
def available_platforms
names = supported_platform_names
names = PLATFORMS if names.empty?
names.map { |name| Platform.new(name, deployment_target(name)) }
end
# Returns the deployment target for the specified platform.
#
# @param [String] platform_name
# the symbolic name of the platform.
#
# @return [String] the deployment target
# @return [Nil] if no deployment target was specified for the platform.
#
def deployment_target(platform_name)
result = platform_hash[platform_name.to_s]
result ||= parent.deployment_target(platform_name) if parent
result
end
protected
# @return [Array[Symbol]] the symbolic name of the platform in which the
# specification is supported.
#
# @return [Nil] if the specification is supported on all the known
# platforms.
#
def supported_platform_names
result = platform_hash.keys
if result.empty? && parent
result = parent.supported_platform_names
end
result
end
# @return [Hash] the normalized hash which represents the platform
# information.
#
def platform_hash
case value = attributes_hash["platforms"]
when String
{ value => nil }
when Array
result = {}
value.each do |a_value|
result[a_value] = nil
end
result
when Hash
value
else
Hash.new
end
end
public
# @!group Deprecated Hooks support
#-------------------------------------------------------------------------#
# @return [Proc] the pre install callback if defined.
#
attr_reader :pre_install_callback
# @return [Proc] the post install callback if defined.
#
attr_reader :post_install_callback
# Calls the pre install callback if defined.
#
# @param [Pod::LocalPod] pod
# the local pod instance that manages the files described by this
# specification.
#
# @param [Podfile::TargetDefinition] target_definition
# the target definition that required this specification as a
# dependency.
#
# @return [Bool] whether a pre install callback was specified and it was
# called.
#
def pre_install!(pod, target_definition)
return false unless @pre_install_callback
@pre_install_callback.call(pod, target_definition)
true
end
# Calls the post install callback if defined.
#
# @param [Pod::TargetInstaller] target_installer
# the target installer that is performing the installation of the
# pod.
#
# @return [Bool] whether a post install callback was specified and it was
# called.
#
def post_install!(target_installer)
return false unless @post_install_callback
@post_install_callback.call(target_installer)
true
end
#-------------------------------------------------------------------------#
public
# @!group DSL attribute writers
# Sets the value for the attribute with the given name.
#
# @param [Symbol] name
# the name of the attribute.
#
# @param [Object] value
# the value to store.
#
# @param [Symbol] platform.
# If provided the attribute is stored only for the given platform.
#
# @note If the provided value is a Hash, the keys are converted to strings.
#
# @return void
#
def store_attribute(name, value, platform_name = nil)
name = name.to_s
value = convert_keys_to_string(value) if value.is_a?(Hash)
if platform_name
platform_name = platform_name.to_s
attributes_hash[platform_name] ||= {}
attributes_hash[platform_name][name] = value
else
attributes_hash[name] = value
end
end
# Defines the setters methods for the attributes providing support for the
# Ruby DSL.
#
DSL.attributes.values.each do |a|
define_method(a.writer_name) do |value|
store_attribute(a.name, value)
end
if a.writer_singular_form
alias_method(a.writer_singular_form, a.writer_name)
end
end
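# @example Writers generated above (illustrative podspec DSL usage; the
#   attribute values are made up)
#
#   spec.summary = 'A short description.'                 # same as store_attribute(:summary, ...)
#   spec.authors = { 'Jane Doe' => '[email protected]' }  # `author` also works via the singular alias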
private
# Converts the keys of the given hash to a string.
#
# @param [Object] value
# the value that needs to be stripped from the Symbols.
#
# @return [Hash] the hash with the strings instead of the keys.
#
def convert_keys_to_string(value)
return unless value
result = {}
value.each do |key, subvalue|
subvalue = convert_keys_to_string(subvalue) if subvalue.is_a?(Hash)
result[key.to_s] = subvalue
end
result
end
#-------------------------------------------------------------------------#
public
# @!group File representation
# @return [String] The SHA1 digest of the file in which the specification
# is defined.
#
# @return [Nil] If the specification is not defined in a file.
#
def checksum
require 'digest'
unless defined_in_file.nil?
checksum = Digest::SHA1.hexdigest(File.read(defined_in_file))
checksum = checksum.encode('UTF-8') if checksum.respond_to?(:encode)
checksum
end
end
# @return [String] the path where the specification is defined, if loaded
# from a file.
#
def defined_in_file
root? ? @defined_in_file : root.defined_in_file
end
# Loads a specification form the given path.
#
# @param [Pathname, String] path
# the path of the `podspec` file.
#
# @param [String] subspec_name
# the name of the specification that should be returned. If it is
# nil returns the root specification.
#
# @raise If the file doesn't return a Pod::Specification after
# evaluation.
#
# @return [Specification] the specification
#
def self.from_file(path, subspec_name = nil)
path = Pathname.new(path)
unless path.exist?
raise Informative, "No podspec exists at path `#{path}`."
end
string = File.open(path, 'r:utf-8') { |f| f.read }
# Work around for Rubinius incomplete encoding in 1.9 mode
if string.respond_to?(:encoding) && string.encoding.name != "UTF-8"
string.encode!('UTF-8')
end
from_string(string, path, subspec_name)
end
# Loads a specification with the given string.
#
# @param [String] spec_contents
# A string describing a specification.
#
# @param [Pathname, String] path @see from_file
# @param [String] subspec_name @see from_file
#
# @return [Specification] the specification
#
def self.from_string(spec_contents, path, subspec_name = nil)
path = Pathname.new(path)
case path.extname
when '.podspec'
spec = ::Pod._eval_podspec(spec_contents, path)
unless spec.is_a?(Specification)
raise Informative, "Invalid podspec file at path `#{path}`."
end
when '.yaml'
spec = Specification.from_yaml(spec_contents)
else
raise Informative, "Unsupported specification format `#{path.extname}`."
end
spec.defined_in_file = path
spec.subspec_by_name(subspec_name)
end
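# @example Loading a podspec from disk (hypothetical path and contents)
#
#   spec = Specification.from_file('~/Desktop/MyLib.podspec')
#   spec.name            # => "MyLib"
#   spec.defined_in_file # => the Pathname it was loaded from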
# Sets the path of the `podspec` file used to load the specification.
#
# @param [String] file
# the `podspec` file.
#
# @return [void]
#
# @visibility private
#
def defined_in_file=(file)
unless root?
raise StandardError, "Defined in file can be set only for root specs."
end
@defined_in_file = file
end
end
#---------------------------------------------------------------------------#
# @visibility private
#
# Evaluates the given string in the namespace of the Pod module.
#
# @param [String] string
# The string containing the Ruby description of the Object to
# evaluate.
#
# @param [Pathname] path
# The path where the object to evaluate is stored.
#
# @return [Object] it can return any object but, is expected to be called on
# `podspec` files that should return a #{Specification}.
#
#
def self._eval_podspec(string, path)
begin
eval(string, nil, path.to_s)
rescue Exception => e
raise DSLError.new("Invalid `#{path.basename}` file: #{e.message}", path, e.backtrace)
end
end
end
| 29.30832 | 107 | 0.602583 |
b9fdf991d1a73c31c7fd7869aff0463f37b58f77 | 1,333 | module RedditKit
class Client
# Methods for voting on links and comments.
module Voting
# Upvotes a link or comment.
#
# @param link_or_comment [String, RedditKit::Comment, RedditKit::Link] The link or comment to upvote.
def upvote(link_or_comment)
vote link_or_comment, 1
end
# Downvotes a link or comment.
#
# @param link_or_comment [String, RedditKit::Comment, RedditKit::Link] The link or comment to downvote.
def downvote(link_or_comment)
vote link_or_comment, -1
end
# Withdraws a vote on a link or comment.
#
# @param link_or_comment [String, RedditKit::Comment, RedditKit::Link] The link or comment from which to withdraw the vote.
def withdraw_vote(link_or_comment)
vote link_or_comment, 0
end
# Votes on a link or comment.
#
# @param link_or_comment [String, RedditKit::Comment, RedditKit::Link] The link or comment to vote on.
# @param direction [-1, 0, 1] Downvote, no vote, and upvote respectively.
def vote(link_or_comment, direction)
full_name = extract_full_name(link_or_comment)
parameters = { :id => full_name, :dir => direction, :api_type => 'json' }
post('api/vote', parameters)
end
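# @example Casting and withdrawing votes (assumed client setup; the full name
#   below is a placeholder)
#
#   client = RedditKit::Client.new 'username', 'password'
#   client.upvote 't3_link_full_name'
#   client.withdraw_vote 't3_link_full_name'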
end
end
end
| 31.738095 | 129 | 0.654914 |
2605ed77771f554ea3e1d48b46073d0046a372a4 | 1,362 | # Encoding: UTF-8
require File.expand_path('../../core/lib/refinery/version', __FILE__)
version = Refinery::Version.to_s
Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
s.name = %q{refinerycms-resources}
s.version = version
s.summary = %q{Resources extension for Refinery CMS}
s.description = %q{Handles all file upload and processing functionality in Refinery CMS.}
s.email = %q{[email protected]}
s.homepage = %q{http://refinerycms.com}
s.rubyforge_project = %q{refinerycms}
s.authors = ['Philip Arndt', 'Uģis Ozols', 'Rob Yurkowski']
s.license = %q{MIT}
s.require_paths = %w(lib)
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- spec/*`.split("\n")
s.add_dependency 'acts_as_indexed', '~> 0.8.0'
s.add_dependency 'dragonfly', '~> 1.1.0'
s.add_dependency 'globalize', ['>= 4.0.0', '< 5.2']
s.add_dependency 'refinerycms-core', version
s.required_ruby_version = Refinery::Version.required_ruby_version
s.cert_chain = [File.expand_path("../../certs/parndt.pem", __FILE__)]
if $0 =~ /gem\z/ && ARGV.include?("build") && ARGV.include?(__FILE__)
s.signing_key = File.expand_path("~/.ssh/gem-private_key.pem")
end
end
| 40.058824 | 97 | 0.604993 |
03aa7385204dc245593ebebc6bca459e0b7e0fbd | 3,109 | require_relative 'ionoscloud_base'
class Chef
class Knife
class IonoscloudContractList < Knife
include Knife::IonoscloudBase
banner 'knife ionoscloud contract list'
attr_reader :description, :required_options
def initialize(args = [])
super(args)
@description =
'Lists information about available contract resources.'
@required_options = [:ionoscloud_username, :ionoscloud_password]
end
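# Example invocation (flag names are assumed; the credentials may also be
# supplied through knife.rb instead of the command line):
#
#   knife ionoscloud contract list \
#     --ionoscloud-username USER --ionoscloud-password PASS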
def run
$stdout.sync = true
handle_extra_config
validate_required_params(@required_options, config)
contract = Ionoscloud::ContractApi.new(api_client).contracts_get()
puts "#{ui.color('Contract Type', :cyan)}: #{contract.type}"
puts "#{ui.color('Contract Owner', :cyan)}: #{contract.properties.owner}"
puts "#{ui.color('Contract Number', :cyan)}: #{contract.properties.contract_number}"
puts "#{ui.color('Registration Domain', :cyan)}: #{contract.properties.reg_domain}"
puts "#{ui.color('Status', :cyan)}: #{contract.properties.status}"
puts "#{ui.color('Cores per contract', :cyan)}: #{contract.properties.resource_limits.cores_per_contract}"
puts "#{ui.color('Cores per server', :cyan)}: #{contract.properties.resource_limits.cores_per_server}"
puts "#{ui.color('Cores provisioned', :cyan)}: #{contract.properties.resource_limits.cores_provisioned}"
puts "#{ui.color('HDD limit per contract', :cyan)}: #{contract.properties.resource_limits.hdd_limit_per_contract}"
puts "#{ui.color('HDD limit per volume', :cyan)}: #{contract.properties.resource_limits.hdd_limit_per_volume}"
puts "#{ui.color('HDD volume provisioned', :cyan)}: #{contract.properties.resource_limits.hdd_volume_provisioned}"
puts "#{ui.color('RAM per contract', :cyan)}: #{contract.properties.resource_limits.ram_per_contract}"
puts "#{ui.color('RAM per server', :cyan)}: #{contract.properties.resource_limits.ram_per_server}"
puts "#{ui.color('RAM provisioned', :cyan)}: #{contract.properties.resource_limits.ram_provisioned}"
puts "#{ui.color('Reservable IPs', :cyan)}: #{contract.properties.resource_limits.reservable_ips}"
puts "#{ui.color('Reservable IPs in use', :cyan)}: #{contract.properties.resource_limits.reserved_ips_in_use}"
puts "#{ui.color('Reservable IPs on contract', :cyan)}: #{contract.properties.resource_limits.reserved_ips_on_contract}"
puts "#{ui.color('SSD limit per contract', :cyan)}: #{contract.properties.resource_limits.ssd_limit_per_contract}"
puts "#{ui.color('SSD limit per volume', :cyan)}: #{contract.properties.resource_limits.ssd_limit_per_volume}"
puts "#{ui.color('SSD volume provisioned', :cyan)}: #{contract.properties.resource_limits.ssd_volume_provisioned}"
puts "#{ui.color('K8s Cluster Limit Total', :cyan)}: #{contract.properties.resource_limits.k8s_cluster_limit_total}"
puts "#{ui.color('K8s Clusters provisioned', :cyan)}: #{contract.properties.resource_limits.k8s_clusters_provisioned}"
end
end
end
end
| 59.788462 | 128 | 0.69733 |
91e90315b3e55088f6db6ec378416b4f5f9a3455 | 1,170 | # frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Graphql::FindArgumentInParent do
describe '#find' do
def build_node(parent = nil, args: {})
props = { irep_node: double(arguments: args) }
props[:parent] = parent if parent # The root node shouldn't respond to parent
double(props)
end
let(:parent) do
build_node(
build_node(
build_node(
build_node,
args: { myArg: 1 }
)
)
)
end
let(:arg_name) { :my_arg }
it 'searches parents and returns the argument' do
expect(described_class.find(parent, :my_arg)).to eq(1)
end
it 'can find argument when passed in as both Ruby and GraphQL-formatted symbols and strings' do
[:my_arg, :myArg, 'my_arg', 'myArg'].each do |arg|
expect(described_class.find(parent, arg)).to eq(1)
end
end
it 'returns nil if no arguments found in parents' do
expect(described_class.find(parent, :bar)).to eq(nil)
end
it 'can limit the depth it searches to' do
expect(described_class.find(parent, :my_arg, limit_depth: 1)).to eq(nil)
end
end
end
| 26 | 99 | 0.626496 |
7a878ac978cf5a566222381bd2e1ff1ebb6104cb | 66 | Signal.trap('INT') do
puts self
exit
end
p Process.pid
sleep
| 8.25 | 21 | 0.69697 |
e22b4355b3e82191b0cf4db0e2b05993e99d331e | 795 | require "json"
package = JSON.parse(File.read(File.join(__dir__, "package.json")))
Pod::Spec.new do |s|
s.name = "react-native-kustomerv2-sdk"
s.version = package["version"]
s.summary = package["description"]
s.description = <<-DESC
react-native-kustomerv2-sdk
DESC
s.homepage = "https://github.com/alex-lanclos/react-native-kustomerv2-sdk.git"
s.license = "MIT"
s.authors = { "Alex Lanclos" => "[email protected]" }
s.platforms = { :ios => "11.0" }
s.source = { :git => "https://github.com/alex-lanclos/react-native-kustomerv2-sdk.git", :tag => "#{s.version}" }
s.dependency 'KustomerChat'
s.source_files = "ios/**/*.{h,c,m,swift}"
s.requires_arc = true
s.dependency "React"
end
| 31.8 | 120 | 0.6 |
f8c0e0db76ea37c01fb040b0c533dbf9053dafc1 | 857 | require 'xsd/qname'
# {http://www.jin.gr.jp/~nahi/xmlns/sample/Person}Person
class Person
@@schema_type = "Person"
@@schema_ns = "http://www.jin.gr.jp/~nahi/xmlns/sample/Person"
@@schema_element = [["familyname", ["SOAP::SOAPString", XSD::QName.new(nil, "familyname")]], ["givenname", ["SOAP::SOAPString", XSD::QName.new(nil, "givenname")]], ["var1", ["SOAP::SOAPInt", XSD::QName.new(nil, "var1")]], ["var2", ["SOAP::SOAPDouble", XSD::QName.new(nil, "var2")]], ["var3", ["SOAP::SOAPString", XSD::QName.new(nil, "var3")]]]
attr_accessor :familyname
attr_accessor :givenname
attr_accessor :var1
attr_accessor :var2
attr_accessor :var3
def initialize(familyname = nil, givenname = nil, var1 = nil, var2 = nil, var3 = nil)
@familyname = familyname
@givenname = givenname
@var1 = var1
@var2 = var2
@var3 = var3
end
end
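# Example (hypothetical values) of building a Person for SOAP (de)serialization:
#   Person.new('Doe', 'Jane', 30, 1.75, 'extra note')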
| 37.26087 | 345 | 0.65811 |
bb6bedd2923d19dad37945967dfcc9cc7f781ae1 | 570 | ################################################################################
# A test script that handles mp3 file tags.
################################################################################
require "mp3info"
# Path to the working directory
WORKING_DIRECTORY_PATH = __dir__.encode("UTF-8").freeze
# Process every mp3 file at the same level as the working directory
Dir.glob("#{WORKING_DIRECTORY_PATH}/*.mp3").each do |f|
Mp3Info.open(f) do |mp3|
puts "#{f}"
# Set the track number
# mp3.tag2.TRCK = File.basename(f, ".*").gsub(/[^0-9 _\-]*/, "").split(/[ _\-]/).last
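# e.g. an (illustrative) file named "07 - Some Song.mp3" would yield "07" with the rule above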
# Print the tag information
p mp3.tag2
end
puts
end
| 23.75 | 89 | 0.454386 |
18d6f3f9d324c78dd4a515edc169a06090cc527b | 162 | module WiMP
class SimpleArtist < Struct.new(:name, :id)
#
# @return String
#
def url
"http://wimpmusic.se/artist/#{id}"
end
end
end
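# Example (made-up values):
#   WiMP::SimpleArtist.new('Some Artist', 42).url # => "http://wimpmusic.se/artist/42"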
| 14.727273 | 45 | 0.567901 |
b9de1baf440fb46321ff61e9329976428a44fcfd | 5,769 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
require 'features/page_objects/notification'
describe 'Upload attachment to wiki page', js: true do
let(:user) do
FactoryBot.create :user,
member_in_project: project,
member_with_permissions: %i[view_wiki_pages edit_wiki_pages]
end
let(:project) { FactoryBot.create(:project) }
let(:attachments) { ::Components::Attachments.new }
let(:image_fixture) { Rails.root.join('spec/fixtures/files/image.png') }
let(:editor) { ::Components::WysiwygEditor.new }
let(:wiki_page_content) { project.wiki.pages.first.content.text }
before do
login_as(user)
end
it 'can upload an image to new and existing wiki page via drag & drop' do
visit project_wiki_path(project, 'test')
# adding an image
editor.in_editor do |container, editable|
attachments.drag_and_drop_file(editable, image_fixture)
# Besides testing caption functionality this also slows down clicking on the submit button
# so that the image is properly embedded
editable.find('figure.image figcaption').base.send_keys('Image uploaded the first time')
end
expect(page).to have_selector('attachment-list-item', text: 'image.png')
expect(page).not_to have_selector('notification-upload-progress')
click_on 'Save'
expect(page).to have_selector('#content img', count: 1)
expect(page).to have_content('Image uploaded the first time')
expect(page).to have_selector('attachment-list-item', text: 'image.png')
within '.toolbar-items' do
click_on "Edit"
end
# Replace one image with a named attachment URL (Regression #28381)
editor.set_markdown "\n\nText that prevents the two images colliding"
editor.in_editor do |container, editable|
# Expect URL is mapped to the correct URL
expect(container).to have_selector('img[src^="/api/v3/attachments/"')
expect(container).to have_no_selector('img[src="image.png"]')
attachments.drag_and_drop_file(editable, image_fixture)
# Besides testing caption functionality this also slows down clicking on the submit button
# so that the image is properly embedded
editable.find('figure.image figcaption').base.send_keys('Image uploaded the second time')
end
expect(page).to have_selector('attachment-list-item', text: 'image.png', count: 2)
expect(page).not_to have_selector('notification-upload-progress')
click_on 'Save'
expect(page).to have_selector('#content img', count: 2)
expect(page).to have_content('Image uploaded the second time')
expect(page).to have_selector('attachment-list-item', text: 'image.png', count: 2)
# Rendered once through the name in the backend
expect(page).to have_selector('img[src^="/attachments', count: 1)
# And once with the full url
expect(page).to have_selector('img[src^="/api/v3/attachments/"', count: 1)
expect(wiki_page_content).to include ''
expect(wiki_page_content).to include ''
end
it 'can upload an image to new and existing wiki page via attachment area' do
visit project_wiki_path(project, 'test')
expect(page).to have_selector('#content_page_title')
expect(page).to have_selector('.work-package--attachments--drop-box')
# Upload image to dropzone
expect(page).to have_no_selector('.work-package--attachments--filename')
attachments.attach_file_on_input(image_fixture)
expect(page).not_to have_selector('notification-upload-progress')
expect(page).to have_selector('.work-package--attachments--filename', text: 'image.png')
# Assume we could still save the page with an empty title
page.execute_script 'jQuery("#content_page_title").removeAttr("required aria-required");'
# Remove title so we will result in an error
fill_in 'content_page_title', with: ''
click_on 'Save'
expect(page).to have_selector('#errorExplanation', text: "Title can't be blank.")
expect(page).to have_selector('.work-package--attachments--filename', text: 'image.png')
editor.in_editor do |container, editable|
editable.send_keys 'hello there.'
end
fill_in 'content_page_title', with: 'Test'
click_on 'Save'
expect(page).to have_selector('.controller-wiki.action-show')
expect(page).to have_selector('h2', text: 'Test')
expect(page).to have_selector('.work-package--attachments--filename', text: 'image.png')
end
end
| 40.626761 | 126 | 0.723349 |
ac0f595f14434e31380daed929b1d548809d9a99 | 1,240 | #!/usr/bin/env ruby
# <bitbar.title>Your time on earth</bitbar.title>
# <bitbar.version>v1.0</bitbar.version>
# <bitbar.author>Tim Regener</bitbar.author>
# <bitbar.author.github>timlapluie</bitbar.author.github>
# <bitbar.desc>Displays the time you are already living.</bitbar.desc>
# <bitbar.image>http://i.imgur.com/EzUARsL.png</bitbar.image>
# <bitbar.dependencies>ruby</bitbar.dependencies>
# --------------------- #
# EDIT THESE VARIABLES. #
# --------------------- #
# Add your Birthday here
# Format: 'YYYY-MM-DD [hh:mm (optional)] [UTC Offset (optional)]'
BIRTHDAY = '1997-06-07'
# -------------------------------------------------------- #
# DON'T EDIT BELOW HERE UNLESS YOU KNOW WHAT YOU'RE DOING. #
# -------------------------------------------------------- #
require 'date'
birth_time = DateTime.parse(BIRTHDAY)
time_now = DateTime.now
delta = time_now - birth_time
days = delta.to_i
hours = (delta * 24)
minutes = (hours % 1) * 60
seconds = (minutes % 1) * 60
def format_int(number)
number.to_i.to_s.reverse.gsub(/(\d{3})(?=\d)/, '\\1,').reverse
end
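# e.g. format_int(1234567.89) # => "1,234,567" (digits grouped in threes)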
puts "#{format_int(days)} days on 🌍"
puts '---'
puts "Impressive! That's #{format_int(hours)} hours, #{format_int(minutes)} minutes, #{format_int(seconds)} seconds." | 31 | 117 | 0.606452 |
790011fa4facb686a98e51da2ebc48597e6a01f2 | 931 | class Article < ApplicationRecord
belongs_to :creator, class_name: 'User', foreign_key: :user_id
belongs_to :category, class_name: 'Category', foreign_key: :category_id
has_many :comments, dependent: :destroy
has_many :votes, dependent: :destroy
has_one_attached :image
validates :title, length: { minimum: 6, maximum: 50 }, presence: true
validates :text, length: { minimum: 6, maximum: 2000 }, presence: true
validates :category_id, presence: true
scope :most_recent_by_category, lambda {
from(
<<~SQL
(
SELECT articles.*
FROM articles JOIN (
SELECT category_id, max(created_at) AS created_at
FROM articles
GROUP BY category_id
) latest_by_category
ON articles.created_at = latest_by_category.created_at
AND articles.category_id = latest_by_category.category_id
) articles
SQL
)
}
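# Illustrative call (assumes seeded data): returns at most one article per
# category, the one with the newest created_at.
#   Article.most_recent_by_category.pluck(:category_id, :title)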
end
| 32.103448 | 73 | 0.669173 |
28d2dc4a1b839e62e5b00c89e38601a92c60043c | 2,297 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ServiceFabric::V6_4_0_36
module Models
#
# Information about load reported by replica.
#
class LoadMetricReportInfo
include MsRestAzure
# @return [String] The name of the metric.
attr_accessor :name
# @return [Integer] The value of the load for the metric. In future
# releases of Service Fabric this parameter will be deprecated in favor
# of CurrentValue.
attr_accessor :value
# @return [String] The double value of the load for the metric.
attr_accessor :current_value
# @return [DateTime] The UTC time when the load is reported.
attr_accessor :last_reported_utc
#
# Mapper for LoadMetricReportInfo class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'LoadMetricReportInfo',
type: {
name: 'Composite',
class_name: 'LoadMetricReportInfo',
model_properties: {
name: {
client_side_validation: true,
required: false,
serialized_name: 'Name',
type: {
name: 'String'
}
},
value: {
client_side_validation: true,
required: false,
serialized_name: 'Value',
type: {
name: 'Number'
}
},
current_value: {
client_side_validation: true,
required: false,
serialized_name: 'CurrentValue',
type: {
name: 'String'
}
},
last_reported_utc: {
client_side_validation: true,
required: false,
serialized_name: 'LastReportedUtc',
type: {
name: 'DateTime'
}
}
}
}
}
end
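# Illustrative construction (hypothetical values); the mapper above drives
# MsRest (de)serialization of this shape:
#   info = LoadMetricReportInfo.new
#   info.name = 'cpu'
#   info.current_value = '0.75'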
end
end
end
| 28.012195 | 77 | 0.516326 |
01854e90188aed868233303285a07c47fb2b883c | 181 | class AddModeratorToUsers < ActiveRecord::Migration
def change
add_column :users, :moderator, :boolean, null: false, default: false
add_index :users, :moderator
end
end
| 25.857143 | 72 | 0.745856 |
e89a4040665ca616918dca590611f74857b00c02 | 184 | class Api::V1::IpoEventsController < ApplicationController
def index
ipo_events = IpoEvent.recent.pending.limit(15)
render json: IpoEventSerializer.new(ipo_events)
end
end
| 26.285714 | 58 | 0.777174 |