| hexsha (string, len 40) | size (int64, 2–1.01M) | content (string, len 2–1.01M) | avg_line_length (float64, 1.5–100) | max_line_length (int64, 2–1k) | alphanum_fraction (float64, 0.25–1) |
|---|---|---|---|---|---|
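Each row pairs a file's `hexsha` and `size` (bytes) with its full `content` and three derived statistics. The sketch below shows one way those statistics could be recomputed from a content string; the exact formulas used when the dataset was built are an assumption, so treat it as illustrative only.

```ruby
# Illustrative only: recompute the per-row statistics from a file's content.
# Whether the dataset counts newlines toward line length, or how it rounds,
# is assumed here rather than known.
def row_stats(content)
  lines = content.lines.map(&:chomp)
  {
    size:              content.bytesize,
    avg_line_length:   lines.empty? ? 0.0 : lines.sum(&:length).to_f / lines.size,
    max_line_length:   lines.map(&:length).max || 0,
    alphanum_fraction: content.empty? ? 0.0 : content.count("a-zA-Z0-9").fdiv(content.size)
  }
end
```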
28e64a5d4fea51eba8b9fca5bf533970ee9301fa | 211 |
# -*- encoding : utf-8 -*-
class RenameStatusIdOnCard < ActiveRecord::Migration
  def up
    rename_column :cards, :status_id, :deck_id
  end

  def down
    rename_column :cards, :deck_id, :status_id
  end
end
| 19.181818 | 52 | 0.696682 |
33755307333162d8fc62b29891f014575e8bded0 | 2,256 |
require 'spec_helper'

describe "a built instance" do
  include FactoryBot::Syntax::Methods

  before do
    define_model('User')

    define_model('Post', user_id: :integer) do
      belongs_to :user
    end

    FactoryBot.define do
      factory :user

      factory :post do
        user
      end
    end
  end

  subject { build(:post) }

  it { should be_new_record }

  context "when the :use_parent_strategy config option has not been set" do
    before { FactoryBot.use_parent_strategy = nil }

    it "assigns and saves associations" do
      expect(subject.user).to be_kind_of(User)
      expect(subject.user).not_to be_new_record
    end
  end

  context "when the :use_parent_strategy config option has been enabled" do
    before { FactoryBot.use_parent_strategy = true }

    it "assigns but does not save associations" do
      expect(subject.user).to be_kind_of(User)
      expect(subject.user).to be_new_record
    end
  end

  it "assigns but does not save associations when using parent strategy" do
    FactoryBot.use_parent_strategy = true

    expect(subject.user).to be_kind_of(User)
    expect(subject.user).to be_new_record
  end
end

describe "a built instance with strategy: :create" do
  include FactoryBot::Syntax::Methods

  before do
    define_model('User')

    define_model('Post', user_id: :integer) do
      belongs_to :user
    end

    FactoryBot.define do
      factory :user

      factory :post do
        association(:user, strategy: :create)
      end
    end
  end

  subject { build(:post) }

  it { should be_new_record }

  it "assigns and saves associations" do
    expect(subject.user).to be_kind_of(User)
    expect(subject.user).not_to be_new_record
  end
end

describe "calling `build` with a block" do
  include FactoryBot::Syntax::Methods

  before do
    define_model('Company', name: :string)

    FactoryBot.define do
      factory :company
    end
  end

  it "passes the built instance" do
    build(:company, name: 'thoughtbot') do |company|
      expect(company.name).to eq('thoughtbot')
    end
  end

  it "returns the built instance" do
    expected = nil

    result = build(:company) do |company|
      expected = company
      "hello!"
    end

    expect(result).to eq expected
  end
end
| 21.084112 | 75 | 0.679965 |
ed9e04d1dada8e2ad7fba9f87013ca72d1500f14 | 643 |
require 'net/http'
require 'nokogiri'
require 'addressable/uri'
require 'correios/address_search_response'

class Cep
  URL = 'http://m.correios.com.br/movel/buscaCepConfirma.do'

  def search(term)
    response = query({cepEntrada: term.parameterize(' '), metodo: :buscarCep})
    Correios::AddressSearchResponse.new(response.body).addresses
  end
  alias_method :address, :search

  def cep(cep)
    search(cep)[0]
  end

  def url(params = {})
    uri = Addressable::URI.new
    uri.query_values = params
    url = "#{Cep::URL}?#{uri.query}"
  end

  def query(params = {})
    uri = URI(URL)
    Net::HTTP.post_form(uri, params)
  end
end
| 21.433333 | 78 | 0.679627 |
1cbc7ccd19b5edb128016634d08095807ab7ec38 | 180 |
class CreateAccounts < ActiveRecord::Migration
  def self.up
    create_table :accounts do |t|
      t.timestamps
    end
  end

  def self.down
    drop_table :accounts
  end
end
| 15 | 46 | 0.688889 |
08c0d2ca5d912cdba3c3af42aae35888e35271ca | 8,347 |
# TODO: move paranoid property concerns to a ParanoidModel that is mixed
# into Model when a Paranoid property is used
# TODO: update Model#respond_to? to return true if method_missing
# would handle the message
module DataMapper
module Model
module Property
Model.append_extensions self
extend Chainable
def self.extended(model)
model.instance_variable_set(:@properties, {})
model.instance_variable_set(:@field_naming_conventions, {})
model.instance_variable_set(:@paranoid_properties, {})
end
chainable do
def inherited(model)
model.instance_variable_set(:@properties, {})
model.instance_variable_set(:@field_naming_conventions, {})
model.instance_variable_set(:@paranoid_properties, @paranoid_properties.dup)
@properties.each do |repository_name, properties|
repository(repository_name) do
properties.each do |property|
model.property(property.name, property.type, property.options)
end
end
end
super
end
end
##
# Defines a Property on the Resource
#
# @param [Symbol] name
# the name for which to call this property
# @param [Type] type
# the type to define this property as
# @param [Hash(Symbol => String)] options
# a hash of available options
#
# @return [Property]
# the created Property
#
# @see Property
#
# @api public
def property(name, type, options = {})
property = DataMapper::Property.new(self, name, type, options)
properties(repository_name) << property
# Add property to the other mappings as well if this is for the default
# repository.
if repository_name == default_repository_name
@properties.except(repository_name).each do |repository_name, properties|
next if properties.named?(name)
# make sure the property is created within the correct repository scope
DataMapper.repository(repository_name) do
properties << DataMapper::Property.new(self, name, type, options)
end
end
end
# Add the property to the lazy_loads set for this resources repository
# only.
# TODO Is this right or should we add the lazy contexts to all
# repositories?
if property.lazy?
context = options.fetch(:lazy, :default)
context = :default if context == true
Array(context).each do |item|
properties(repository_name).lazy_context(item) << name
end
end
# add the property to the child classes only if the property was
# added after the child classes' properties have been copied from
# the parent
if respond_to?(:descendants)
descendants.each do |descendant|
next if descendant.properties(repository_name).named?(name)
descendant.property(name, type, options)
end
end
create_reader_for(property)
create_writer_for(property)
property
end
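# Usage sketch for the DSL above (model and property names here are hypothetical,
# and this assumes the standard DataMapper::Resource mixin rather than anything
# defined in this file):
#
#   class Article
#     include DataMapper::Resource
#
#     property :id,    Serial
#     property :title, String, :lazy => :details
#   end
#
#   Article.properties.named?(:title)  # => true
#   Article.new.respond_to?(:title=)   # => true (writer generated by create_writer_for below)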
##
# Gets a list of all properties that have been defined on this Model in
# the requested repository
#
# @param [Symbol, String] repository_name
# The name of the repository to use. Uses the default Repository
# if none is specified.
#
# @return [Array]
# A list of Properties defined on this Model in the given Repository
#
# @api public
def properties(repository_name = default_repository_name)
# TODO: create PropertySet#copy that will copy the properties, but assign the
# new Relationship objects to a supplied repository and model. dup does not really
# do what is needed
@properties[repository_name] ||= if repository_name == default_repository_name
PropertySet.new
else
properties(default_repository_name).dup
end
end
##
# Gets the list of key fields for this Model in +repository_name+
#
# @param [String] repository_name
# The name of the Repository for which the key is to be reported
#
# @return [Array]
# The list of key fields for this Model in +repository_name+
#
# @api public
def key(repository_name = default_repository_name)
properties(repository_name).key
end
# TODO: document
# @api public
def identity_field(repository_name = default_repository_name)
# XXX: should identity_field be the same thing as key?
key(repository_name).detect { |property| property.serial? }
end
##
# Gets the field naming conventions for this resource in the given Repository
#
# @param [String, Symbol] repository_name
# the name of the Repository for which the field naming convention
# will be retrieved
#
# @return [#call]
# The naming convention for the given Repository
#
# @api public
def field_naming_convention(repository_name = default_storage_name)
@field_naming_conventions[repository_name] ||= repository(repository_name).adapter.field_naming_convention
end
# TODO: document
# @api private
def properties_with_subclasses(repository_name = default_repository_name)
properties = PropertySet.new
models = [ self ].to_set
models.merge(descendants) if respond_to?(:descendants)
models.each do |model|
model.properties(repository_name).each do |property|
properties << property unless properties.named?(property.name)
end
end
properties
end
# TODO: document
# @api private
def paranoid_properties
@paranoid_properties
end
# TODO: document
# @api private
def set_paranoid_property(name, &block)
paranoid_properties[name] = block
end
# TODO: document
# @api private
def typecast_key(key)
self.key(repository_name).zip(key).map { |property, value| property.typecast(value) }
end
# TODO: document
# @api private
def key_conditions(repository, key)
self.key(repository.name).zip(key).to_hash
end
private
# defines the reader method for the property
#
# @api private
def create_reader_for(property)
name = property.name.to_s
reader_visibility = property.reader_visibility
instance_variable_name = property.instance_variable_name
primitive = property.primitive
unless resource_method_defined?(name)
class_eval <<-RUBY, __FILE__, __LINE__ + 1
#{reader_visibility}
def #{name}
return #{instance_variable_name} if defined?(#{instance_variable_name})
#{instance_variable_name} = properties[#{name.inspect}].get(self)
end
RUBY
end
boolean_reader_name = "#{name}?"
if primitive == TrueClass && !resource_method_defined?(boolean_reader_name)
class_eval <<-RUBY, __FILE__, __LINE__ + 1
#{reader_visibility}
alias #{boolean_reader_name} #{name}
RUBY
end
end
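# Sketch of roughly what gets class_eval'd for a public property named :title
# (an illustrative expansion of the heredoc above, not literal output):
#
#   public
#   def title
#     return @title if defined?(@title)
#     @title = properties["title"].get(self)
#   end
#
# For a TrueClass property, a boolean reader alias such as `alias title? title`
# is also defined.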
# defines the setter for the property
#
# @api private
def create_writer_for(property)
name = property.name
writer_visibility = property.writer_visibility
writer_name = "#{name}="
return if resource_method_defined?(writer_name)
class_eval <<-RUBY, __FILE__, __LINE__ + 1
#{writer_visibility}
def #{writer_name}(value)
properties[#{name.inspect}].set(self, value)
end
RUBY
end
chainable do
# TODO: document
# @api public
def method_missing(method, *args, &block)
if property = properties(repository_name)[method]
return property
end
super
end
end
end # module Property
end # module Model
end # module DataMapper
| 31.379699 | 114 | 0.614832 |
262d690c761fac17798899c88a3cc82822fcdc6b | 72 |
module ActiveRecord
  module Bitemporal
    VERSION = "0.1.0"
  end
end
| 12 | 21 | 0.694444 |
e836461b0990c97366dbf38b227dd4c2edfcf692 | 3,914 |
# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::AlertManagementHelper do
include Gitlab::Routing.url_helpers
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:current_user) { create(:user) }
let(:project_path) { project.full_path }
let(:project_id) { project.id }
describe '#alert_management_data' do
let(:user_can_enable_alert_management) { true }
let(:setting_path) { project_settings_operations_path(project, anchor: 'js-alert-management-settings') }
subject(:data) { helper.alert_management_data(current_user, project) }
before do
allow(helper)
.to receive(:can?)
.with(current_user, :admin_operations, project)
.and_return(user_can_enable_alert_management)
end
context 'without alert_managements_setting' do
it 'returns index page configuration' do
expect(helper.alert_management_data(current_user, project)).to match(
'project-path' => project_path,
'enable-alert-management-path' => setting_path,
'alerts-help-url' => 'http://test.host/help/operations/incident_management/alerts.md',
'populating-alerts-help-url' => 'http://test.host/help/operations/incident_management/integrations.md#configuration',
'empty-alert-svg-path' => match_asset_path('/assets/illustrations/alert-management-empty-state.svg'),
'user-can-enable-alert-management' => 'true',
'alert-management-enabled' => 'false',
'text-query': nil,
'assignee-username-query': nil
)
end
end
context 'with prometheus service' do
let_it_be(:prometheus_service) { create(:prometheus_service, project: project) }
context 'when prometheus service is active' do
it 'enables alert management' do
expect(data).to include(
'alert-management-enabled' => 'true'
)
end
end
context 'when prometheus service is inactive' do
it 'disables alert management' do
prometheus_service.update!(manual_configuration: false)
expect(data).to include(
'alert-management-enabled' => 'false'
)
end
end
end
context 'with http integration' do
let_it_be(:integration) { create(:alert_management_http_integration, project: project) }
context 'when integration is active' do
it 'enables alert management' do
expect(data).to include(
'alert-management-enabled' => 'true'
)
end
end
context 'when integration is inactive' do
it 'disables alert management' do
integration.update!(active: false)
expect(data).to include(
'alert-management-enabled' => 'false'
)
end
end
end
context 'with an alert' do
let_it_be(:alert) { create(:alert_management_alert, project: project) }
it 'enables alert management' do
expect(data).to include(
'alert-management-enabled' => 'true'
)
end
end
context 'when user does not have requisite enablement permissions' do
let(:user_can_enable_alert_management) { false }
it 'shows error tracking enablement as disabled' do
expect(helper.alert_management_data(current_user, project)).to include(
'user-can-enable-alert-management' => 'false'
)
end
end
end
describe '#alert_management_detail_data' do
let(:alert_id) { 1 }
let(:issues_path) { project_issues_path(project) }
it 'returns detail page configuration' do
expect(helper.alert_management_detail_data(project, alert_id)).to eq(
'alert-id' => alert_id,
'project-path' => project_path,
'project-id' => project_id,
'project-issues-path' => issues_path,
'page' => 'OPERATIONS'
)
end
end
end
| 31.821138 | 127 | 0.64512 |
ff983ed4d800f37ece83132cf0456034bfbedcba | 8,170 |
$:.unshift File.join(File.dirname(__FILE__),"..","lib")
require 'minitest/autorun'
require 'rgen/template_language'
require 'rgen/metamodel_builder'
class TemplateContainerTest < MiniTest::Test
TEMPLATES_DIR = File.dirname(__FILE__)+"/template_language_test/templates"
OUTPUT_DIR = File.dirname(__FILE__)+"/template_language_test"
module MyMM
class Chapter
attr_reader :title
def initialize(title)
@title = title
end
end
class Document
attr_reader :title, :authors, :chapters
attr_accessor :sampleArray
def initialize(title)
@title = title
@chapters = []
@authors = []
end
end
class Author
attr_reader :name, :email
def initialize(name, email)
@name, @email = name, email
end
end
end
module CCodeMM
class CArray < RGen::MetamodelBuilder::MMBase
has_attr 'name'
has_attr 'size', Integer
has_attr 'type'
end
class PrimitiveInitValue < RGen::MetamodelBuilder::MMBase
has_attr 'value', Integer
end
CArray.has_many 'initvalue', PrimitiveInitValue
end
TEST_MODEL = MyMM::Document.new("SomeDocument")
TEST_MODEL.authors << MyMM::Author.new("Martin", "[email protected]")
TEST_MODEL.authors << MyMM::Author.new("Otherguy", "[email protected]")
TEST_MODEL.chapters << MyMM::Chapter.new("Intro")
TEST_MODEL.chapters << MyMM::Chapter.new("MainPart")
TEST_MODEL.chapters << MyMM::Chapter.new("Summary")
TEST_MODEL.sampleArray = CCodeMM::CArray.new(:name => "myArray", :type => "int", :size => 5,
:initvalue => (1..5).collect { |v| CCodeMM::PrimitiveInitValue.new(:value => v) })
def test_with_model
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
File.delete(OUTPUT_DIR+"/testout.txt") if File.exists? OUTPUT_DIR+"/testout.txt"
tc.expand('root::Root', :for => TEST_MODEL, :indent => 1)
result = expected = ""
File.open(OUTPUT_DIR+"/testout.txt") {|f| result = f.read}
File.open(OUTPUT_DIR+"/expected_result1.txt") {|f| expected = f.read}
assert_equal expected, result
end
def test_immediate_result
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
expected = ""
File.open(OUTPUT_DIR+"/expected_result2.txt","rb") {|f| expected = f.read}
assert_equal expected, tc.expand('code/array::ArrayDefinition', :for => TEST_MODEL.sampleArray).to_s
end
def test_indent_string
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
tc.indentString = " " # 2 spaces instead of 3 (default)
tc.expand('indent_string_test::IndentStringTest', :for => :dummy)
File.open(OUTPUT_DIR+"/indentStringTestDefaultIndent.out","rb") do |f|
assert_equal " <- your default here\r\n", f.read
end
File.open(OUTPUT_DIR+"/indentStringTestTabIndent.out","rb") do |f|
assert_equal "\t<- tab\r\n", f.read
end
end
def test_null_context
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
assert_raises StandardError do
# the template must raise an exception because it calls expand :for => nil
tc.expand('null_context_test::NullContextTestBad', :for => :dummy)
end
assert_raises StandardError do
# the template must raise an exception because it calls expand :foreach => nil
tc.expand('null_context_test::NullContextTestBad2', :for => :dummy)
end
tc.expand('null_context_test::NullContextTestOk', :for => :dummy)
end
def test_no_indent
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
assert_equal " xxx<---\r\n xxx<---\r\n xxx<---\r\n xxx<---\r\n", tc.expand('no_indent_test/test::Test', :for => :dummy)
end
def test_no_indent2
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
assert_equal " return xxxx;\r\n", tc.expand("no_indent_test/test2::Test", :for => :dummy)
end
def test_no_indent3
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
assert_equal " l1<---\r\n l2\r\n\r\n", tc.expand("no_indent_test/test3::Test", :for => :dummy)
end
def test_template_resolution
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
assert_equal "Sub1\r\nSub1 in sub1\r\n", tc.expand('template_resolution_test/test::Test', :for => :dummy)
assert_equal "Sub1\r\nSub1\r\nSub1 in sub1\r\n", tc.expand('template_resolution_test/sub1::Test', :for => :dummy)
end
def test_evaluate
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
assert_equal "xx1xxxx2xxxx3xxxx4xx\r\n", tc.expand('evaluate_test/test::Test', :for => :dummy)
end
def test_define_local
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
assert_equal "Local1\r\n", tc.expand('define_local_test/test::Test', :for => :dummy)
assert_raises StandardError do
tc.expand('define_local_test/test::TestForbidden', :for => :dummy)
end
end
def test_no_backslash_r
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
expected = ""
File.open(OUTPUT_DIR+"/expected_result3.txt") {|f| expected = f.read}
assert_equal expected, tc.expand('no_backslash_r_test::Test', :for => :dummy).to_s
end
def test_callback_indent
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
assert_equal("|before callback\r\n |in callback\r\n|after callback\r\n |after iinc\r\n",
tc.expand('callback_indent_test/a::caller', :for => :dummy))
end
def test_indent_nonl_at_eof
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
assert_equal(" Sub\n",
tc.expand('indent_nonl_at_eof_test/test::Test', :for => :dummy))
end
def test_indent_same_line_sub
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
assert_equal(" Start Sub2\r\n Sub\r\n",
tc.expand('indent_same_line_sub/test::Test', :for => :dummy))
end
def test_line_endings
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
tc.expand('line_endings/unix::Unix', :for => :dummy)
tc.expand('line_endings/windows::Windows', :for => :dummy)
tc.expand('line_endings/mixed::Mixed', :for => :dummy)
unix = binread(OUTPUT_DIR+'/line_endings_unix.txt')
assert unix.include?("|\n") && !unix.include?("|\r\n"), unix
windows = binread(OUTPUT_DIR+'/line_endings_windows.txt')
assert windows.include?("|\r\n") && !windows.include?("|\n"), windows
mixed = binread(OUTPUT_DIR+'/line_endings_mixed.txt')
assert mixed.include?("|\r\n") && mixed.include?("|\n"), mixed
end
def test_ws
tc = RGen::TemplateLanguage::DirectoryTemplateContainer.new([MyMM, CCodeMM], OUTPUT_DIR)
tc.load(TEMPLATES_DIR)
assert_equal("/*\n *\n */\n",
tc.expand('ws_test::WSTest', :for => :dummy))
assert_equal("somevar = 1;\n",
tc.expand('ws_test::WSTest2', :for => :dummy))
assert_equal(" /*\n *\n */\n",
tc.expand('ws_test::WSTest3', :for => :dummy))
end
private
def binread(file)
result = nil
File.open(file, "rb") do |f|
result = f.read
end
result
end
end
| 38 | 132 | 0.664015 |
4a549fd537f33b97e9053043f986c723d1cfc956 | 1,238 |
class Profanity < Formula
  desc "Console based XMPP client"
  homepage "http://www.profanity.im/"
  url "http://www.profanity.im/profanity-0.4.7.tar.gz"
  sha256 "b02c4e029fe84941050ccab6c8cdf5f15df23de5d1384b4d1ec66da6faee11dd"
  revision 3
  head "https://github.com/boothj5/profanity.git"

  bottle do
    sha256 "f822fb49ab3e57dc2c0e155ea23f93c0d8f8e230a5b6e7f41401368ef9cde23d" => :sierra
    sha256 "266839844245169363cbe2f8a4e09023adef02d529ded253aa5bf30c8607a473" => :el_capitan
    sha256 "17ceae0ada23ce3686e534ad7a3644348105805dc1f3da570cb6c08b749a41a9" => :yosemite
  end

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "pkg-config" => :build
  depends_on "ossp-uuid"
  depends_on "libstrophe"
  depends_on "readline"
  depends_on "glib"
  depends_on "openssl"
  depends_on "gnutls"
  depends_on "libotr" => :recommended
  depends_on "gpgme" => :recommended
  depends_on "terminal-notifier" => :optional

  def install
    system "./bootstrap.sh"
    system "./configure", "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}"
    system "make", "install"
  end

  test do
    system "profanity", "-v"
  end
end
| 30.195122 | 92 | 0.708401 |
1a4f8dc186e36295a23d0d00fef8544e7ae043df | 720 |
cask 'ableton-live' do
  version '9.2.1'

  if Hardware::CPU.is_32_bit?
    sha256 'ce37b5ec6c2f717c8db83e39a75398e462bd131d39df5a559b265fa27eba38f6'
    url "http://cdn2-downloads.ableton.com/channels/#{version}/ableton_live_trial_#{version}_32.dmg"
  else
    sha256 '30599a21a857be855e687e3d5a162cefb84ff98491bc2757d0580e811114295e'
    url "http://cdn2-downloads.ableton.com/channels/#{version}/ableton_live_trial_#{version}_64.dmg"
  end

  name 'Ableton Live'
  homepage 'https://ableton.com/en/live'
  license :commercial

  app "Ableton Live #{version.to_i} Trial.app"

  zap :delete => '~/Library/*/*[Aa]bleton*',
      :rmdir  => '~/Music/Ableton/Factory Packs'
      #:trash => '~/Music/Ableton/User Library'
end
| 32.727273 | 100 | 0.733333 |
1d8164703d0fcbbdc9b2d94eeff7f8dbb61a752d | 744 |
Pod::Spec.new do |s|
  s.name             = "CategoryPickerLabel"
  s.version          = "0.0.4"
  s.summary          = "A label that opens a picker for selection."
  s.homepage         = "https://github.com/perlguy99/CategoryPickerLabel"
  s.license          = { :type => "MIT", :file => "LICENSE" }
  s.author           = { "Brent Michalski" => "[email protected]" }
  s.social_media_url = "http://twitter.com/perlguy2"
  s.platform         = :ios, "12.0"
  s.source           = { :git => "https://github.com/perlguy99/CategoryPickerLabel.git", :tag => "#{s.version}" }
  s.source_files     = "CategoryPickerLabel/**/*.{swift}"
  s.framework        = "UIKit"
  s.dependency "FontAwesome.swift", "~> 1.4.8"
  s.swift_version    = "4.2"
  s.requires_arc     = true
end
| 29.76 | 109 | 0.602151 |
2615a1fce9b8c6348a29feabf3dbd4f198917af0 | 396 |
cask :v1 => 'vagrant' do
  version '1.7.2'
  sha256 '78d02afada2f066368bd0ce1883f900f89b6dc20f860463ce125e7cb295e347c'

  url "https://dl.bintray.com/mitchellh/vagrant/vagrant_#{version}.dmg"
  homepage 'http://www.vagrantup.com'
  license :mit

  pkg 'Vagrant.pkg'

  uninstall :script  => { :executable => 'uninstall.tool', :input => %w[Yes] },
            :pkgutil => 'com.vagrant.vagrant'
end
| 28.285714 | 78 | 0.689394 |
e91067b977fd55d0769f24be8ece1a320d27e13e | 102,150 |
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'seahorse/client/plugins/content_length.rb'
require 'aws-sdk-core/plugins/credentials_configuration.rb'
require 'aws-sdk-core/plugins/logging.rb'
require 'aws-sdk-core/plugins/param_converter.rb'
require 'aws-sdk-core/plugins/param_validator.rb'
require 'aws-sdk-core/plugins/user_agent.rb'
require 'aws-sdk-core/plugins/helpful_socket_errors.rb'
require 'aws-sdk-core/plugins/retry_errors.rb'
require 'aws-sdk-core/plugins/global_configuration.rb'
require 'aws-sdk-core/plugins/regional_endpoint.rb'
require 'aws-sdk-core/plugins/endpoint_discovery.rb'
require 'aws-sdk-core/plugins/endpoint_pattern.rb'
require 'aws-sdk-core/plugins/response_paging.rb'
require 'aws-sdk-core/plugins/stub_responses.rb'
require 'aws-sdk-core/plugins/idempotency_token.rb'
require 'aws-sdk-core/plugins/jsonvalue_converter.rb'
require 'aws-sdk-core/plugins/client_metrics_plugin.rb'
require 'aws-sdk-core/plugins/client_metrics_send_plugin.rb'
require 'aws-sdk-core/plugins/signature_v4.rb'
require 'aws-sdk-core/plugins/protocols/json_rpc.rb'
Aws::Plugins::GlobalConfiguration.add_identifier(:codecommit)
module Aws::CodeCommit
class Client < Seahorse::Client::Base
include Aws::ClientStubs
@identifier = :codecommit
set_api(ClientApi::API)
add_plugin(Seahorse::Client::Plugins::ContentLength)
add_plugin(Aws::Plugins::CredentialsConfiguration)
add_plugin(Aws::Plugins::Logging)
add_plugin(Aws::Plugins::ParamConverter)
add_plugin(Aws::Plugins::ParamValidator)
add_plugin(Aws::Plugins::UserAgent)
add_plugin(Aws::Plugins::HelpfulSocketErrors)
add_plugin(Aws::Plugins::RetryErrors)
add_plugin(Aws::Plugins::GlobalConfiguration)
add_plugin(Aws::Plugins::RegionalEndpoint)
add_plugin(Aws::Plugins::EndpointDiscovery)
add_plugin(Aws::Plugins::EndpointPattern)
add_plugin(Aws::Plugins::ResponsePaging)
add_plugin(Aws::Plugins::StubResponses)
add_plugin(Aws::Plugins::IdempotencyToken)
add_plugin(Aws::Plugins::JsonvalueConverter)
add_plugin(Aws::Plugins::ClientMetricsPlugin)
add_plugin(Aws::Plugins::ClientMetricsSendPlugin)
add_plugin(Aws::Plugins::SignatureV4)
add_plugin(Aws::Plugins::Protocols::JsonRpc)
# @overload initialize(options)
# @param [Hash] options
# @option options [required, Aws::CredentialProvider] :credentials
# Your AWS credentials. This can be an instance of any one of the
# following classes:
#
# * `Aws::Credentials` - Used for configuring static, non-refreshing
# credentials.
#
# * `Aws::InstanceProfileCredentials` - Used for loading credentials
# from an EC2 IMDS on an EC2 instance.
#
# * `Aws::SharedCredentials` - Used for loading credentials from a
# shared file, such as `~/.aws/config`.
#
# * `Aws::AssumeRoleCredentials` - Used when you need to assume a role.
#
# When `:credentials` are not configured directly, the following
# locations will be searched for credentials:
#
# * `Aws.config[:credentials]`
# * The `:access_key_id`, `:secret_access_key`, and `:session_token` options.
# * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
# * `~/.aws/credentials`
# * `~/.aws/config`
# * EC2 IMDS instance profile - When used by default, the timeouts are
# very aggressive. Construct and pass an instance of
# `Aws::InstanceProfileCredentials` to enable retries and extended
# timeouts.
#
# @option options [required, String] :region
# The AWS region to connect to. The configured `:region` is
# used to determine the service `:endpoint`. When not passed,
# a default `:region` is searched for in the following locations:
#
# * `Aws.config[:region]`
# * `ENV['AWS_REGION']`
# * `ENV['AMAZON_REGION']`
# * `ENV['AWS_DEFAULT_REGION']`
# * `~/.aws/credentials`
# * `~/.aws/config`
#
# @option options [String] :access_key_id
#
# @option options [Boolean] :active_endpoint_cache (false)
# When set to `true`, a thread polling for endpoints will be running in
# the background every 60 secs (default). Defaults to `false`.
#
# @option options [Boolean] :client_side_monitoring (false)
# When `true`, client-side metrics will be collected for all API requests from
# this client.
#
# @option options [String] :client_side_monitoring_client_id ("")
# Allows you to provide an identifier for this client which will be attached to
# all generated client side metrics. Defaults to an empty string.
#
# @option options [Integer] :client_side_monitoring_port (31000)
# Required for publishing client metrics. The port that the client side monitoring
# agent is running on, where client metrics will be published via UDP.
#
# @option options [Aws::ClientSideMonitoring::Publisher] :client_side_monitoring_publisher (Aws::ClientSideMonitoring::Publisher)
# Allows you to provide a custom client-side monitoring publisher class. By default,
# will use the Client Side Monitoring Agent Publisher.
#
# @option options [Boolean] :convert_params (true)
# When `true`, an attempt is made to coerce request parameters into
# the required types.
#
# @option options [Boolean] :disable_host_prefix_injection (false)
# Set to true to disable SDK automatically adding host prefix
# to default service endpoint when available.
#
# @option options [String] :endpoint
# The client endpoint is normally constructed from the `:region`
# option. You should only configure an `:endpoint` when connecting
# to test endpoints. This should be a valid HTTP(S) URI.
#
# @option options [Integer] :endpoint_cache_max_entries (1000)
# Used for the maximum size limit of the LRU cache storing endpoints data
# for endpoint discovery enabled operations. Defaults to 1000.
#
# @option options [Integer] :endpoint_cache_max_threads (10)
# Used for the maximum threads in use for polling endpoints to be cached, defaults to 10.
#
# @option options [Integer] :endpoint_cache_poll_interval (60)
# When :endpoint_discovery and :active_endpoint_cache is enabled,
# Use this option to config the time interval in seconds for making
# requests fetching endpoints information. Defaults to 60 sec.
#
# @option options [Boolean] :endpoint_discovery (false)
# When set to `true`, endpoint discovery will be enabled for operations when available. Defaults to `false`.
#
# @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default)
# The log formatter.
#
# @option options [Symbol] :log_level (:info)
# The log level to send messages to the `:logger` at.
#
# @option options [Logger] :logger
# The Logger instance to send log messages to. If this option
# is not set, logging will be disabled.
#
# @option options [String] :profile ("default")
# Used when loading credentials from the shared credentials file
# at HOME/.aws/credentials. When not specified, 'default' is used.
#
# @option options [Float] :retry_base_delay (0.3)
# The base delay in seconds used by the default backoff function.
#
# @option options [Symbol] :retry_jitter (:none)
# A delay randomiser function used by the default backoff function. Some predefined functions can be referenced by name - :none, :equal, :full, otherwise a Proc that takes and returns a number.
#
# @see https://www.awsarchitectureblog.com/2015/03/backoff.html
#
# @option options [Integer] :retry_limit (3)
# The maximum number of times to retry failed requests. Only
# ~ 500 level server errors and certain ~ 400 level client errors
# are retried. Generally, these are throttling errors, data
# checksum errors, networking errors, timeout errors and auth
# errors from expired credentials.
#
# @option options [Integer] :retry_max_delay (0)
# The maximum number of seconds to delay between retries (0 for no limit) used by the default backoff function.
#
# @option options [String] :secret_access_key
#
# @option options [String] :session_token
#
# @option options [Boolean] :simple_json (false)
# Disables request parameter conversion, validation, and formatting.
# Also disable response data type conversions. This option is useful
# when you want to ensure the highest level of performance by
# avoiding overhead of walking request parameters and response data
# structures.
#
# When `:simple_json` is enabled, the request parameters hash must
# be formatted exactly as the DynamoDB API expects.
#
# @option options [Boolean] :stub_responses (false)
# Causes the client to return stubbed responses. By default
# fake responses are generated and returned. You can specify
# the response data to return or errors to raise by calling
# {ClientStubs#stub_responses}. See {ClientStubs} for more information.
#
# ** Please note ** When response stubbing is enabled, no HTTP
# requests are made, and retries are disabled.
#
# @option options [Boolean] :validate_params (true)
# When `true`, request parameters are validated before
# sending the request.
#
def initialize(*args)
super
end
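# Construction sketch (the values below are placeholders, not working credentials;
# they only illustrate a few of the options documented above):
#
#   codecommit = Aws::CodeCommit::Client.new(
#     region: 'us-east-1',
#     credentials: Aws::Credentials.new('ACCESS_KEY_ID', 'SECRET_ACCESS_KEY'),
#     retry_limit: 5
#   )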
# @!group API Operations
# Returns information about one or more repositories.
#
# <note markdown="1"> The description field for a repository accepts all HTML characters and
# all valid Unicode characters. Applications that do not HTML-encode the
# description and display it in a web page could expose users to
# potentially malicious code. Make sure that you HTML-encode the
# description field in any application that uses this API to display the
# repository description on a web page.
#
# </note>
#
# @option params [required, Array<String>] :repository_names
# The names of the repositories to get information about.
#
# @return [Types::BatchGetRepositoriesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::BatchGetRepositoriesOutput#repositories #repositories} => Array<Types::RepositoryMetadata>
# * {Types::BatchGetRepositoriesOutput#repositories_not_found #repositories_not_found} => Array<String>
#
# @example Request syntax with placeholder values
#
# resp = client.batch_get_repositories({
# repository_names: ["RepositoryName"], # required
# })
#
# @example Response structure
#
# resp.repositories #=> Array
# resp.repositories[0].account_id #=> String
# resp.repositories[0].repository_id #=> String
# resp.repositories[0].repository_name #=> String
# resp.repositories[0].repository_description #=> String
# resp.repositories[0].default_branch #=> String
# resp.repositories[0].last_modified_date #=> Time
# resp.repositories[0].creation_date #=> Time
# resp.repositories[0].clone_url_http #=> String
# resp.repositories[0].clone_url_ssh #=> String
# resp.repositories[0].arn #=> String
# resp.repositories_not_found #=> Array
# resp.repositories_not_found[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/BatchGetRepositories AWS API Documentation
#
# @overload batch_get_repositories(params = {})
# @param [Hash] params ({})
def batch_get_repositories(params = {}, options = {})
req = build_request(:batch_get_repositories, params)
req.send_request(options)
end
# Creates a new branch in a repository and points the branch to a
# commit.
#
# <note markdown="1"> Calling the create branch operation does not set a repository's
# default branch. To do this, call the update default branch operation.
#
# </note>
#
# @option params [required, String] :repository_name
# The name of the repository in which you want to create the new branch.
#
# @option params [required, String] :branch_name
# The name of the new branch to create.
#
# @option params [required, String] :commit_id
# The ID of the commit to point the new branch to.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.create_branch({
# repository_name: "RepositoryName", # required
# branch_name: "BranchName", # required
# commit_id: "CommitId", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/CreateBranch AWS API Documentation
#
# @overload create_branch(params = {})
# @param [Hash] params ({})
def create_branch(params = {}, options = {})
req = build_request(:create_branch, params)
req.send_request(options)
end
# Creates a pull request in the specified repository.
#
# @option params [required, String] :title
# The title of the pull request. This title will be used to identify the
# pull request to other users in the repository.
#
# @option params [String] :description
# A description of the pull request.
#
# @option params [required, Array<Types::Target>] :targets
# The targets for the pull request, including the source of the code to
# be reviewed (the source branch), and the destination where the creator
# of the pull request intends the code to be merged after the pull
# request is closed (the destination branch).
#
# @option params [String] :client_request_token
# A unique, client-generated idempotency token that when provided in a
# request, ensures the request cannot be repeated with a changed
# parameter. If a request is received with the same parameters and a
# token is included, the request will return information about the
# initial request that used that token.
#
# <note markdown="1"> The AWS SDKs prepopulate client request tokens. If using an AWS SDK,
# you do not have to generate an idempotency token, as this will be done
# for you.
#
# </note>
#
# **A suitable default value is auto-generated.** You should normally
# not need to pass this option.
#
# @return [Types::CreatePullRequestOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreatePullRequestOutput#pull_request #pull_request} => Types::PullRequest
#
# @example Request syntax with placeholder values
#
# resp = client.create_pull_request({
# title: "Title", # required
# description: "Description",
# targets: [ # required
# {
# repository_name: "RepositoryName", # required
# source_reference: "ReferenceName", # required
# destination_reference: "ReferenceName",
# },
# ],
# client_request_token: "ClientRequestToken",
# })
#
# @example Response structure
#
# resp.pull_request.pull_request_id #=> String
# resp.pull_request.title #=> String
# resp.pull_request.description #=> String
# resp.pull_request.last_activity_date #=> Time
# resp.pull_request.creation_date #=> Time
# resp.pull_request.pull_request_status #=> String, one of "OPEN", "CLOSED"
# resp.pull_request.author_arn #=> String
# resp.pull_request.pull_request_targets #=> Array
# resp.pull_request.pull_request_targets[0].repository_name #=> String
# resp.pull_request.pull_request_targets[0].source_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_commit #=> String
# resp.pull_request.pull_request_targets[0].source_commit #=> String
# resp.pull_request.pull_request_targets[0].merge_base #=> String
# resp.pull_request.pull_request_targets[0].merge_metadata.is_merged #=> Boolean
# resp.pull_request.pull_request_targets[0].merge_metadata.merged_by #=> String
# resp.pull_request.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/CreatePullRequest AWS API Documentation
#
# @overload create_pull_request(params = {})
# @param [Hash] params ({})
def create_pull_request(params = {}, options = {})
req = build_request(:create_pull_request, params)
req.send_request(options)
end
# Creates a new, empty repository.
#
# @option params [required, String] :repository_name
# The name of the new repository to be created.
#
# <note markdown="1"> The repository name must be unique across the calling AWS account. In
# addition, repository names are limited to 100 alphanumeric, dash, and
# underscore characters, and cannot include certain characters. For a
# full description of the limits on repository names, see [Limits][1] in
# the AWS CodeCommit User Guide. The suffix ".git" is prohibited.
#
# </note>
#
#
#
# [1]: http://docs.aws.amazon.com/codecommit/latest/userguide/limits.html
#
# @option params [String] :repository_description
# A comment or description about the new repository.
#
# <note markdown="1"> The description field for a repository accepts all HTML characters and
# all valid Unicode characters. Applications that do not HTML-encode the
# description and display it in a web page could expose users to
# potentially malicious code. Make sure that you HTML-encode the
# description field in any application that uses this API to display the
# repository description on a web page.
#
# </note>
#
# @return [Types::CreateRepositoryOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateRepositoryOutput#repository_metadata #repository_metadata} => Types::RepositoryMetadata
#
# @example Request syntax with placeholder values
#
# resp = client.create_repository({
# repository_name: "RepositoryName", # required
# repository_description: "RepositoryDescription",
# })
#
# @example Response structure
#
# resp.repository_metadata.account_id #=> String
# resp.repository_metadata.repository_id #=> String
# resp.repository_metadata.repository_name #=> String
# resp.repository_metadata.repository_description #=> String
# resp.repository_metadata.default_branch #=> String
# resp.repository_metadata.last_modified_date #=> Time
# resp.repository_metadata.creation_date #=> Time
# resp.repository_metadata.clone_url_http #=> String
# resp.repository_metadata.clone_url_ssh #=> String
# resp.repository_metadata.arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/CreateRepository AWS API Documentation
#
# @overload create_repository(params = {})
# @param [Hash] params ({})
def create_repository(params = {}, options = {})
req = build_request(:create_repository, params)
req.send_request(options)
end
# Deletes a branch from a repository, unless that branch is the default
# branch for the repository.
#
# @option params [required, String] :repository_name
# The name of the repository that contains the branch to be deleted.
#
# @option params [required, String] :branch_name
# The name of the branch to delete.
#
# @return [Types::DeleteBranchOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteBranchOutput#deleted_branch #deleted_branch} => Types::BranchInfo
#
# @example Request syntax with placeholder values
#
# resp = client.delete_branch({
# repository_name: "RepositoryName", # required
# branch_name: "BranchName", # required
# })
#
# @example Response structure
#
# resp.deleted_branch.branch_name #=> String
# resp.deleted_branch.commit_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/DeleteBranch AWS API Documentation
#
# @overload delete_branch(params = {})
# @param [Hash] params ({})
def delete_branch(params = {}, options = {})
req = build_request(:delete_branch, params)
req.send_request(options)
end
# Deletes the content of a comment made on a change, file, or commit in
# a repository.
#
# @option params [required, String] :comment_id
# The unique, system-generated ID of the comment. To get this ID, use
# GetCommentsForComparedCommit or GetCommentsForPullRequest.
#
# @return [Types::DeleteCommentContentOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteCommentContentOutput#comment #comment} => Types::Comment
#
# @example Request syntax with placeholder values
#
# resp = client.delete_comment_content({
# comment_id: "CommentId", # required
# })
#
# @example Response structure
#
# resp.comment.comment_id #=> String
# resp.comment.content #=> String
# resp.comment.in_reply_to #=> String
# resp.comment.creation_date #=> Time
# resp.comment.last_modified_date #=> Time
# resp.comment.author_arn #=> String
# resp.comment.deleted #=> Boolean
# resp.comment.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/DeleteCommentContent AWS API Documentation
#
# @overload delete_comment_content(params = {})
# @param [Hash] params ({})
def delete_comment_content(params = {}, options = {})
req = build_request(:delete_comment_content, params)
req.send_request(options)
end
# Deletes a specified file from a specified branch. A commit is created
# on the branch that contains the revision. The file will still exist in
# the commits prior to the commit that contains the deletion.
#
# @option params [required, String] :repository_name
# The name of the repository that contains the file to delete.
#
# @option params [required, String] :branch_name
# The name of the branch where the commit will be made deleting the
# file.
#
# @option params [required, String] :file_path
# The fully-qualified path to the file that will be deleted, including
# the full name and extension of that file. For example,
# /examples/file.md is a fully qualified path to a file named file.md in
# a folder named examples.
#
# @option params [required, String] :parent_commit_id
# The ID of the commit that is the tip of the branch where you want to
# create the commit that will delete the file. This must be the HEAD
# commit for the branch. The commit that deletes the file will be
# created from this commit ID.
#
# @option params [Boolean] :keep_empty_folders
# Specifies whether to delete the folder or directory that contains the
# file you want to delete if that file is the only object in the folder
# or directory. By default, empty folders will be deleted. This includes
# empty folders that are part of the directory structure. For example,
# if the path to a file is dir1/dir2/dir3/dir4, and dir2 and dir3 are
# empty, deleting the last file in dir4 will also delete the empty
# folders dir4, dir3, and dir2.
#
# @option params [String] :commit_message
# The commit message you want to include as part of deleting the file.
# Commit messages are limited to 256 KB. If no message is specified, a
# default message will be used.
#
# @option params [String] :name
# The name of the author of the commit that deletes the file. If no name
# is specified, the user's ARN will be used as the author name and
# committer name.
#
# @option params [String] :email
# The email address for the commit that deletes the file. If no email
# address is specified, the email address will be left blank.
#
# @return [Types::DeleteFileOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteFileOutput#commit_id #commit_id} => String
# * {Types::DeleteFileOutput#blob_id #blob_id} => String
# * {Types::DeleteFileOutput#tree_id #tree_id} => String
# * {Types::DeleteFileOutput#file_path #file_path} => String
#
# @example Request syntax with placeholder values
#
# resp = client.delete_file({
# repository_name: "RepositoryName", # required
# branch_name: "BranchName", # required
# file_path: "Path", # required
# parent_commit_id: "CommitId", # required
# keep_empty_folders: false,
# commit_message: "Message",
# name: "Name",
# email: "Email",
# })
#
# @example Response structure
#
# resp.commit_id #=> String
# resp.blob_id #=> String
# resp.tree_id #=> String
# resp.file_path #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/DeleteFile AWS API Documentation
#
# @overload delete_file(params = {})
# @param [Hash] params ({})
def delete_file(params = {}, options = {})
req = build_request(:delete_file, params)
req.send_request(options)
end
# Deletes a repository. If a specified repository was already deleted, a
# null repository ID will be returned.
#
# Deleting a repository also deletes all associated objects and
# metadata. After a repository is deleted, all future push calls to the
# deleted repository will fail.
#
# @option params [required, String] :repository_name
# The name of the repository to delete.
#
# @return [Types::DeleteRepositoryOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteRepositoryOutput#repository_id #repository_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.delete_repository({
# repository_name: "RepositoryName", # required
# })
#
# @example Response structure
#
# resp.repository_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/DeleteRepository AWS API Documentation
#
# @overload delete_repository(params = {})
# @param [Hash] params ({})
def delete_repository(params = {}, options = {})
req = build_request(:delete_repository, params)
req.send_request(options)
end
# Returns information about one or more pull request events.
#
# @option params [required, String] :pull_request_id
# The system-generated ID of the pull request. To get this ID, use
# ListPullRequests.
#
# @option params [String] :pull_request_event_type
# Optional. The pull request event type about which you want to return
# information.
#
# @option params [String] :actor_arn
# The Amazon Resource Name (ARN) of the user whose actions resulted in
# the event. Examples include updating the pull request with additional
# commits or changing the status of a pull request.
#
# @option params [String] :next_token
# An enumeration token that when provided in a request, returns the next
# batch of the results.
#
# @option params [Integer] :max_results
# A non-negative integer used to limit the number of returned results.
# The default is 100 events, which is also the maximum number of events
# that can be returned in a result.
#
# @return [Types::DescribePullRequestEventsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribePullRequestEventsOutput#pull_request_events #pull_request_events} => Array<Types::PullRequestEvent>
# * {Types::DescribePullRequestEventsOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.describe_pull_request_events({
# pull_request_id: "PullRequestId", # required
# pull_request_event_type: "PULL_REQUEST_CREATED", # accepts PULL_REQUEST_CREATED, PULL_REQUEST_STATUS_CHANGED, PULL_REQUEST_SOURCE_REFERENCE_UPDATED, PULL_REQUEST_MERGE_STATE_CHANGED
# actor_arn: "Arn",
# next_token: "NextToken",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.pull_request_events #=> Array
# resp.pull_request_events[0].pull_request_id #=> String
# resp.pull_request_events[0].event_date #=> Time
# resp.pull_request_events[0].pull_request_event_type #=> String, one of "PULL_REQUEST_CREATED", "PULL_REQUEST_STATUS_CHANGED", "PULL_REQUEST_SOURCE_REFERENCE_UPDATED", "PULL_REQUEST_MERGE_STATE_CHANGED"
# resp.pull_request_events[0].actor_arn #=> String
# resp.pull_request_events[0].pull_request_created_event_metadata.repository_name #=> String
# resp.pull_request_events[0].pull_request_created_event_metadata.source_commit_id #=> String
# resp.pull_request_events[0].pull_request_created_event_metadata.destination_commit_id #=> String
# resp.pull_request_events[0].pull_request_created_event_metadata.merge_base #=> String
# resp.pull_request_events[0].pull_request_status_changed_event_metadata.pull_request_status #=> String, one of "OPEN", "CLOSED"
# resp.pull_request_events[0].pull_request_source_reference_updated_event_metadata.repository_name #=> String
# resp.pull_request_events[0].pull_request_source_reference_updated_event_metadata.before_commit_id #=> String
# resp.pull_request_events[0].pull_request_source_reference_updated_event_metadata.after_commit_id #=> String
# resp.pull_request_events[0].pull_request_source_reference_updated_event_metadata.merge_base #=> String
# resp.pull_request_events[0].pull_request_merged_state_changed_event_metadata.repository_name #=> String
# resp.pull_request_events[0].pull_request_merged_state_changed_event_metadata.destination_reference #=> String
# resp.pull_request_events[0].pull_request_merged_state_changed_event_metadata.merge_metadata.is_merged #=> Boolean
# resp.pull_request_events[0].pull_request_merged_state_changed_event_metadata.merge_metadata.merged_by #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/DescribePullRequestEvents AWS API Documentation
#
# @overload describe_pull_request_events(params = {})
# @param [Hash] params ({})
def describe_pull_request_events(params = {}, options = {})
req = build_request(:describe_pull_request_events, params)
req.send_request(options)
end
# Returns the base-64 encoded content of an individual blob within a
# repository.
#
# @option params [required, String] :repository_name
# The name of the repository that contains the blob.
#
# @option params [required, String] :blob_id
# The ID of the blob, which is its SHA-1 pointer.
#
# @return [Types::GetBlobOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetBlobOutput#content #content} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_blob({
# repository_name: "RepositoryName", # required
# blob_id: "ObjectId", # required
# })
#
# @example Response structure
#
# resp.content #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetBlob AWS API Documentation
#
# @overload get_blob(params = {})
# @param [Hash] params ({})
def get_blob(params = {}, options = {})
req = build_request(:get_blob, params)
req.send_request(options)
end
# Returns information about a repository branch, including its name and
# the last commit ID.
#
# @option params [String] :repository_name
# The name of the repository that contains the branch for which you want
# to retrieve information.
#
# @option params [String] :branch_name
# The name of the branch for which you want to retrieve information.
#
# @return [Types::GetBranchOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetBranchOutput#branch #branch} => Types::BranchInfo
#
# @example Request syntax with placeholder values
#
# resp = client.get_branch({
# repository_name: "RepositoryName",
# branch_name: "BranchName",
# })
#
# @example Response structure
#
# resp.branch.branch_name #=> String
# resp.branch.commit_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetBranch AWS API Documentation
#
# @overload get_branch(params = {})
# @param [Hash] params ({})
def get_branch(params = {}, options = {})
req = build_request(:get_branch, params)
req.send_request(options)
end
# Returns the content of a comment made on a change, file, or commit in
# a repository.
#
# @option params [required, String] :comment_id
# The unique, system-generated ID of the comment. To get this ID, use
# GetCommentsForComparedCommit or GetCommentsForPullRequest.
#
# @return [Types::GetCommentOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetCommentOutput#comment #comment} => Types::Comment
#
# @example Request syntax with placeholder values
#
# resp = client.get_comment({
# comment_id: "CommentId", # required
# })
#
# @example Response structure
#
# resp.comment.comment_id #=> String
# resp.comment.content #=> String
# resp.comment.in_reply_to #=> String
# resp.comment.creation_date #=> Time
# resp.comment.last_modified_date #=> Time
# resp.comment.author_arn #=> String
# resp.comment.deleted #=> Boolean
# resp.comment.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetComment AWS API Documentation
#
# @overload get_comment(params = {})
# @param [Hash] params ({})
def get_comment(params = {}, options = {})
req = build_request(:get_comment, params)
req.send_request(options)
end
# Returns information about comments made on the comparison between two
# commits.
#
# @option params [required, String] :repository_name
# The name of the repository where you want to compare commits.
#
# @option params [String] :before_commit_id
# To establish the directionality of the comparison, the full commit ID
# of the 'before' commit.
#
# @option params [required, String] :after_commit_id
# To establish the directionality of the comparison, the full commit ID
# of the 'after' commit.
#
# @option params [String] :next_token
# An enumeration token that when provided in a request, returns the next
# batch of the results.
#
# @option params [Integer] :max_results
# A non-negative integer used to limit the number of returned results.
# The default is 100 comments, and is configurable up to 500.
#
# @return [Types::GetCommentsForComparedCommitOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetCommentsForComparedCommitOutput#comments_for_compared_commit_data #comments_for_compared_commit_data} => Array<Types::CommentsForComparedCommit>
# * {Types::GetCommentsForComparedCommitOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_comments_for_compared_commit({
# repository_name: "RepositoryName", # required
# before_commit_id: "CommitId",
# after_commit_id: "CommitId", # required
# next_token: "NextToken",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.comments_for_compared_commit_data #=> Array
# resp.comments_for_compared_commit_data[0].repository_name #=> String
# resp.comments_for_compared_commit_data[0].before_commit_id #=> String
# resp.comments_for_compared_commit_data[0].after_commit_id #=> String
# resp.comments_for_compared_commit_data[0].before_blob_id #=> String
# resp.comments_for_compared_commit_data[0].after_blob_id #=> String
# resp.comments_for_compared_commit_data[0].location.file_path #=> String
# resp.comments_for_compared_commit_data[0].location.file_position #=> Integer
# resp.comments_for_compared_commit_data[0].location.relative_file_version #=> String, one of "BEFORE", "AFTER"
# resp.comments_for_compared_commit_data[0].comments #=> Array
# resp.comments_for_compared_commit_data[0].comments[0].comment_id #=> String
# resp.comments_for_compared_commit_data[0].comments[0].content #=> String
# resp.comments_for_compared_commit_data[0].comments[0].in_reply_to #=> String
# resp.comments_for_compared_commit_data[0].comments[0].creation_date #=> Time
# resp.comments_for_compared_commit_data[0].comments[0].last_modified_date #=> Time
# resp.comments_for_compared_commit_data[0].comments[0].author_arn #=> String
# resp.comments_for_compared_commit_data[0].comments[0].deleted #=> Boolean
# resp.comments_for_compared_commit_data[0].comments[0].client_request_token #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetCommentsForComparedCommit AWS API Documentation
#
# @overload get_comments_for_compared_commit(params = {})
# @param [Hash] params ({})
def get_comments_for_compared_commit(params = {}, options = {})
req = build_request(:get_comments_for_compared_commit, params)
req.send_request(options)
end
# Returns comments made on a pull request.
#
# @option params [required, String] :pull_request_id
# The system-generated ID of the pull request. To get this ID, use
# ListPullRequests.
#
# @option params [String] :repository_name
# The name of the repository that contains the pull request.
#
# @option params [String] :before_commit_id
# The full commit ID of the commit in the destination branch that was
# the tip of the branch at the time the pull request was created.
#
# @option params [String] :after_commit_id
# The full commit ID of the commit in the source branch that was the tip
# of the branch at the time the comment was made.
#
# @option params [String] :next_token
    #   An enumeration token that, when provided in a request, returns the next
# batch of the results.
#
# @option params [Integer] :max_results
# A non-negative integer used to limit the number of returned results.
# The default is 100 comments. You can return up to 500 comments with a
# single request.
#
# @return [Types::GetCommentsForPullRequestOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetCommentsForPullRequestOutput#comments_for_pull_request_data #comments_for_pull_request_data} => Array<Types::CommentsForPullRequest>
# * {Types::GetCommentsForPullRequestOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_comments_for_pull_request({
# pull_request_id: "PullRequestId", # required
# repository_name: "RepositoryName",
# before_commit_id: "CommitId",
# after_commit_id: "CommitId",
# next_token: "NextToken",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.comments_for_pull_request_data #=> Array
# resp.comments_for_pull_request_data[0].pull_request_id #=> String
# resp.comments_for_pull_request_data[0].repository_name #=> String
# resp.comments_for_pull_request_data[0].before_commit_id #=> String
# resp.comments_for_pull_request_data[0].after_commit_id #=> String
# resp.comments_for_pull_request_data[0].before_blob_id #=> String
# resp.comments_for_pull_request_data[0].after_blob_id #=> String
# resp.comments_for_pull_request_data[0].location.file_path #=> String
# resp.comments_for_pull_request_data[0].location.file_position #=> Integer
# resp.comments_for_pull_request_data[0].location.relative_file_version #=> String, one of "BEFORE", "AFTER"
# resp.comments_for_pull_request_data[0].comments #=> Array
# resp.comments_for_pull_request_data[0].comments[0].comment_id #=> String
# resp.comments_for_pull_request_data[0].comments[0].content #=> String
# resp.comments_for_pull_request_data[0].comments[0].in_reply_to #=> String
# resp.comments_for_pull_request_data[0].comments[0].creation_date #=> Time
# resp.comments_for_pull_request_data[0].comments[0].last_modified_date #=> Time
# resp.comments_for_pull_request_data[0].comments[0].author_arn #=> String
# resp.comments_for_pull_request_data[0].comments[0].deleted #=> Boolean
# resp.comments_for_pull_request_data[0].comments[0].client_request_token #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetCommentsForPullRequest AWS API Documentation
#
# @overload get_comments_for_pull_request(params = {})
# @param [Hash] params ({})
def get_comments_for_pull_request(params = {}, options = {})
req = build_request(:get_comments_for_pull_request, params)
req.send_request(options)
end
# Returns information about a commit, including commit message and
# committer information.
#
# @option params [required, String] :repository_name
# The name of the repository to which the commit was made.
#
# @option params [required, String] :commit_id
# The commit ID. Commit IDs are the full SHA of the commit.
#
# @return [Types::GetCommitOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetCommitOutput#commit #commit} => Types::Commit
#
# @example Request syntax with placeholder values
#
# resp = client.get_commit({
# repository_name: "RepositoryName", # required
# commit_id: "ObjectId", # required
# })
#
# @example Response structure
#
# resp.commit.commit_id #=> String
# resp.commit.tree_id #=> String
# resp.commit.parents #=> Array
# resp.commit.parents[0] #=> String
# resp.commit.message #=> String
# resp.commit.author.name #=> String
# resp.commit.author.email #=> String
# resp.commit.author.date #=> String
# resp.commit.committer.name #=> String
# resp.commit.committer.email #=> String
# resp.commit.committer.date #=> String
# resp.commit.additional_data #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetCommit AWS API Documentation
#
# @overload get_commit(params = {})
# @param [Hash] params ({})
def get_commit(params = {}, options = {})
req = build_request(:get_commit, params)
req.send_request(options)
end
# Returns information about the differences in a valid commit specifier
# (such as a branch, tag, HEAD, commit ID or other fully qualified
# reference). Results can be limited to a specified path.
#
# @option params [required, String] :repository_name
# The name of the repository where you want to get differences.
#
# @option params [String] :before_commit_specifier
# The branch, tag, HEAD, or other fully qualified reference used to
# identify a commit. For example, the full commit ID. Optional. If not
# specified, all changes prior to the `afterCommitSpecifier` value will
# be shown. If you do not use `beforeCommitSpecifier` in your request,
# consider limiting the results with `maxResults`.
#
# @option params [required, String] :after_commit_specifier
# The branch, tag, HEAD, or other fully qualified reference used to
# identify a commit.
#
# @option params [String] :before_path
# The file path in which to check for differences. Limits the results to
# this path. Can also be used to specify the previous name of a
# directory or folder. If `beforePath` and `afterPath` are not
# specified, differences will be shown for all paths.
#
# @option params [String] :after_path
# The file path in which to check differences. Limits the results to
# this path. Can also be used to specify the changed name of a directory
# or folder, if it has changed. If not specified, differences will be
# shown for all paths.
#
# @option params [Integer] :max_results
# A non-negative integer used to limit the number of returned results.
#
# @option params [String] :next_token
    #   An enumeration token that, when provided in a request, returns the next
# batch of the results.
#
# @return [Types::GetDifferencesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetDifferencesOutput#differences #differences} => Array<Types::Difference>
# * {Types::GetDifferencesOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_differences({
# repository_name: "RepositoryName", # required
# before_commit_specifier: "CommitName",
# after_commit_specifier: "CommitName", # required
# before_path: "Path",
# after_path: "Path",
# max_results: 1,
# next_token: "NextToken",
# })
#
# @example Response structure
#
# resp.differences #=> Array
# resp.differences[0].before_blob.blob_id #=> String
# resp.differences[0].before_blob.path #=> String
# resp.differences[0].before_blob.mode #=> String
# resp.differences[0].after_blob.blob_id #=> String
# resp.differences[0].after_blob.path #=> String
# resp.differences[0].after_blob.mode #=> String
# resp.differences[0].change_type #=> String, one of "A", "M", "D"
# resp.next_token #=> String
#
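    # @example Illustrative usage: list the paths changed between two commits
    #
    #   # A hedged sketch, not generated documentation; the repository and
    #   # branch names below are placeholders.
    #   resp = client.get_differences({
    #     repository_name: "MyDemoRepo",
    #     before_commit_specifier: "master",
    #     after_commit_specifier: "feature-branch",
    #   })
    #   resp.differences.each do |difference|
    #     blob = difference.after_blob || difference.before_blob
    #     puts "#{difference.change_type} #{blob.path}"
    #   end
    #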
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetDifferences AWS API Documentation
#
# @overload get_differences(params = {})
# @param [Hash] params ({})
def get_differences(params = {}, options = {})
req = build_request(:get_differences, params)
req.send_request(options)
end
# Returns the base-64 encoded contents of a specified file and its
# metadata.
#
# @option params [required, String] :repository_name
# The name of the repository that contains the file.
#
# @option params [String] :commit_specifier
    #   The fully-qualified reference that identifies the commit that contains
# the file. For example, you could specify a full commit ID, a tag, a
# branch name, or a reference such as refs/heads/master. If none is
# provided, then the head commit will be used.
#
# @option params [required, String] :file_path
# The fully-qualified path to the file, including the full name and
# extension of the file. For example, /examples/file.md is the
# fully-qualified path to a file named file.md in a folder named
# examples.
#
# @return [Types::GetFileOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetFileOutput#commit_id #commit_id} => String
# * {Types::GetFileOutput#blob_id #blob_id} => String
# * {Types::GetFileOutput#file_path #file_path} => String
# * {Types::GetFileOutput#file_mode #file_mode} => String
# * {Types::GetFileOutput#file_size #file_size} => Integer
# * {Types::GetFileOutput#file_content #file_content} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_file({
# repository_name: "RepositoryName", # required
# commit_specifier: "CommitName",
# file_path: "Path", # required
# })
#
# @example Response structure
#
# resp.commit_id #=> String
# resp.blob_id #=> String
# resp.file_path #=> String
# resp.file_mode #=> String, one of "EXECUTABLE", "NORMAL", "SYMLINK"
# resp.file_size #=> Integer
# resp.file_content #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetFile AWS API Documentation
#
# @overload get_file(params = {})
# @param [Hash] params ({})
def get_file(params = {}, options = {})
req = build_request(:get_file, params)
req.send_request(options)
end
# Returns the contents of a specified folder in a repository.
#
# @option params [required, String] :repository_name
# The name of the repository.
#
# @option params [String] :commit_specifier
# A fully-qualified reference used to identify a commit that contains
# the version of the folder's content to return. A fully-qualified
# reference can be a commit ID, branch name, tag, or reference such as
# HEAD. If no specifier is provided, the folder content will be returned
# as it exists in the HEAD commit.
#
# @option params [required, String] :folder_path
# The fully-qualified path to the folder whose contents will be
# returned, including the folder name. For example, /examples is a
# fully-qualified path to a folder named examples that was created off
# of the root directory (/) of a repository.
#
# @return [Types::GetFolderOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetFolderOutput#commit_id #commit_id} => String
# * {Types::GetFolderOutput#folder_path #folder_path} => String
# * {Types::GetFolderOutput#tree_id #tree_id} => String
# * {Types::GetFolderOutput#sub_folders #sub_folders} => Array<Types::Folder>
# * {Types::GetFolderOutput#files #files} => Array<Types::File>
# * {Types::GetFolderOutput#symbolic_links #symbolic_links} => Array<Types::SymbolicLink>
# * {Types::GetFolderOutput#sub_modules #sub_modules} => Array<Types::SubModule>
#
# @example Request syntax with placeholder values
#
# resp = client.get_folder({
# repository_name: "RepositoryName", # required
# commit_specifier: "CommitName",
# folder_path: "Path", # required
# })
#
# @example Response structure
#
# resp.commit_id #=> String
# resp.folder_path #=> String
# resp.tree_id #=> String
# resp.sub_folders #=> Array
# resp.sub_folders[0].tree_id #=> String
# resp.sub_folders[0].absolute_path #=> String
# resp.sub_folders[0].relative_path #=> String
# resp.files #=> Array
# resp.files[0].blob_id #=> String
# resp.files[0].absolute_path #=> String
# resp.files[0].relative_path #=> String
# resp.files[0].file_mode #=> String, one of "EXECUTABLE", "NORMAL", "SYMLINK"
# resp.symbolic_links #=> Array
# resp.symbolic_links[0].blob_id #=> String
# resp.symbolic_links[0].absolute_path #=> String
# resp.symbolic_links[0].relative_path #=> String
# resp.symbolic_links[0].file_mode #=> String, one of "EXECUTABLE", "NORMAL", "SYMLINK"
# resp.sub_modules #=> Array
# resp.sub_modules[0].commit_id #=> String
# resp.sub_modules[0].absolute_path #=> String
# resp.sub_modules[0].relative_path #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetFolder AWS API Documentation
#
# @overload get_folder(params = {})
# @param [Hash] params ({})
def get_folder(params = {}, options = {})
req = build_request(:get_folder, params)
req.send_request(options)
end
# Returns information about merge conflicts between the before and after
# commit IDs for a pull request in a repository.
#
# @option params [required, String] :repository_name
# The name of the repository where the pull request was created.
#
# @option params [required, String] :destination_commit_specifier
# The branch, tag, HEAD, or other fully qualified reference used to
# identify a commit. For example, a branch name or a full commit ID.
#
# @option params [required, String] :source_commit_specifier
# The branch, tag, HEAD, or other fully qualified reference used to
# identify a commit. For example, a branch name or a full commit ID.
#
# @option params [required, String] :merge_option
# The merge option or strategy you want to use to merge the code. The
# only valid value is FAST\_FORWARD\_MERGE.
#
# @return [Types::GetMergeConflictsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetMergeConflictsOutput#mergeable #mergeable} => Boolean
# * {Types::GetMergeConflictsOutput#destination_commit_id #destination_commit_id} => String
# * {Types::GetMergeConflictsOutput#source_commit_id #source_commit_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_merge_conflicts({
# repository_name: "RepositoryName", # required
# destination_commit_specifier: "CommitName", # required
# source_commit_specifier: "CommitName", # required
# merge_option: "FAST_FORWARD_MERGE", # required, accepts FAST_FORWARD_MERGE
# })
#
# @example Response structure
#
# resp.mergeable #=> Boolean
# resp.destination_commit_id #=> String
# resp.source_commit_id #=> String
#
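    # @example Illustrative workflow: check mergeability before merging
    #
    #   # A hedged sketch, not generated documentation; repository, branch,
    #   # and pull request values are placeholders.
    #   conflicts = client.get_merge_conflicts({
    #     repository_name: "MyDemoRepo",
    #     destination_commit_specifier: "master",
    #     source_commit_specifier: "feature-branch",
    #     merge_option: "FAST_FORWARD_MERGE",
    #   })
    #   if conflicts.mergeable
    #     client.merge_pull_request_by_fast_forward({
    #       pull_request_id: "42",
    #       repository_name: "MyDemoRepo",
    #       source_commit_id: conflicts.source_commit_id,
    #     })
    #   end
    #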
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetMergeConflicts AWS API Documentation
#
# @overload get_merge_conflicts(params = {})
# @param [Hash] params ({})
def get_merge_conflicts(params = {}, options = {})
req = build_request(:get_merge_conflicts, params)
req.send_request(options)
end
# Gets information about a pull request in a specified repository.
#
# @option params [required, String] :pull_request_id
# The system-generated ID of the pull request. To get this ID, use
# ListPullRequests.
#
# @return [Types::GetPullRequestOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetPullRequestOutput#pull_request #pull_request} => Types::PullRequest
#
# @example Request syntax with placeholder values
#
# resp = client.get_pull_request({
# pull_request_id: "PullRequestId", # required
# })
#
# @example Response structure
#
# resp.pull_request.pull_request_id #=> String
# resp.pull_request.title #=> String
# resp.pull_request.description #=> String
# resp.pull_request.last_activity_date #=> Time
# resp.pull_request.creation_date #=> Time
# resp.pull_request.pull_request_status #=> String, one of "OPEN", "CLOSED"
# resp.pull_request.author_arn #=> String
# resp.pull_request.pull_request_targets #=> Array
# resp.pull_request.pull_request_targets[0].repository_name #=> String
# resp.pull_request.pull_request_targets[0].source_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_commit #=> String
# resp.pull_request.pull_request_targets[0].source_commit #=> String
# resp.pull_request.pull_request_targets[0].merge_base #=> String
# resp.pull_request.pull_request_targets[0].merge_metadata.is_merged #=> Boolean
# resp.pull_request.pull_request_targets[0].merge_metadata.merged_by #=> String
# resp.pull_request.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetPullRequest AWS API Documentation
#
# @overload get_pull_request(params = {})
# @param [Hash] params ({})
def get_pull_request(params = {}, options = {})
req = build_request(:get_pull_request, params)
req.send_request(options)
end
# Returns information about a repository.
#
# <note markdown="1"> The description field for a repository accepts all HTML characters and
# all valid Unicode characters. Applications that do not HTML-encode the
# description and display it in a web page could expose users to
# potentially malicious code. Make sure that you HTML-encode the
# description field in any application that uses this API to display the
# repository description on a web page.
#
# </note>
#
# @option params [required, String] :repository_name
# The name of the repository to get information about.
#
# @return [Types::GetRepositoryOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetRepositoryOutput#repository_metadata #repository_metadata} => Types::RepositoryMetadata
#
# @example Request syntax with placeholder values
#
# resp = client.get_repository({
# repository_name: "RepositoryName", # required
# })
#
# @example Response structure
#
# resp.repository_metadata.account_id #=> String
# resp.repository_metadata.repository_id #=> String
# resp.repository_metadata.repository_name #=> String
# resp.repository_metadata.repository_description #=> String
# resp.repository_metadata.default_branch #=> String
# resp.repository_metadata.last_modified_date #=> Time
# resp.repository_metadata.creation_date #=> Time
# resp.repository_metadata.clone_url_http #=> String
# resp.repository_metadata.clone_url_ssh #=> String
# resp.repository_metadata.arn #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetRepository AWS API Documentation
#
# @overload get_repository(params = {})
# @param [Hash] params ({})
def get_repository(params = {}, options = {})
req = build_request(:get_repository, params)
req.send_request(options)
end
# Gets information about triggers configured for a repository.
#
# @option params [required, String] :repository_name
# The name of the repository for which the trigger is configured.
#
# @return [Types::GetRepositoryTriggersOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetRepositoryTriggersOutput#configuration_id #configuration_id} => String
# * {Types::GetRepositoryTriggersOutput#triggers #triggers} => Array<Types::RepositoryTrigger>
#
# @example Request syntax with placeholder values
#
# resp = client.get_repository_triggers({
# repository_name: "RepositoryName", # required
# })
#
# @example Response structure
#
# resp.configuration_id #=> String
# resp.triggers #=> Array
# resp.triggers[0].name #=> String
# resp.triggers[0].destination_arn #=> String
# resp.triggers[0].custom_data #=> String
# resp.triggers[0].branches #=> Array
# resp.triggers[0].branches[0] #=> String
# resp.triggers[0].events #=> Array
# resp.triggers[0].events[0] #=> String, one of "all", "updateReference", "createReference", "deleteReference"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/GetRepositoryTriggers AWS API Documentation
#
# @overload get_repository_triggers(params = {})
# @param [Hash] params ({})
def get_repository_triggers(params = {}, options = {})
req = build_request(:get_repository_triggers, params)
req.send_request(options)
end
# Gets information about one or more branches in a repository.
#
# @option params [required, String] :repository_name
# The name of the repository that contains the branches.
#
# @option params [String] :next_token
# An enumeration token that allows the operation to batch the results.
#
# @return [Types::ListBranchesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListBranchesOutput#branches #branches} => Array<String>
# * {Types::ListBranchesOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_branches({
# repository_name: "RepositoryName", # required
# next_token: "NextToken",
# })
#
# @example Response structure
#
# resp.branches #=> Array
# resp.branches[0] #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/ListBranches AWS API Documentation
#
# @overload list_branches(params = {})
# @param [Hash] params ({})
def list_branches(params = {}, options = {})
req = build_request(:list_branches, params)
req.send_request(options)
end
# Returns a list of pull requests for a specified repository. The return
# list can be refined by pull request status or pull request author ARN.
#
# @option params [required, String] :repository_name
# The name of the repository for which you want to list pull requests.
#
# @option params [String] :author_arn
# Optional. The Amazon Resource Name (ARN) of the user who created the
# pull request. If used, this filters the results to pull requests
# created by that user.
#
# @option params [String] :pull_request_status
# Optional. The status of the pull request. If used, this refines the
# results to the pull requests that match the specified status.
#
# @option params [String] :next_token
    #   An enumeration token that, when provided in a request, returns the next
# batch of the results.
#
# @option params [Integer] :max_results
# A non-negative integer used to limit the number of returned results.
#
# @return [Types::ListPullRequestsOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListPullRequestsOutput#pull_request_ids #pull_request_ids} => Array<String>
# * {Types::ListPullRequestsOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_pull_requests({
# repository_name: "RepositoryName", # required
# author_arn: "Arn",
# pull_request_status: "OPEN", # accepts OPEN, CLOSED
# next_token: "NextToken",
# max_results: 1,
# })
#
# @example Response structure
#
# resp.pull_request_ids #=> Array
# resp.pull_request_ids[0] #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/ListPullRequests AWS API Documentation
#
# @overload list_pull_requests(params = {})
# @param [Hash] params ({})
def list_pull_requests(params = {}, options = {})
req = build_request(:list_pull_requests, params)
req.send_request(options)
end
# Gets information about one or more repositories.
#
# @option params [String] :next_token
# An enumeration token that allows the operation to batch the results of
# the operation. Batch sizes are 1,000 for list repository operations.
# When the client sends the token back to AWS CodeCommit, another page
# of 1,000 records is retrieved.
#
# @option params [String] :sort_by
# The criteria used to sort the results of a list repositories
# operation.
#
# @option params [String] :order
# The order in which to sort the results of a list repositories
# operation.
#
# @return [Types::ListRepositoriesOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListRepositoriesOutput#repositories #repositories} => Array<Types::RepositoryNameIdPair>
# * {Types::ListRepositoriesOutput#next_token #next_token} => String
#
# @example Request syntax with placeholder values
#
# resp = client.list_repositories({
# next_token: "NextToken",
# sort_by: "repositoryName", # accepts repositoryName, lastModifiedDate
# order: "ascending", # accepts ascending, descending
# })
#
# @example Response structure
#
# resp.repositories #=> Array
# resp.repositories[0].repository_name #=> String
# resp.repositories[0].repository_id #=> String
# resp.next_token #=> String
#
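    # @example Illustrative usage: paginate through all repositories
    #
    #   # A hedged sketch, not generated documentation. Each response carries
    #   # a next_token that is passed back in to fetch the next batch.
    #   repositories = []
    #   params = {}
    #   loop do
    #     resp = client.list_repositories(params)
    #     repositories.concat(resp.repositories)
    #     break unless resp.next_token
    #     params = { next_token: resp.next_token }
    #   end
    #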
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/ListRepositories AWS API Documentation
#
# @overload list_repositories(params = {})
# @param [Hash] params ({})
def list_repositories(params = {}, options = {})
req = build_request(:list_repositories, params)
req.send_request(options)
end
# Closes a pull request and attempts to merge the source commit of a
# pull request into the specified destination branch for that pull
# request at the specified commit using the fast-forward merge option.
#
# @option params [required, String] :pull_request_id
# The system-generated ID of the pull request. To get this ID, use
# ListPullRequests.
#
# @option params [required, String] :repository_name
# The name of the repository where the pull request was created.
#
# @option params [String] :source_commit_id
# The full commit ID of the original or updated commit in the pull
# request source branch. Pass this value if you want an exception thrown
# if the current commit ID of the tip of the source branch does not
# match this commit ID.
#
# @return [Types::MergePullRequestByFastForwardOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::MergePullRequestByFastForwardOutput#pull_request #pull_request} => Types::PullRequest
#
# @example Request syntax with placeholder values
#
# resp = client.merge_pull_request_by_fast_forward({
# pull_request_id: "PullRequestId", # required
# repository_name: "RepositoryName", # required
# source_commit_id: "CommitId",
# })
#
# @example Response structure
#
# resp.pull_request.pull_request_id #=> String
# resp.pull_request.title #=> String
# resp.pull_request.description #=> String
# resp.pull_request.last_activity_date #=> Time
# resp.pull_request.creation_date #=> Time
# resp.pull_request.pull_request_status #=> String, one of "OPEN", "CLOSED"
# resp.pull_request.author_arn #=> String
# resp.pull_request.pull_request_targets #=> Array
# resp.pull_request.pull_request_targets[0].repository_name #=> String
# resp.pull_request.pull_request_targets[0].source_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_commit #=> String
# resp.pull_request.pull_request_targets[0].source_commit #=> String
# resp.pull_request.pull_request_targets[0].merge_base #=> String
# resp.pull_request.pull_request_targets[0].merge_metadata.is_merged #=> Boolean
# resp.pull_request.pull_request_targets[0].merge_metadata.merged_by #=> String
# resp.pull_request.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/MergePullRequestByFastForward AWS API Documentation
#
# @overload merge_pull_request_by_fast_forward(params = {})
# @param [Hash] params ({})
def merge_pull_request_by_fast_forward(params = {}, options = {})
req = build_request(:merge_pull_request_by_fast_forward, params)
req.send_request(options)
end
# Posts a comment on the comparison between two commits.
#
# @option params [required, String] :repository_name
# The name of the repository where you want to post a comment on the
# comparison between commits.
#
# @option params [String] :before_commit_id
# To establish the directionality of the comparison, the full commit ID
# of the 'before' commit.
#
# @option params [required, String] :after_commit_id
# To establish the directionality of the comparison, the full commit ID
# of the 'after' commit.
#
# @option params [Types::Location] :location
# The location of the comparison where you want to comment.
#
# @option params [required, String] :content
# The content of the comment you want to make.
#
# @option params [String] :client_request_token
# A unique, client-generated idempotency token that when provided in a
# request, ensures the request cannot be repeated with a changed
# parameter. If a request is received with the same parameters and a
# token is included, the request will return information about the
# initial request that used that token.
#
# **A suitable default value is auto-generated.** You should normally
    #   not need to pass this option.
#
# @return [Types::PostCommentForComparedCommitOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::PostCommentForComparedCommitOutput#repository_name #repository_name} => String
# * {Types::PostCommentForComparedCommitOutput#before_commit_id #before_commit_id} => String
# * {Types::PostCommentForComparedCommitOutput#after_commit_id #after_commit_id} => String
# * {Types::PostCommentForComparedCommitOutput#before_blob_id #before_blob_id} => String
# * {Types::PostCommentForComparedCommitOutput#after_blob_id #after_blob_id} => String
# * {Types::PostCommentForComparedCommitOutput#location #location} => Types::Location
# * {Types::PostCommentForComparedCommitOutput#comment #comment} => Types::Comment
#
# @example Request syntax with placeholder values
#
# resp = client.post_comment_for_compared_commit({
# repository_name: "RepositoryName", # required
# before_commit_id: "CommitId",
# after_commit_id: "CommitId", # required
# location: {
# file_path: "Path",
# file_position: 1,
# relative_file_version: "BEFORE", # accepts BEFORE, AFTER
# },
# content: "Content", # required
# client_request_token: "ClientRequestToken",
# })
#
# @example Response structure
#
# resp.repository_name #=> String
# resp.before_commit_id #=> String
# resp.after_commit_id #=> String
# resp.before_blob_id #=> String
# resp.after_blob_id #=> String
# resp.location.file_path #=> String
# resp.location.file_position #=> Integer
# resp.location.relative_file_version #=> String, one of "BEFORE", "AFTER"
# resp.comment.comment_id #=> String
# resp.comment.content #=> String
# resp.comment.in_reply_to #=> String
# resp.comment.creation_date #=> Time
# resp.comment.last_modified_date #=> Time
# resp.comment.author_arn #=> String
# resp.comment.deleted #=> Boolean
# resp.comment.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/PostCommentForComparedCommit AWS API Documentation
#
# @overload post_comment_for_compared_commit(params = {})
# @param [Hash] params ({})
def post_comment_for_compared_commit(params = {}, options = {})
req = build_request(:post_comment_for_compared_commit, params)
req.send_request(options)
end
# Posts a comment on a pull request.
#
# @option params [required, String] :pull_request_id
# The system-generated ID of the pull request. To get this ID, use
# ListPullRequests.
#
# @option params [required, String] :repository_name
# The name of the repository where you want to post a comment on a pull
# request.
#
# @option params [required, String] :before_commit_id
# The full commit ID of the commit in the destination branch that was
# the tip of the branch at the time the pull request was created.
#
# @option params [required, String] :after_commit_id
# The full commit ID of the commit in the source branch that is the
# current tip of the branch for the pull request when you post the
# comment.
#
# @option params [Types::Location] :location
# The location of the change where you want to post your comment. If no
# location is provided, the comment will be posted as a general comment
# on the pull request difference between the before commit ID and the
# after commit ID.
#
# @option params [required, String] :content
# The content of your comment on the change.
#
# @option params [String] :client_request_token
# A unique, client-generated idempotency token that when provided in a
# request, ensures the request cannot be repeated with a changed
# parameter. If a request is received with the same parameters and a
# token is included, the request will return information about the
# initial request that used that token.
#
# **A suitable default value is auto-generated.** You should normally
    #   not need to pass this option.
#
# @return [Types::PostCommentForPullRequestOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::PostCommentForPullRequestOutput#repository_name #repository_name} => String
# * {Types::PostCommentForPullRequestOutput#pull_request_id #pull_request_id} => String
# * {Types::PostCommentForPullRequestOutput#before_commit_id #before_commit_id} => String
# * {Types::PostCommentForPullRequestOutput#after_commit_id #after_commit_id} => String
# * {Types::PostCommentForPullRequestOutput#before_blob_id #before_blob_id} => String
# * {Types::PostCommentForPullRequestOutput#after_blob_id #after_blob_id} => String
# * {Types::PostCommentForPullRequestOutput#location #location} => Types::Location
# * {Types::PostCommentForPullRequestOutput#comment #comment} => Types::Comment
#
# @example Request syntax with placeholder values
#
# resp = client.post_comment_for_pull_request({
# pull_request_id: "PullRequestId", # required
# repository_name: "RepositoryName", # required
# before_commit_id: "CommitId", # required
# after_commit_id: "CommitId", # required
# location: {
# file_path: "Path",
# file_position: 1,
# relative_file_version: "BEFORE", # accepts BEFORE, AFTER
# },
# content: "Content", # required
# client_request_token: "ClientRequestToken",
# })
#
# @example Response structure
#
# resp.repository_name #=> String
# resp.pull_request_id #=> String
# resp.before_commit_id #=> String
# resp.after_commit_id #=> String
# resp.before_blob_id #=> String
# resp.after_blob_id #=> String
# resp.location.file_path #=> String
# resp.location.file_position #=> Integer
# resp.location.relative_file_version #=> String, one of "BEFORE", "AFTER"
# resp.comment.comment_id #=> String
# resp.comment.content #=> String
# resp.comment.in_reply_to #=> String
# resp.comment.creation_date #=> Time
# resp.comment.last_modified_date #=> Time
# resp.comment.author_arn #=> String
# resp.comment.deleted #=> Boolean
# resp.comment.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/PostCommentForPullRequest AWS API Documentation
#
# @overload post_comment_for_pull_request(params = {})
# @param [Hash] params ({})
def post_comment_for_pull_request(params = {}, options = {})
req = build_request(:post_comment_for_pull_request, params)
req.send_request(options)
end
# Posts a comment in reply to an existing comment on a comparison
# between commits or a pull request.
#
# @option params [required, String] :in_reply_to
# The system-generated ID of the comment to which you want to reply. To
# get this ID, use GetCommentsForComparedCommit or
# GetCommentsForPullRequest.
#
# @option params [String] :client_request_token
# A unique, client-generated idempotency token that when provided in a
# request, ensures the request cannot be repeated with a changed
# parameter. If a request is received with the same parameters and a
# token is included, the request will return information about the
# initial request that used that token.
#
# **A suitable default value is auto-generated.** You should normally
    #   not need to pass this option.
#
# @option params [required, String] :content
# The contents of your reply to a comment.
#
# @return [Types::PostCommentReplyOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::PostCommentReplyOutput#comment #comment} => Types::Comment
#
# @example Request syntax with placeholder values
#
# resp = client.post_comment_reply({
# in_reply_to: "CommentId", # required
# client_request_token: "ClientRequestToken",
# content: "Content", # required
# })
#
# @example Response structure
#
# resp.comment.comment_id #=> String
# resp.comment.content #=> String
# resp.comment.in_reply_to #=> String
# resp.comment.creation_date #=> Time
# resp.comment.last_modified_date #=> Time
# resp.comment.author_arn #=> String
# resp.comment.deleted #=> Boolean
# resp.comment.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/PostCommentReply AWS API Documentation
#
# @overload post_comment_reply(params = {})
# @param [Hash] params ({})
def post_comment_reply(params = {}, options = {})
req = build_request(:post_comment_reply, params)
req.send_request(options)
end
# Adds or updates a file in a branch in an AWS CodeCommit repository,
# and generates a commit for the addition in the specified branch.
#
# @option params [required, String] :repository_name
# The name of the repository where you want to add or update the file.
#
# @option params [required, String] :branch_name
# The name of the branch where you want to add or update the file. If
# this is an empty repository, this branch will be created.
#
# @option params [required, String, IO] :file_content
# The content of the file, in binary object format.
#
# @option params [required, String] :file_path
# The name of the file you want to add or update, including the relative
# path to the file in the repository.
#
# <note markdown="1"> If the path does not currently exist in the repository, the path will
# be created as part of adding the file.
#
# </note>
#
# @option params [String] :file_mode
# The file mode permissions of the blob. Valid file mode permissions are
# listed below.
#
# @option params [String] :parent_commit_id
# The full commit ID of the head commit in the branch where you want to
# add or update the file. If this is an empty repository, no commit ID
# is required. If this is not an empty repository, a commit ID is
# required.
#
# The commit ID must match the ID of the head commit at the time of the
# operation, or an error will occur, and the file will not be added or
# updated.
#
# @option params [String] :commit_message
# A message about why this file was added or updated. While optional,
# adding a message is strongly encouraged in order to provide a more
# useful commit history for your repository.
#
# @option params [String] :name
# The name of the person adding or updating the file. While optional,
# adding a name is strongly encouraged in order to provide a more useful
# commit history for your repository.
#
# @option params [String] :email
# An email address for the person adding or updating the file.
#
# @return [Types::PutFileOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::PutFileOutput#commit_id #commit_id} => String
# * {Types::PutFileOutput#blob_id #blob_id} => String
# * {Types::PutFileOutput#tree_id #tree_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.put_file({
# repository_name: "RepositoryName", # required
# branch_name: "BranchName", # required
# file_content: "data", # required
# file_path: "Path", # required
# file_mode: "EXECUTABLE", # accepts EXECUTABLE, NORMAL, SYMLINK
# parent_commit_id: "CommitId",
# commit_message: "Message",
# name: "Name",
# email: "Email",
# })
#
# @example Response structure
#
# resp.commit_id #=> String
# resp.blob_id #=> String
# resp.tree_id #=> String
#
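    # @example Illustrative workflow: commit a local file on top of the branch head
    #
    #   # A hedged sketch, not generated documentation; repository, branch,
    #   # and file names are placeholders.
    #   branch = client.get_branch({
    #     repository_name: "MyDemoRepo",
    #     branch_name: "master",
    #   })
    #   client.put_file({
    #     repository_name: "MyDemoRepo",
    #     branch_name: "master",
    #     file_content: File.read("examples/file.md"),
    #     file_path: "examples/file.md",
    #     parent_commit_id: branch.branch.commit_id,
    #     commit_message: "Add examples/file.md",
    #   })
    #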
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/PutFile AWS API Documentation
#
# @overload put_file(params = {})
# @param [Hash] params ({})
def put_file(params = {}, options = {})
req = build_request(:put_file, params)
req.send_request(options)
end
# Replaces all triggers for a repository. This can be used to create or
# delete triggers.
#
# @option params [required, String] :repository_name
# The name of the repository where you want to create or update the
# trigger.
#
# @option params [required, Array<Types::RepositoryTrigger>] :triggers
# The JSON block of configuration information for each trigger.
#
# @return [Types::PutRepositoryTriggersOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::PutRepositoryTriggersOutput#configuration_id #configuration_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.put_repository_triggers({
# repository_name: "RepositoryName", # required
# triggers: [ # required
# {
# name: "RepositoryTriggerName", # required
# destination_arn: "Arn", # required
# custom_data: "RepositoryTriggerCustomData",
# branches: ["BranchName"],
# events: ["all"], # required, accepts all, updateReference, createReference, deleteReference
# },
# ],
# })
#
# @example Response structure
#
# resp.configuration_id #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/PutRepositoryTriggers AWS API Documentation
#
# @overload put_repository_triggers(params = {})
# @param [Hash] params ({})
def put_repository_triggers(params = {}, options = {})
req = build_request(:put_repository_triggers, params)
req.send_request(options)
end
# Tests the functionality of repository triggers by sending information
# to the trigger target. If real data is available in the repository,
# the test will send data from the last commit. If no data is available,
# sample data will be generated.
#
# @option params [required, String] :repository_name
# The name of the repository in which to test the triggers.
#
# @option params [required, Array<Types::RepositoryTrigger>] :triggers
# The list of triggers to test.
#
# @return [Types::TestRepositoryTriggersOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::TestRepositoryTriggersOutput#successful_executions #successful_executions} => Array<String>
# * {Types::TestRepositoryTriggersOutput#failed_executions #failed_executions} => Array<Types::RepositoryTriggerExecutionFailure>
#
# @example Request syntax with placeholder values
#
# resp = client.test_repository_triggers({
# repository_name: "RepositoryName", # required
# triggers: [ # required
# {
# name: "RepositoryTriggerName", # required
# destination_arn: "Arn", # required
# custom_data: "RepositoryTriggerCustomData",
# branches: ["BranchName"],
# events: ["all"], # required, accepts all, updateReference, createReference, deleteReference
# },
# ],
# })
#
# @example Response structure
#
# resp.successful_executions #=> Array
# resp.successful_executions[0] #=> String
# resp.failed_executions #=> Array
# resp.failed_executions[0].trigger #=> String
# resp.failed_executions[0].failure_message #=> String
#
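    # @example Illustrative workflow: test a trigger configuration before applying it
    #
    #   # A hedged sketch, not generated documentation; the trigger name and
    #   # destination ARN are placeholders.
    #   trigger = {
    #     name: "NotifyOnPush",
    #     destination_arn: "arn:aws:sns:us-east-1:123456789012:MyTopic",
    #     branches: [],
    #     events: ["all"],
    #   }
    #   test = client.test_repository_triggers({
    #     repository_name: "MyDemoRepo",
    #     triggers: [trigger],
    #   })
    #   if test.failed_executions.empty?
    #     client.put_repository_triggers({
    #       repository_name: "MyDemoRepo",
    #       triggers: [trigger],
    #     })
    #   end
    #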
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/TestRepositoryTriggers AWS API Documentation
#
# @overload test_repository_triggers(params = {})
# @param [Hash] params ({})
def test_repository_triggers(params = {}, options = {})
req = build_request(:test_repository_triggers, params)
req.send_request(options)
end
# Replaces the contents of a comment.
#
# @option params [required, String] :comment_id
# The system-generated ID of the comment you want to update. To get this
# ID, use GetCommentsForComparedCommit or GetCommentsForPullRequest.
#
# @option params [required, String] :content
# The updated content with which you want to replace the existing
# content of the comment.
#
# @return [Types::UpdateCommentOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateCommentOutput#comment #comment} => Types::Comment
#
# @example Request syntax with placeholder values
#
# resp = client.update_comment({
# comment_id: "CommentId", # required
# content: "Content", # required
# })
#
# @example Response structure
#
# resp.comment.comment_id #=> String
# resp.comment.content #=> String
# resp.comment.in_reply_to #=> String
# resp.comment.creation_date #=> Time
# resp.comment.last_modified_date #=> Time
# resp.comment.author_arn #=> String
# resp.comment.deleted #=> Boolean
# resp.comment.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/UpdateComment AWS API Documentation
#
# @overload update_comment(params = {})
# @param [Hash] params ({})
def update_comment(params = {}, options = {})
req = build_request(:update_comment, params)
req.send_request(options)
end
# Sets or changes the default branch name for the specified repository.
#
# <note markdown="1"> If you use this operation to change the default branch name to the
# current default branch name, a success message is returned even though
# the default branch did not change.
#
# </note>
#
# @option params [required, String] :repository_name
# The name of the repository to set or change the default branch for.
#
# @option params [required, String] :default_branch_name
# The name of the branch to set as the default.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.update_default_branch({
# repository_name: "RepositoryName", # required
# default_branch_name: "BranchName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/UpdateDefaultBranch AWS API Documentation
#
# @overload update_default_branch(params = {})
# @param [Hash] params ({})
def update_default_branch(params = {}, options = {})
req = build_request(:update_default_branch, params)
req.send_request(options)
end
# Replaces the contents of the description of a pull request.
#
# @option params [required, String] :pull_request_id
# The system-generated ID of the pull request. To get this ID, use
# ListPullRequests.
#
# @option params [required, String] :description
# The updated content of the description for the pull request. This
# content will replace the existing description.
#
# @return [Types::UpdatePullRequestDescriptionOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdatePullRequestDescriptionOutput#pull_request #pull_request} => Types::PullRequest
#
# @example Request syntax with placeholder values
#
# resp = client.update_pull_request_description({
# pull_request_id: "PullRequestId", # required
# description: "Description", # required
# })
#
# @example Response structure
#
# resp.pull_request.pull_request_id #=> String
# resp.pull_request.title #=> String
# resp.pull_request.description #=> String
# resp.pull_request.last_activity_date #=> Time
# resp.pull_request.creation_date #=> Time
# resp.pull_request.pull_request_status #=> String, one of "OPEN", "CLOSED"
# resp.pull_request.author_arn #=> String
# resp.pull_request.pull_request_targets #=> Array
# resp.pull_request.pull_request_targets[0].repository_name #=> String
# resp.pull_request.pull_request_targets[0].source_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_commit #=> String
# resp.pull_request.pull_request_targets[0].source_commit #=> String
# resp.pull_request.pull_request_targets[0].merge_base #=> String
# resp.pull_request.pull_request_targets[0].merge_metadata.is_merged #=> Boolean
# resp.pull_request.pull_request_targets[0].merge_metadata.merged_by #=> String
# resp.pull_request.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/UpdatePullRequestDescription AWS API Documentation
#
# @overload update_pull_request_description(params = {})
# @param [Hash] params ({})
def update_pull_request_description(params = {}, options = {})
req = build_request(:update_pull_request_description, params)
req.send_request(options)
end
# Updates the status of a pull request.
#
# @option params [required, String] :pull_request_id
# The system-generated ID of the pull request. To get this ID, use
# ListPullRequests.
#
# @option params [required, String] :pull_request_status
# The status of the pull request. The only valid operations are to
    #   update the status from `OPEN` to `OPEN`, from `OPEN` to `CLOSED`, or
    #   from `CLOSED` to `CLOSED`.
#
# @return [Types::UpdatePullRequestStatusOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdatePullRequestStatusOutput#pull_request #pull_request} => Types::PullRequest
#
# @example Request syntax with placeholder values
#
# resp = client.update_pull_request_status({
# pull_request_id: "PullRequestId", # required
# pull_request_status: "OPEN", # required, accepts OPEN, CLOSED
# })
#
# @example Response structure
#
# resp.pull_request.pull_request_id #=> String
# resp.pull_request.title #=> String
# resp.pull_request.description #=> String
# resp.pull_request.last_activity_date #=> Time
# resp.pull_request.creation_date #=> Time
# resp.pull_request.pull_request_status #=> String, one of "OPEN", "CLOSED"
# resp.pull_request.author_arn #=> String
# resp.pull_request.pull_request_targets #=> Array
# resp.pull_request.pull_request_targets[0].repository_name #=> String
# resp.pull_request.pull_request_targets[0].source_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_commit #=> String
# resp.pull_request.pull_request_targets[0].source_commit #=> String
# resp.pull_request.pull_request_targets[0].merge_base #=> String
# resp.pull_request.pull_request_targets[0].merge_metadata.is_merged #=> Boolean
# resp.pull_request.pull_request_targets[0].merge_metadata.merged_by #=> String
# resp.pull_request.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/UpdatePullRequestStatus AWS API Documentation
#
# @overload update_pull_request_status(params = {})
# @param [Hash] params ({})
def update_pull_request_status(params = {}, options = {})
req = build_request(:update_pull_request_status, params)
req.send_request(options)
end
# Replaces the title of a pull request.
#
# @option params [required, String] :pull_request_id
# The system-generated ID of the pull request. To get this ID, use
# ListPullRequests.
#
# @option params [required, String] :title
# The updated title of the pull request. This will replace the existing
# title.
#
# @return [Types::UpdatePullRequestTitleOutput] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdatePullRequestTitleOutput#pull_request #pull_request} => Types::PullRequest
#
# @example Request syntax with placeholder values
#
# resp = client.update_pull_request_title({
# pull_request_id: "PullRequestId", # required
# title: "Title", # required
# })
#
# @example Response structure
#
# resp.pull_request.pull_request_id #=> String
# resp.pull_request.title #=> String
# resp.pull_request.description #=> String
# resp.pull_request.last_activity_date #=> Time
# resp.pull_request.creation_date #=> Time
# resp.pull_request.pull_request_status #=> String, one of "OPEN", "CLOSED"
# resp.pull_request.author_arn #=> String
# resp.pull_request.pull_request_targets #=> Array
# resp.pull_request.pull_request_targets[0].repository_name #=> String
# resp.pull_request.pull_request_targets[0].source_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_reference #=> String
# resp.pull_request.pull_request_targets[0].destination_commit #=> String
# resp.pull_request.pull_request_targets[0].source_commit #=> String
# resp.pull_request.pull_request_targets[0].merge_base #=> String
# resp.pull_request.pull_request_targets[0].merge_metadata.is_merged #=> Boolean
# resp.pull_request.pull_request_targets[0].merge_metadata.merged_by #=> String
# resp.pull_request.client_request_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/UpdatePullRequestTitle AWS API Documentation
#
# @overload update_pull_request_title(params = {})
# @param [Hash] params ({})
def update_pull_request_title(params = {}, options = {})
req = build_request(:update_pull_request_title, params)
req.send_request(options)
end
# Sets or changes the comment or description for a repository.
#
# <note markdown="1"> The description field for a repository accepts all HTML characters and
# all valid Unicode characters. Applications that do not HTML-encode the
# description and display it in a web page could expose users to
# potentially malicious code. Make sure that you HTML-encode the
# description field in any application that uses this API to display the
# repository description on a web page.
#
# </note>
#
# @option params [required, String] :repository_name
# The name of the repository to set or change the comment or description
# for.
#
# @option params [String] :repository_description
# The new comment or description for the specified repository.
# Repository descriptions are limited to 1,000 characters.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.update_repository_description({
# repository_name: "RepositoryName", # required
# repository_description: "RepositoryDescription",
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/UpdateRepositoryDescription AWS API Documentation
#
# @overload update_repository_description(params = {})
# @param [Hash] params ({})
def update_repository_description(params = {}, options = {})
req = build_request(:update_repository_description, params)
req.send_request(options)
end
# Renames a repository. The repository name must be unique across the
# calling AWS account. In addition, repository names are limited to 100
# alphanumeric, dash, and underscore characters, and cannot include
# certain characters. The suffix ".git" is prohibited. For a full
# description of the limits on repository names, see [Limits][1] in the
# AWS CodeCommit User Guide.
#
#
#
# [1]: http://docs.aws.amazon.com/codecommit/latest/userguide/limits.html
#
# @option params [required, String] :old_name
# The existing name of the repository.
#
# @option params [required, String] :new_name
# The new name for the repository.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.update_repository_name({
# old_name: "RepositoryName", # required
# new_name: "RepositoryName", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/codecommit-2015-04-13/UpdateRepositoryName AWS API Documentation
#
# @overload update_repository_name(params = {})
# @param [Hash] params ({})
def update_repository_name(params = {}, options = {})
req = build_request(:update_repository_name, params)
req.send_request(options)
end
# @!endgroup
# @param params ({})
# @api private
def build_request(operation_name, params = {})
handlers = @handlers.for(operation_name)
context = Seahorse::Client::RequestContext.new(
operation_name: operation_name,
operation: config.api.operation(operation_name),
client: self,
params: params,
config: config)
context[:gem_name] = 'aws-sdk-codecommit'
context[:gem_version] = '1.13.0'
Seahorse::Client::Request.new(handlers, context)
end
# @api private
# @deprecated
def waiter_names
[]
end
class << self
# @api private
attr_reader :identifier
# @api private
def errors_module
Errors
end
end
end
end
| 44.587516 | 209 | 0.676104 |
e2d7f6599229dab7b9e03a87fdf8f6e6f58ea578 | 3,534 | require 'active_support/concern'
require 'elasticsearch/model'
require 'track_record/tracker'
module TrackRecord
extend ActiveSupport::Concern
QUERY_AMOUNT = ENV["FEED_EVENTS_QUERY_AMOUNT"].nil? ? 100 : ENV["FEED_EVENTS_QUERY_AMOUNT"].to_i
Client = Elasticsearch::Model.client
included do
after_commit on: :create do
Tracker.track_change(:index, self.class.to_s, id, previous_changes, current_user, "create")
end
after_commit on: :update do
Tracker.track_change(:index, self.class.to_s, id, previous_changes, current_user, "update")
end
around_destroy :audit_deleted
end
def audit_deleted
record = self.to_json
index_name = self.audit_index_name
record_id = self.id
yield
Tracker.track_deletion(self.class.to_s, record_id, current_user, "delete", record, index_name)
end
def audit_index_name
Rails.env.production? ? "audit_#{model_name.route_key}" : "audit_#{model_name.route_key}_#{Rails.env}"
end
def current_user
$custom_current_user.present? ? $custom_current_user.as_json : nil
end
def feed
search_hash = Hash.new
search = Client.search index: audit_index_name, ignore: [404], body: { size: QUERY_AMOUNT, query: { match: { 'record.id': id } } }
search_hash[self.class.to_s] = search['hits']['hits'].pluck('_source') unless search['hits'].nil?
association_class_list = self.association_classes
search_hash = search_hash.merge(self.association_feed(association_class_list))
search_hash = search_hash.merge(self.richtext_feed(association_class_list))
end
private
def association_classes
Rails.application.eager_load!
ApplicationRecord.descendants.collect(&:name).select{ |model| model.constantize.reflect_on_all_associations().map(&:name).include? self.class.to_s.underscore.to_sym }
end
def association_feed(association_class_list)
association_search_hash = Hash.new
association_class_list.each do | association_class |
instance = association_class.classify.constantize.new
search = association_search(instance.audit_index_name, self.class.to_s.underscore)
association_search_hash[association_class] = search unless search.nil?
end
association_search_hash
end
def association_search(index_name, parent_class_name)
search = Client.search index: index_name, ignore: [404], body: { size: QUERY_AMOUNT, query: { match: { "record.#{parent_class_name}_id": id } } }
search['hits']['hits'].pluck('_source') unless search['hits'].nil?
end
def richtext_feed(association_class_list)
richtext_hash = Hash.new
richtext_hash["RichText"] = Hash.new
audit_index_name = Rails.env.production? ? "audit_richtext" : "audit_richtext_#{Rails.env}"
search = richtext_search(audit_index_name, self.class.to_s)
richtext_hash["RichText"][self.class.to_s] = search unless (search.nil? || search.empty?)
association_class_list.each do | association_class |
search = richtext_search(audit_index_name, association_class)
richtext_hash["RichText"][association_class] = search unless (search.nil? || search.empty?)
end
richtext_hash
end
def richtext_search(index_name, class_name)
search = Client.search index: index_name, ignore: [404], body: { size: QUERY_AMOUNT, query: { bool: { must: [ { match: { "record.record_id": id } },
{ match: { "record.record_type": class_name } }]}}}
search['hits']['hits'].pluck('_source') unless search['hits'].nil?
end
end | 40.159091 | 170 | 0.723543 |
18aecdd16200f58793015424bfe4a32f8fc7048b | 1,562 | #--
# Copyright (c) 2010 Michael Berkovich, Geni Inc
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
class Tr8n::TranslationKeyComment < ActiveRecord::Base
set_table_name :tr8n_translation_key_comments
belongs_to :language, :class_name => "Tr8n::Language"
belongs_to :translator, :class_name => "Tr8n::Translator"
belongs_to :translation_key, :class_name => "Tr8n::TranslationKey"
alias :key :translation_key
def toHTML
return "" unless message
message.gsub("\n", "<br>")
end
end
| 41.105263 | 75 | 0.742638 |
ab9676d96b870f3c1bba8be8f8e00898a6b323a9 | 778 | module God
module Conditions
class DiskUsage < PollCondition
attr_accessor :above, :mount_point
def initialize
super
self.above = nil
self.mount_point = nil
end
def valid?
valid = true
valid &= complain("Attribute 'mount_point' must be specified", self) if self.mount_point.nil?
valid &= complain("Attribute 'above' must be specified", self) if self.above.nil?
valid
end
def test
self.info = []
usage = `df -P | grep -i " #{self.mount_point}$" | awk '{print $5}' | sed 's/%//'`
if usage.to_i > self.above
self.info = "disk space out of bounds"
return true
else
return false
end
end
end
end
end
| 23.575758 | 101 | 0.556555 |
6a1abd6f368e876cfa18c5190becdf080816861d | 965 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'alphabetical_paginate/version'
Gem::Specification.new do |spec|
spec.name = "alphabetical_paginate"
spec.version = AlphabeticalPaginate::VERSION
spec.authors = ["lingz"]
spec.email = ["[email protected]"]
spec.description = "Alphabetical Pagination"
spec.summary = "Pagination"
spec.homepage = "https://github.com/lingz/alphabetical_paginate"
spec.license = "MIT"
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.3"
spec.add_development_dependency "rake"
spec.add_development_dependency "rspec", "~> 2.6"
spec.add_development_dependency "rails"
end
| 37.115385 | 74 | 0.675648 |
618baf0aa45ec1ecb69f47a1938c52f0838e36b5 | 3,348 | require 'find'
require 'taglib'
require 'pathname'
require 'logger'
require 'json'
module UnitF
module Tag
class File < Pathname
def initialize(file_path)
super(::File.absolute_path(file_path.to_s))
end
def tag
@file.tag
end
def format_json
JSON.pretty_generate(info)
end
def format_line
buff = []
info.each_key do |key|
buff << "#{key}=#{info[key]}"
end
buff.join(',')
end
def info
{
file: realpath.to_path,
artist: tag.artist,
album: tag.album,
title: tag.title,
track: tag.track,
genre: tag.genre,
year: tag.year,
cover: cover?
}
end
def print
puts "File : #{realpath}"
puts "Artist: #{tag.artist}"
puts "Album : #{tag.album}"
puts "Title : #{tag.title}"
puts "Track : #{tag.track}"
puts "Genre : #{tag.genre}"
puts "Year : #{tag.year}"
puts "Cover : #{cover?}"
puts "Stats : #{stats}"
puts
end
def cover_path
"#{dirname}/cover.jpg"
end
def auto_tag_path
"#{dirname}/.autotag"
end
def mp3?
extname.match(/\.mp3$/i)
end
def flac?
extname.match(/\.flac$/i)
end
def cover_available?
::File.exist?(cover_path)
end
def auto_cover!
cover!(cover_path)
end
def manual_auto_tags
UnitF::Log.info(auto_tag_path)
tags = {}
return {} unless ::File.exist?(auto_tag_path)
::File.read(auto_tag_path).each_line do |line|
line.chomp!
UnitF::Log.info(line)
tag, value = line.split(/\s*=\s*/)
tags[tag.to_sym] = value
end
tags
rescue
{}
end
def auto_tags
manual_tags = manual_auto_tags
tags = {}
tags[:title] = ::File.basename(realpath.to_path)
track = tags[:title].match(/^\s*\d+/).to_s.to_i
tags[:title].gsub!(/\.\w+$/, '')
tags[:title].gsub!(/^\d*\s*(-|\.)*\s*/, '')
path_parts = realpath.dirname.to_path.split('/')
tags[:album] = path_parts[-1]
tags[:artist] = path_parts[-2]
tags.merge(manual_auto_tags)
end
def auto_tag!
UnitF::Log.info("Auto tagging #{to_s}")
manual_auto_tags
title = ::File.basename(realpath.to_path)
# This must come before gsubbing the title
track = title.match(/^\s*\d+/).to_s.to_i
title.gsub!(/\.\w+$/, '')
title.gsub!(/^\d*\s*(-|\.)*\s*/, '')
path_parts = realpath.dirname.to_path.split('/')
album = path_parts[-1]
artist = path_parts[-2]
tag.album = album
tag.artist = artist
tag.title = title
tag.track = track
self.album_artist = artist
end
def save
@file.save
end
def close
@file&.close
@file = nil
end
def open
object = if flac?
UnitF::Tag::FLAC.new(to_path)
elsif mp3?
UnitF::Tag::MP3.new(to_path)
end
yield(object) if block_given?
object&.close
end
end
end
end
| 21.74026 | 56 | 0.497312 |
bf48af6e90481438d8ffaa9d592369f0d750b0e3 | 680 | Pod::Spec.new do |s|
s.name = 'GoLog'
s.version = '0.0.3'
s.summary = 'GoLog'
s.description = "GoLog"
s.homepage = "https://github.com/junbjnnn/GoLog"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "GoLog" => "[email protected]" }
s.source = { :git => "https://github.com/junbjnnn/GoLog.git", :tag => "#{s.version}" }
s.ios.deployment_target = '10.0'
s.source_files = "GoLog/**/*.{h,m,swift,xib,storyboard}"
s.resource_bundles = {
'GoLog' => ['GoLog/GoLogBundle/*.{storyboard,xib,xcassets,json,imageset,png}']
}
s.dependency 'SwiftLog', '1.0.0'
s.swift_version = "4.2"
end
| 35.789474 | 98 | 0.555882 |
8780c7e921851d56d3ea3656945cb21df1a4de11 | 45 | module LogicalFriday
VERSION = "0.1.0"
end
| 11.25 | 20 | 0.711111 |
bba947502e63e7124b812e912ab05cd8d4d89270 | 1,236 | module Tiki
module Torch
class Transcoder
class << self
def inherited(subclass)
unless registry.include? subclass
registry << subclass
end
end
def registry
@registry ||= []
end
def transcoder_for(code)
registry.find { |klass| klass.code == code.to_s }
end
def encode(payload = {}, properties = {}, code = Torch.config.transcoder_code)
transcoder = transcoder_for(code)
raise "Unknown transcoder code [#{code}]" unless transcoder
"#{code}|#{transcoder.encode(payload, properties)}"
end
def decode(str)
code, body = split_encoding str
raise "Invalid encoding [#{code}]" unless code
transcoder = transcoder_for code
raise "Unknown encoding [#{code}]" unless transcoder
transcoder.decode body
end
private
def split_encoding(str)
sig = str[0, 255]
len = sig.index('|')
if len > 0
sig, body = str[0, len], str[len+1 .. -1]
else
sig, body = nil, nil
end
[sig, body]
end
end
end
end
end
| 22.071429 | 86 | 0.522654 |
030b9b210bbcac146c367b1ed02aa60ae390f6b2 | 4,164 | require 'spec_helper'
require 'r10k/module/git'
describe R10K::Module::Git do
describe "setting the owner and name" do
describe "with a title of 'branan/eight_hundred'" do
subject do
described_class.new(
'branan/eight_hundred',
'/moduledir',
{
:git => 'git://git-server.site/branan/puppet-eight_hundred',
}
)
end
it "sets the owner to 'branan'" do
expect(subject.owner).to eq 'branan'
end
it "sets the name to 'eight_hundred'" do
expect(subject.name).to eq 'eight_hundred'
end
it "sets the path to '/moduledir/eight_hundred'" do
expect(subject.path).to eq(Pathname.new('/moduledir/eight_hundred'))
end
end
describe "with a title of 'modulename'" do
subject do
described_class.new(
'eight_hundred',
'/moduledir',
{
:git => 'git://git-server.site/branan/puppet-eight_hundred',
}
)
end
it "sets the owner to nil" do
expect(subject.owner).to be_nil
end
it "sets the name to 'eight_hundred'" do
expect(subject.name).to eq 'eight_hundred'
end
it "sets the path to '/moduledir/eight_hundred'" do
expect(subject.path).to eq(Pathname.new('/moduledir/eight_hundred'))
end
end
end
describe "properties" do
subject do
described_class.new('boolean', '/moduledir', {:git => 'git://github.com/adrienthebo/puppet-boolean'})
end
it "sets the module type to :git" do
expect(subject.properties).to include(:type => :git)
end
it "sets the expected version" do
expect(subject.properties).to include(:expected => instance_of(R10K::Git::Ref))
end
it "sets the actual version to the revision when the revision is available" do
head = double('head')
expect(subject.working_dir).to receive(:current).and_return(head)
expect(head).to receive(:sha1).and_return('35d3517e67ceeb4b485b56d4a14d38fb95516c92')
expect(subject.properties).to include(:actual => '35d3517e67ceeb4b485b56d4a14d38fb95516c92')
end
it "sets the actual version (unresolvable) when the revision is unavailable" do
head = double('head')
expect(subject.working_dir).to receive(:current).and_return(head)
expect(head).to receive(:sha1).and_raise(ArgumentError)
expect(subject.properties).to include(:actual => '(unresolvable)')
end
end
describe "determining the status" do
subject do
described_class.new(
'boolean',
'/moduledir',
{
:git => 'git://github.com/adrienthebo/puppet-boolean'
}
)
end
it "is absent when the working dir is absent" do
expect(subject.working_dir).to receive(:exist?).and_return false
expect(subject.status).to eq :absent
end
it "is mismatched Then the working dir is not a git repository" do
allow(subject.working_dir).to receive(:exist?).and_return true
expect(subject.working_dir).to receive(:git?).and_return false
expect(subject.status).to eq :mismatched
end
it "is mismatched when the expected remote does not match the actual remote" do
allow(subject.working_dir).to receive(:exist?).and_return true
expect(subject.working_dir).to receive(:git?).and_return true
expect(subject.working_dir).to receive(:remote).and_return 'nope'
expect(subject.status).to eq :mismatched
end
it "is outdated when the working dir is outdated" do
allow(subject.working_dir).to receive(:exist?).and_return true
expect(subject.working_dir).to receive(:git?).and_return true
expect(subject.working_dir).to receive(:outdated?).and_return true
expect(subject.status).to eq :outdated
end
it "is insync if all other conditions are satisfied" do
allow(subject.working_dir).to receive(:exist?).and_return true
expect(subject.working_dir).to receive(:git?).and_return true
expect(subject.working_dir).to receive(:outdated?).and_return false
expect(subject.status).to eq :insync
end
end
end
| 32.53125 | 107 | 0.657061 |
28bad31cc8b3f2ef7d7f5b9f96022cb0a1aa144a | 130 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'checkers'
require 'minitest/autorun'
require 'minitest/mock'
| 21.666667 | 58 | 0.753846 |
0386c3e83509f418af9f00111febdfb933c196ab | 9,849 | require 'openssl'
module PuppetSpec
module SSL
PRIVATE_KEY_LENGTH = 2048
FIVE_YEARS = 5 * 365 * 24 * 60 * 60
CA_EXTENSIONS = [
["basicConstraints", "CA:TRUE", true],
["keyUsage", "keyCertSign, cRLSign", true],
["subjectKeyIdentifier", "hash", false],
["authorityKeyIdentifier", "keyid:always", false]
]
NODE_EXTENSIONS = [
["keyUsage", "digitalSignature", true],
["subjectKeyIdentifier", "hash", false]
]
DEFAULT_SIGNING_DIGEST = OpenSSL::Digest::SHA256.new
DEFAULT_REVOCATION_REASON = OpenSSL::OCSP::REVOKED_STATUS_KEYCOMPROMISE
ROOT_CA_NAME = "/CN=root-ca-\u{2070E}"
REVOKED_INT_CA_NAME = "/CN=revoked-int-ca-\u16A0"
INT_CA_NAME = "/CN=unrevoked-int-ca\u06FF\u16A0\u{2070E}"
LEAF_CA_NAME = "/CN=leaf-ca-\u06FF"
EXPLANATORY_TEXT = <<-EOT
# Root Issuer: #{ROOT_CA_NAME}
# Intermediate Issuer: #{INT_CA_NAME}
# Leaf Issuer: #{LEAF_CA_NAME}
EOT
def self.create_private_key(length = PRIVATE_KEY_LENGTH)
OpenSSL::PKey::RSA.new(length)
end
def self.self_signed_ca(key, name)
cert = OpenSSL::X509::Certificate.new
cert.public_key = key.public_key
cert.subject = OpenSSL::X509::Name.parse(name)
cert.issuer = cert.subject
cert.version = 2
cert.serial = rand(2**128)
not_before = just_now
cert.not_before = not_before
cert.not_after = not_before + FIVE_YEARS
ext_factory = extension_factory_for(cert, cert)
CA_EXTENSIONS.each do |ext|
extension = ext_factory.create_extension(*ext)
cert.add_extension(extension)
end
cert.sign(key, DEFAULT_SIGNING_DIGEST)
cert
end
def self.create_csr(key, name)
csr = OpenSSL::X509::Request.new
csr.public_key = key.public_key
csr.subject = OpenSSL::X509::Name.parse(name)
csr.version = 2
csr.sign(key, DEFAULT_SIGNING_DIGEST)
csr
end
def self.sign(ca_key, ca_cert, csr, extensions = NODE_EXTENSIONS)
cert = OpenSSL::X509::Certificate.new
cert.public_key = csr.public_key
cert.subject = csr.subject
cert.issuer = ca_cert.subject
cert.version = 2
cert.serial = rand(2**128)
not_before = just_now
cert.not_before = not_before
cert.not_after = not_before + FIVE_YEARS
ext_factory = extension_factory_for(ca_cert, cert)
extensions.each do |ext|
extension = ext_factory.create_extension(*ext)
cert.add_extension(extension)
end
cert.sign(ca_key, DEFAULT_SIGNING_DIGEST)
cert
end
def self.create_crl_for(ca_cert, ca_key)
crl = OpenSSL::X509::CRL.new
crl.version = 1
crl.issuer = ca_cert.subject
ef = extension_factory_for(ca_cert)
crl.add_extension(
ef.create_extension(["authorityKeyIdentifier", "keyid:always", false]))
crl.add_extension(
OpenSSL::X509::Extension.new("crlNumber", OpenSSL::ASN1::Integer(0)))
not_before = just_now
crl.last_update = not_before
crl.next_update = not_before + FIVE_YEARS
crl.sign(ca_key, DEFAULT_SIGNING_DIGEST)
crl
end
def self.revoke(serial, crl, ca_key)
revoked = OpenSSL::X509::Revoked.new
revoked.serial = serial
revoked.time = Time.now
revoked.add_extension(
OpenSSL::X509::Extension.new("CRLReason",
OpenSSL::ASN1::Enumerated(DEFAULT_REVOCATION_REASON)))
crl.add_revoked(revoked)
extensions = crl.extensions.group_by{|e| e.oid == 'crlNumber' }
crl_number = extensions[true].first
unchanged_exts = extensions[false]
next_crl_number = crl_number.value.to_i + 1
new_crl_number_ext = OpenSSL::X509::Extension.new("crlNumber",
OpenSSL::ASN1::Integer(next_crl_number))
crl.extensions = unchanged_exts + [new_crl_number_ext]
crl.sign(ca_key, DEFAULT_SIGNING_DIGEST)
crl
end
# Creates a self-signed root ca, then signs two node certs, revoking one of them.
# Creates an intermediate CA and one node cert off of it.
    # Creates a second intermediate CA and one node cert off of it.
    # Creates a leaf CA off of the intermediate CA, then signs two node certs, revoking one of them.
# Revokes an intermediate CA.
# Returns the ca bundle, crl chain, and all the node certs
#
# -----
# / \
# / \
# | root +-------------------o------------------o
# \ CA / | |
# \ / | |
# --+-- | |
# | | |
# | | |
# | | |
# | --+-- --+--
# +---------+ | +---------+ / \ / \
# | revoked | | | | /revoked\ / \
# | node +--o---+ node | | int | | int |
# | | | | \ CA / \ CA /
# +---------+ +---------+ \ / \ /
# --+-- --+--
# | |
# | |
# | |
# --+-- |
# / \ +---+-----+
# / \ | |
# | leaf | | node |
# \ CA / | |
# \ / +---------+
# --+--
# |
# |
# +---------+ | +----------+
# | revoked | | | |
# | node +--o--+ node |
# | | | |
# +---------+ +----------+
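    #
    # A minimal usage sketch (illustrative only, not called by this helper; the
    # OpenSSL store below is an assumption about how a spec might consume the hash):
    #
    #   pki   = PuppetSpec::SSL.create_chained_pki
    #   store = OpenSSL::X509::Store.new
    #   store.add_cert(pki[:root_cert])
    #   store.add_cert(pki[:revoked_int_cert])
    #   store.add_cert(pki[:leaf_cert])
    #   store.verify(pki[:unrevoked_leaf_node_cert]) #=> true (no CRL checking here)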
def self.create_chained_pki
root_key = create_private_key
root_cert = self_signed_ca(root_key, ROOT_CA_NAME)
root_crl = create_crl_for(root_cert, root_key)
unrevoked_root_node_key = create_private_key
unrevoked_root_node_csr = create_csr(unrevoked_root_node_key, "/CN=unrevoked-root-node")
unrevoked_root_node_cert = sign(root_key, root_cert, unrevoked_root_node_csr)
revoked_root_node_key = create_private_key
revoked_root_node_csr = create_csr(revoked_root_node_key, "/CN=revoked-root-node")
revoked_root_node_cert = sign(root_key, root_cert, revoked_root_node_csr)
revoke(revoked_root_node_cert.serial, root_crl, root_key)
revoked_int_key = create_private_key
revoked_int_csr = create_csr(revoked_int_key, REVOKED_INT_CA_NAME)
revoked_int_cert = sign(root_key, root_cert, revoked_int_csr, CA_EXTENSIONS)
revoked_int_crl = create_crl_for(revoked_int_cert, revoked_int_key)
int_key = create_private_key
int_csr = create_csr(int_key, INT_CA_NAME)
int_cert = sign(root_key, root_cert, int_csr, CA_EXTENSIONS)
int_node_key = create_private_key
int_node_csr = create_csr(int_node_key, "/CN=unrevoked-int-node")
int_node_cert = sign(int_key, int_cert, int_node_csr)
unrevoked_int_node_key = create_private_key
unrevoked_int_node_csr = create_csr(unrevoked_int_node_key, "/CN=unrevoked-int-node")
unrevoked_int_node_cert = sign(revoked_int_key, revoked_int_cert, unrevoked_int_node_csr)
leaf_key = create_private_key
leaf_csr = create_csr(leaf_key, LEAF_CA_NAME)
leaf_cert = sign(revoked_int_key, revoked_int_cert, leaf_csr, CA_EXTENSIONS)
leaf_crl = create_crl_for(leaf_cert, leaf_key)
revoke(revoked_int_cert.serial, root_crl, root_key)
unrevoked_leaf_node_key = create_private_key
unrevoked_leaf_node_csr = create_csr(unrevoked_leaf_node_key, "/CN=unrevoked-leaf-node")
unrevoked_leaf_node_cert = sign(leaf_key, leaf_cert, unrevoked_leaf_node_csr)
revoked_leaf_node_key = create_private_key
revoked_leaf_node_csr = create_csr(revoked_leaf_node_key, "/CN=revoked-leaf-node")
revoked_leaf_node_cert = sign(leaf_key, leaf_cert, revoked_leaf_node_csr)
revoke(revoked_leaf_node_cert.serial, leaf_crl, leaf_key)
ca_bundle = bundle(root_cert, revoked_int_cert, leaf_cert)
crl_chain = bundle(root_crl, revoked_int_crl, leaf_crl)
{
:root_cert => root_cert,
:int_cert => int_cert,
:int_node_cert => int_node_cert,
:leaf_cert => leaf_cert,
:revoked_root_node_cert => revoked_root_node_cert,
:revoked_int_cert => revoked_int_cert,
:revoked_leaf_node_cert => revoked_leaf_node_cert,
:unrevoked_root_node_cert => unrevoked_root_node_cert,
:unrevoked_int_node_cert => unrevoked_int_node_cert,
:unrevoked_leaf_node_cert => unrevoked_leaf_node_cert,
:ca_bundle => ca_bundle,
:crl_chain => crl_chain,
}
end
private
def self.just_now
Time.now - 1
end
def self.extension_factory_for(ca, cert = nil)
ef = OpenSSL::X509::ExtensionFactory.new
ef.issuer_certificate = ca
ef.subject_certificate = cert if cert
ef
end
def self.bundle(*items)
items.map {|i| EXPLANATORY_TEXT + i.to_pem }.join("\n")
end
end
end
| 37.166038 | 99 | 0.554066 |
5db45b283af9eaaafe962f601c5351d625d83fbf | 4,150 | # frozen_string_literal: true
require 'active_support/concern'
module BetterRecord
module ModelConcerns
module HasProtectedPassword
extend ActiveSupport::Concern
module ClassMethods
def has_protected_password(
password_field: :password,
password_validator: nil,
confirm: true,
**opts
)
# == Constants ============================================================
og_dup_arr = []
if (
self.const_defined?(:NON_DUPABLE_KEYS) &&
(
self.const_get(:NON_DUPABLE_KEYS).is_a?(Array) ||
self.const_belongs_to_parent?(:NON_DUPABLE_KEYS)
)
)
og_dup_arr = [*self.const_get(:NON_DUPABLE_KEYS)]
self.__send__ :remove_const, :NON_DUPABLE_KEYS unless self.const_belongs_to_parent?(:NON_DUPABLE_KEYS)
end
unless self.const_defined?(:NON_DUPABLE_KEYS)
self.__send__ :const_set, :NON_DUPABLE_KEYS, Set[]
end
self::NON_DUPABLE_KEYS.merge(%I[
#{password_field}
new_#{password_field}
new_#{password_field}_confirmation
clear_#{password_field}
])
# == Attributes ===========================================================
attribute :"new_#{password_field}", :text
attribute :"new_#{password_field}_confirmation", :text
attribute :"clear_#{password_field}", :text
# == Extensions ===========================================================
# == Relationships ========================================================
# == Validations ==========================================================
validate :"new_#{password_field}", :"require_#{password_field}_confirmation", if: :"new_#{password_field}?"
if password_validator
          validate password_validator, if: :"new_#{password_field}?"
end
# == Scopes ===============================================================
# == Callbacks ============================================================
# == Boolean Class Methods ================================================
# == Class Methods ========================================================
# == Boolean Methods ======================================================
# == Instance Methods =====================================================
define_method password_field do
self[password_field]
end
private password_field
define_method :"#{password_field}=" do |value|
write_attribute password_field, value
end
private :"#{password_field}="
define_method :"clear_#{password_field}=" do |value|
if value && (value.to_sym == :clear)
__send__ :"#{password_field}=", (self.persisted? ? 'CLEAR_EXISTING_PASSWORD_FOR_ROW' : nil)
__send__ :"new_#{password_field}=", nil
__send__ :"new_#{password_field}_confirmation=", nil
end
true
end
if confirm
define_method :"require_#{password_field}_confirmation" do
tmp_new_pwd = __send__ :"new_#{password_field}"
tmp_new_confirmation = __send__ :"new_#{password_field}_confirmation"
if tmp_new_pwd.present?
if tmp_new_pwd != tmp_new_confirmation
errors.add(:"new_#{password_field}", 'does not match confirmation')
else
self.__send__ :"#{password_field}=", tmp_new_pwd
end
end
end
else
define_method :"require_#{password_field}_confirmation" do
tmp_new_pwd = __send__ :"new_#{password_field}"
if tmp_new_pwd.present?
self.__send__ :"#{password_field}=", tmp_new_pwd
end
end
end
private :"require_#{password_field}_confirmation"
end
end
end
end
end
| 35.169492 | 117 | 0.476145 |
f8ca31470baa08278fb94d9fa7e8595fcb67a46b | 596 | require "sinatra/base"
class FakeSlackApi < Sinatra::Base
cattr_accessor :failure
post "/api/chat.postMessage" do
if failure
json_response 200, "message_post_failure.json"
else
json_response 200, "message_posted.json"
end
end
post "/api/users.list" do
json_response 200, "users_list.json"
end
post "/api/im.open" do
json_response 200, "im_open.json"
end
private
def json_response(response_code, file_name)
content_type :json
status response_code
File.open(File.dirname(__FILE__) + '/fixtures/' + file_name, 'rb').read
end
end
| 19.866667 | 75 | 0.697987 |
87d18305d703ddd2a764df60b57d6c2631f09aca | 1,023 | class MicropostsController < ApplicationController
before_action :logged_in_user, only: [:create, :destroy]
before_action :correct_user, only: [:destroy]
def create
@micropost = current_user.microposts.build(micropost_params)
@micropost.image.attach(params[:micropost][:image])
if @micropost.save
flash[:success] = "Micropost created!"
redirect_to root_url
else
@feed_items = current_user.feed.paginate(page: params[:page])
render 'static_pages/home'
end
end
def destroy
@micropost.destroy
flash[:success] = "Micropost deleted"
redirect_back(fallback_location: root_url) # redirect_to request.referrer || root_url
end
private
def micropost_params
params.require(:micropost).permit(:content, :image)
end
def correct_user
@micropost = current_user.microposts.find_by(id: params[:id])
redirect_to root_url if @micropost.nil?
end
end
| 30.088235 | 93 | 0.653959 |
031176b6c2809b7d9f0df48409c9e4b40737ca0b | 170 | # frozen_string_literal: true
FactoryGirl.define do
factory :book, class: Book do
id { 1 }
title { "神探伽利略" }
author { "東野圭吾" }
genre { "推理" }
end
end
| 17 | 31 | 0.6 |
ab59d8f7a66b53cfa2eaa01563fe34d267185479 | 239 | # frozen_string_literal: true
require "test_helper"
class TestHexletCode < Minitest::Test
def test_that_it_has_a_version_number
refute_nil ::HexletCode::VERSION
end
def test_it_does_something_useful
assert false
end
end
| 17.071429 | 39 | 0.790795 |
1a4617dd3ed6ee2c56486fdbe1a2a3d192e9a999 | 1,411 | require 'securerandom'
module SPARQL; module Algebra
class Operator
##
# The SPARQL `uuid` function.
#
# [121] BuiltInCall ::= ... | 'UUID' NIL
#
# @example SPARQL Grammar
# PREFIX : <http://example.org/>
# PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
# ASK {
# BIND(UUID() AS ?u1)
# BIND(UUID() AS ?u2)
# FILTER(?u1 != ?u2)
# }
#
# @example SSE
# (prefix
# ((: <http://example.org/>) (xsd: <http://www.w3.org/2001/XMLSchema#>))
# (ask
# (filter (!= ?u1 ?u2)
# (extend ((?u1 (uuid)) (?u2 (uuid)))
# (bgp)))))
#
# @see https://www.w3.org/TR/sparql11-query/#func-uuid
class UUID < Operator::Nullary
include Evaluatable
NAME = :uuid
##
# Return a fresh IRI from the UUID URN scheme. Each call of UUID() returns a different UUID. It must not be the "nil" UUID (all zeroes). The variant and version of the UUID is implementation dependent.
#
# @return [RDF::URI]
def apply(**options)
RDF::URI("urn:uuid:#{SecureRandom.uuid}")
end
##
#
# Returns a partial SPARQL grammar for this operator.
#
# @return [String]
def to_sparql(**options)
"UUID(" + operands.to_sparql(delimiter: ', ', **options) + ")"
end
end # UUID
end # Operator
end; end # SPARQL::Algebra
| 27.134615 | 207 | 0.535082 |
386ef68af97a1034156c85b016ffd9a4ea5a25c7 | 208 | # frozen_string_literal: true
require 'open3'
class RTesseract
module Text
def self.run(source, errors, options)
RTesseract::Command.new(source, 'stdout', errors, options).run
end
end
end
| 17.333333 | 68 | 0.711538 |
5dff7527c87acb9dd7317be2862db13d99356d5f | 1,341 | require_relative 'edgecase'
class AboutNil < EdgeCase::Koan
def test_nil_is_an_object
assert nil.is_a?(Object), "Unlike NULL in other languages"
end
def test_you_dont_get_null_pointer_errors_when_calling_methods_on_nil
#
# What is the Exception that is thrown when you call a method that
# does not exist?
#
# Hint: launch irb and try the code in the block below.
#
# Don't be confused by the code below yet. It's using blocks
# which are explained later on in about_blocks.rb. For now,
# think about it like running nil.some_method_nil_doesnt_know_about
# in a sandbox and catching the error class into the exception
# variable.
#
exception = assert_raise('NoMethodError (undefined method `to_sym\' for nil:NilClass)') do
nil.to_sym
end
#
# What is the error message itself? What substring or pattern could
# you test against in order to have a good idea what the string is?
#
assert_match /[a-zA-Z]+' for nil:NilClass/, exception.message
end
def test_nil_has_a_few_methods_defined_on_it
assert_equal true, nil.nil?
assert_equal '', nil.to_s
assert_equal "nil", nil.inspect
# THINK ABOUT IT:
#
# Is it better to use
# obj.nil?
# or
# obj == nil
# Why?
end
end
| 28.531915 | 94 | 0.66965 |
ffc20ee1074867a9ca4e098fd46e687b39d42fa6 | 678 | require 'pathname'
module Search
def self.files(filter, dir, callback, block)
dir = Pathname.new dir unless dir.instance_of? Pathname
total = 0
dir.each_child do |item|
next if item == '..' or item == '.'
if item.directory?
quantity = files filter, item, callback, block
total += quantity
else
if item.to_s =~ filter
total += 1
block.call item, strings_count( item )
end
end
end
callback.call(dir, total) if total > 0
total
end
private
def self.strings_count(path)
quantity = 0
File.open(path) { |x| x.each_line { quantity += 1 } }
quantity
end
end
| 18.833333 | 59 | 0.587021 |
1a0e09ac445529a49c3f7ab96fb0c78218be72cb | 972 | Why do eyelids often become irritated and red?
Ophthalmologist Tatyana Levashova answers:
Our eyelids act as screens that protect the eyeball from the many small particles that settle on it.
When we forget to clean our eyelids regularly, we often contribute to the development of blepharitis ourselves.
The solution is simple:
do not rub your eyes with dirty hands, and carry a special cloth with you for your eyes;
at least 1-2 times a day, clean the eyelid margins of the harmful substances that have accumulated on them during the day.
It is useful to supplement these cleansing procedures with warm compresses containing an infusion of medicinal herbs (sage, chamomile), which help release the thick secretions of the meibomian glands, followed by a massage of the eyelid margins with the fingertips in circular motions;
when removing makeup, do not use products that contain alcohol.
They dry out and dehydrate the skin and contribute to a further increase in the secretions of the meibomian glands.
| 88.363636 | 288 | 0.837449 |
e97c0c50e5bc043655f7b29dcc37aa2a99c77701 | 798 | # frozen_string_literal: true
Gem::Specification.new do |spec|
spec.name = "minima"
spec.version = "2.5.0"
spec.authors = ["Joel Glovier"]
spec.email = ["[email protected]"]
spec.summary = "A beautiful, minimal theme for Jekyll."
spec.homepage = "https://github.com/jekyll/minima"
spec.license = "MIT"
spec.metadata["plugin_type"] = "theme"
spec.files = `git ls-files -z`.split("\x0").select do |f|
f.match(%r!^(assets|_(includes|layouts|sass)/|(LICENSE|README)((\.(txt|md|markdown)|$)))!i)
end
spec.add_runtime_dependency "jekyll", ">= 3.5", "< 5.0"
spec.add_runtime_dependency "jekyll-seo-tag", "~> 2.1"
spec.add_runtime_dependency "jekyll-coffeescript", "~> 2.0"
spec.add_development_dependency "bundler"
end
| 31.92 | 95 | 0.637845 |
ab2bd1d3cf6116c24b76a5f201c8adfe31350824 | 704 | require "aws-sdk-s3"
require "brutalismbot/version"
require "brutalismbot/reddit/resource"
module Brutalismbot
module Reddit
class Client
attr_reader :endpoint, :user_agent
def initialize(endpoint:nil, user_agent:nil)
@endpoint = endpoint || ENV["REDDIT_ENDPOINT"] || "https://www.reddit.com/r/brutalism"
@user_agent = user_agent || ENV["REDDIT_USER_AGENT"] || "Brutalismbot v#{Brutalismbot::VERSION}"
end
def list(resource, **options)
url = File.join(@endpoint, "#{resource}.json")
qry = URI.encode_www_form(options)
uri = "#{url}?#{qry}"
Resource.new(uri: uri, user_agent: @user_agent)
end
end
end
end
| 28.16 | 104 | 0.647727 |
1ac0e123ff1bd593dee14d7bf24461eccefb8b61 | 416 | class GpgSignature < ActiveRecord::Base
include ShaAttribute
sha_attribute :commit_sha
sha_attribute :gpg_key_primary_keyid
belongs_to :project
belongs_to :gpg_key
validates :commit_sha, presence: true
validates :project_id, presence: true
validates :gpg_key_primary_keyid, presence: true
def gpg_key_primary_keyid
super&.upcase
end
def commit
project.commit(commit_sha)
end
end
| 18.909091 | 50 | 0.776442 |
e9b021bfb2360e89ad118daf3c24de24a560500c | 324 | cask "font-irish-grover" do
version :latest
sha256 :no_check
url "https://github.com/google/fonts/raw/master/apache/irishgrover/IrishGrover-Regular.ttf",
verified: "github.com/google/fonts/"
name "Irish Grover"
homepage "https://fonts.google.com/specimen/Irish+Grover"
font "IrishGrover-Regular.ttf"
end
| 27 | 94 | 0.740741 |
eddd3c3da16d33210a0e36bafd8ddaf7fe94145e | 1,991 | module ListingIndexViewUtils
ListingItem = Struct.new(
:id,
:url,
:title,
:category_id,
:latitude,
:longitude,
:distance,
:distance_unit,
:author,
:description,
:listing_images,
:price,
:unit_tr_key,
:unit_type,
:quantity,
:shape_name_tr_key,
:listing_shape_id,
:icon_name)
Author = Struct.new(
:id,
:username,
:first_name,
:last_name,
:organization_name,
:is_organization,
:avatar,
:is_deleted,
:num_of_reviews)
ListingImage = Struct.new(
:thumb,
:small_3x2)
module_function
def to_struct(result:, includes:, per_page:, page:)
listings = result[:listings].map { |l|
author =
if includes.include?(:author)
Author.new(
l[:author][:id],
l[:author][:username],
l[:author][:first_name],
l[:author][:last_name],
l[:author][:organization_name],
l[:author][:is_organization],
ListingImage.new(
l[:author][:avatar][:thumb]
),
l[:author][:is_deleted],
l[:author][:num_of_reviews]
)
end
listing_images =
if includes.include?(:listing_images)
l[:listing_images].map { |li|
ListingImage.new(li[:thumb], li[:small_3x2])
}
else
[]
end
ListingItem.new(
l[:id],
l[:url],
l[:title],
l[:category_id],
l[:latitude],
l[:longitude],
l[:distance],
l[:distance_unit],
author,
l[:description],
listing_images,
l[:price],
l[:unit_tr_key],
l[:unit_type],
l[:quantity],
l[:shape_name_tr_key],
l[:listing_shape_id],
l[:icon_name]
)
}
paginated = WillPaginate::Collection.create(page, per_page, result[:count]) do |pager|
pager.replace(listings)
end
end
end
| 20.957895 | 90 | 0.523857 |
1c85e9a4ce5935d81c31426749bb0d74655ce62d | 157 | Before("@mediatailor") do
@service = Aws::MediaTailor::Resource.new
@client = @service.client
end
After("@mediatailor") do
# shared cleanup logic
end
| 17.444444 | 43 | 0.713376 |
611b181ebba331be3b47feebfc3c2058f1a3c56e | 1,531 | require 'test_helper'
class TagsControllerTest < ActionController::TestCase
fixtures :all
def test_should_autocomplete_tags
get :auto_complete_for_tag_name, :format => 'json'
assert_response :success
end
def test_should_show_tag
get :show, :id => tags(:general).name
assert_response :success
assert assigns(:photos).include?(photos(:library_pic))
end
def test_should_show_special_character_tag
get :show, :id => tags(:special_characters).name
assert_response :success
end
def test_should_show_tag_with_type
%w(posts users clippings photos).each do |type|
item = type.classify.constantize.first
item.tag_list = "general"
item.save!
      get :show, :id => tags(:general).name, :type => type
assert_response :success
end
end
def test_should_fail_to_show_tag
get :show, :id => 'tag_that_does_not_exist'
assert_redirected_to :action => :index
end
def test_should_get_index
get :index
assert_response :success
end
def test_should_show_matching_items_for_multiple_tags
posts(:apt_post).tag_list = "#{tags(:general).name},#{tags(:extra).name}"
posts(:apt_post).save
posts(:apt2_post).tag_list = "#{tags(:general).name},#{tags(:extra).name},#{tags(:misc).name}"
posts(:apt2_post).save
get :show, :id => 'General,Extra'
assert_response :success
assert_equal 2, assigns(:posts).size
assert assigns(:posts).include?(posts(:apt_post))
assert assigns(:posts).include?(posts(:apt2_post))
end
end
| 24.693548 | 98 | 0.707381 |
08839f290cf4fc2e8162732e770f57cbfa6f2a00 | 2,552 | require_dependency 'distributed_mutex'
class EmailLog < ActiveRecord::Base
self.ignored_columns = %w{
topic_id
reply_key
skipped
skipped_reason
}
CRITICAL_EMAIL_TYPES ||= Set.new %w{
account_created
admin_login
confirm_new_email
confirm_old_email
forgot_password
notify_old_email
signup
signup_after_approval
}
belongs_to :user
belongs_to :post
has_one :topic, through: :post
validates :email_type, :to_address, presence: true
scope :bounced, -> { where(bounced: true) }
after_create do
# Update last_emailed_at if the user_id is present and email was sent
User.where(id: user_id).update_all("last_emailed_at = CURRENT_TIMESTAMP") if user_id.present?
end
def self.unique_email_per_post(post, user)
return yield unless post && user
DistributedMutex.synchronize("email_log_#{post.id}_#{user.id}") do
if where(post_id: post.id, user_id: user.id).exists?
nil
else
yield
end
end
end
def self.reached_max_emails?(user, email_type = nil)
return false if SiteSetting.max_emails_per_day_per_user == 0 || CRITICAL_EMAIL_TYPES.include?(email_type)
count = where('created_at > ?', 1.day.ago)
.where(user_id: user.id)
.count
count >= SiteSetting.max_emails_per_day_per_user
end
def self.count_per_day(start_date, end_date)
where("created_at BETWEEN ? AND ?", start_date, end_date)
.group("DATE(created_at)")
.order("DATE(created_at)")
.count
end
def self.for(reply_key)
self.find_by(reply_key: reply_key)
end
def self.last_sent_email_address
self.where(email_type: "signup")
.order(created_at: :desc)
.limit(1)
.pluck(:to_address)
.first
end
def bounce_key
super&.delete('-')
end
end
# == Schema Information
#
# Table name: email_logs
#
# id :integer not null, primary key
# to_address :string not null
# email_type :string not null
# user_id :integer
# created_at :datetime not null
# updated_at :datetime not null
# post_id :integer
# bounce_key :uuid
# bounced :boolean default(FALSE), not null
# message_id :string
#
# Indexes
#
# index_email_logs_on_created_at (created_at)
# index_email_logs_on_message_id (message_id)
# index_email_logs_on_post_id (post_id)
# index_email_logs_on_user_id (user_id)
# index_email_logs_on_user_id_and_created_at (user_id,created_at)
#
| 24.304762 | 109 | 0.670455 |
b9cfc6057a04c9495685a006d6e0a3980c4d5efe | 12,753 | # Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# The secret key used by Devise. Devise uses this key to generate
# random tokens. Changing this key will render invalid all existing
# confirmation, reset password and unlock tokens in the database.
# config.secret_key = '0d875c7c9e6bfd3b8915c257eb8ebb6fafa484dd8588b122ed73e22f680992ca87af05b4c98b17d4f6e72adff0c0ef448412ed779f2a4b5b5cdb891019379aea'
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class
# with default "from" parameter.
config.mailer_sender = '[email protected]'
# Configure the class responsible to send e-mails.
# config.mailer = 'Devise::Mailer'
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [ :email ]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [ :email ]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [ :email ]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:database]` will
# enable it only for database authentication. The supported strategies are:
# :database = Support basic authentication with authentication key + password
# config.http_authenticatable = false
# If 401 status code should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. 'Application' by default.
# config.http_authentication_realm = 'Application'
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# particular strategies by setting this option.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing skip: :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# By default, Devise cleans up the CSRF token on authentication to
# avoid CSRF token fixation attacks. This means that, when using AJAX
# requests for sign in and sign up, you need to get a new CSRF token
# from the server. You can disable this option at your own risk.
# config.clean_up_csrf_token_on_authentication = true
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 10. If
# using other encryptors, it sets how many times you want the password re-encrypted.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments. Note that, for bcrypt (the default
# encryptor), the cost increases exponentially with the number of stretches (e.g.
# a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
config.stretches = Rails.env.test? ? 1 : 10
# Setup a pepper to generate the encrypted password.
# config.pepper = '6885342a6392f7a7595190c28c7b0e757bf17e76dc00e99bc058ce2cb5041a24547abaa7deac97c3f54f5963267637478f45e209f7e5aaa69cf0f0103e47b012'
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming their account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming their account,
# access will be blocked just in the third day. Default is 0.days, meaning
# the user cannot access the website without confirming their account.
# config.allow_unconfirmed_access_for = 2.days
# A period that the user is allowed to confirm their account before their
# token becomes invalid. For example, if set to 3.days, the user can confirm
# their account within 3 days after the mail was sent, but on the fourth day
# their account can't be confirmed with the token any more.
# Default is nil, meaning there is no restriction on how long a user can take
# before confirming their account.
# config.confirm_within = 3.days
# If true, requires any email changes to be confirmed (exactly the same way as
# initial account confirmation) to be applied. Requires additional unconfirmed_email
# db field (see migrations). Until confirmed, new email is stored in
# unconfirmed_email column, and copied to email column on successful confirmation.
config.reconfirmable = true
# Defines which key will be used when confirming an account
# config.confirmation_keys = [ :email ]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# Invalidates all the remember me tokens when the user signs out.
config.expire_all_remember_me_on_sign_out = true
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# secure: true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length.
config.password_length = 8..128
# Email regex used to validate email formats. It simply asserts that
# one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
# config.email_regexp = /\A[^@]+@[^@]+\z/
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# If true, expires auth token on session timeout.
# config.expire_auth_token_on_timeout = false
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [ :email ]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# Warn on the last attempt before the account is locked.
# config.last_attempt_warning = true
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [ :email ]
# Time interval you can reset your password with a reset password key.
# Don't put a too small interval or your users won't have the time to
# change their passwords.
config.reset_password_within = 6.hours
# ==> Configuration for :encryptable
# Allow you to use another encryption algorithm besides bcrypt (default). You can use
# :sha1, :sha512 or encryptors from others authentication tools as :clearance_sha1,
# :authlogic_sha512 (then you should set stretches above to 20 for default behavior)
# and :restful_authentication_sha1 (then you should set stretches to 10, and copy
# REST_AUTH_SITE_KEY to pepper).
#
# Require the `devise-encryptable` gem when using anything other than bcrypt
# config.encryptor = :sha512
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = false
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
# Lists the formats that should be treated as navigational. Formats like
# :html, should redirect to the sign in page when the user does not have
# access, but formats like :xml or :json, should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ['*/*', :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'
# ==> Warden configuration
# If you want to use other strategies, that are not supported by Devise, or
# change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(scope: :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
# When using Devise inside an engine, let's call it `MyEngine`, and this engine
# is mountable, there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: '/my_engine'
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
# When using omniauth, Devise cannot automatically set Omniauth path,
# so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = '/my_engine/users/auth'
end
| 49.05 | 154 | 0.750333 |
08139be88c3b211aa4ba03dddabd24b47a4fed5a | 1,635 | require 'rexml/document'
include KNX4R
###################################################################################
# Create Roles, Users, OrgUnits and Accesses in the database #
###################################################################################
connection = ActiveRecord::Base.connection()
connection.execute("TRUNCATE TABLE roles;")
connection.execute("TRUNCATE TABLE users;")
connection.execute("TRUNCATE TABLE users_roles;")
connection.execute("TRUNCATE TABLE org_units;")
connection.execute("TRUNCATE TABLE accesses;")
# User username:string password:string password_confirmation:string role:int language:string
users_list = [
[ "admin_example", "123456", "123456", :admin, "de" ],
[ "editor_example", "123456", "123456", :editor, "de" ],
[ "observer_example", "123456", "123456", :observer, "en" ]
]
users_list.each do |username, password, password_confirmation, role, language|
user = User.create!( username: username, password: password, password_confirmation: password_confirmation, language: language )
# Add role for rolify
user.add_role role
end
# OrgUnit name:string
prj = KNXproject.load(Rails.root.join('config', 'knx_config.xml').to_s)
prj.org_units.each do |key, value|
OrgUnit.create!( name: value, key: key )
end
# Access user_id:int org_unit_id:int
number_of_org_units = OrgUnit.count
accesses_list = []
(1..4).each do |user_id|
(1..number_of_org_units).each do |org_unit_id|
accesses_list << [user_id, org_unit_id]
end
end
accesses_list.each do |user_id, org_unit_id|
Access.create!( user_id: user_id, org_unit_id: org_unit_id )
end
| 36.333333 | 129 | 0.66055 |
bb6e48f1bfd5170c4bb5ecae147931d256635b1e | 291 | class CreateShiftTypes < ActiveRecord::Migration[4.2]
def self.up
create_table :shift_types do |t|
t.string :name
t.integer :primary_requirement
t.integer :secondary_requirement
t.timestamps
end
end
def self.down
drop_table :shift_types
end
end
| 18.1875 | 53 | 0.690722 |
264ba78a85225ada3de7d1a16c574b4069f00ff5 | 359 | #!/usr/bin/ruby
require_relative 'config'
require_relative 'bearing/bear'
require_relative 'bearing/cmd'
require_relative 'bearing/ipc'
action, *args = ARGV
ipc = Bearing::IPC.new
Bearing::Cmd.validate_action_argument!(action)
Bearing::Cmd.start_app
Bearing::Bear.call_api(action: action, args: args, call_id: ipc.call_id)
puts ipc.wait_for_incoming_data
| 22.4375 | 72 | 0.793872 |
edc826d13c4296bc2e0ce2c2167f5c2c5dd74eb2 | 479 | # frozen_string_literal: true
RSpec.describe Bigcommerce::Sku do
before(:each) { @sku = Bigcommerce::Sku }
let(:params) { 1 }
describe '.count' do
it 'should hit the correct path' do
expect(@sku).to receive(:get).with('products/1/skus/count', {})
@sku.count params
end
end
describe '.count_all' do
it 'should hit the correct path' do
expect(@sku).to receive(:get).with('products/skus/count', {})
@sku.count_all
end
end
end
| 21.772727 | 69 | 0.636743 |
b9d988cb1d2ab614106b76db630fa3a8c0d863f4 | 4,887 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_06_01
module Models
#
# Properties of an application rule.
#
class AzureFirewallApplicationRule
include MsRestAzure
# @return [String] Name of the application rule.
attr_accessor :name
# @return [String] Description of the rule.
attr_accessor :description
# @return [Array<String>] List of source IP addresses for this rule.
attr_accessor :source_addresses
# @return [Array<AzureFirewallApplicationRuleProtocol>] Array of
# ApplicationRuleProtocols.
attr_accessor :protocols
# @return [Array<String>] List of FQDNs for this rule.
attr_accessor :target_fqdns
# @return [Array<String>] List of FQDN Tags for this rule.
attr_accessor :fqdn_tags
# @return [Array<String>] List of source IpGroups for this rule.
attr_accessor :source_ip_groups
#
# Mapper for AzureFirewallApplicationRule class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'AzureFirewallApplicationRule',
type: {
name: 'Composite',
class_name: 'AzureFirewallApplicationRule',
model_properties: {
name: {
client_side_validation: true,
required: false,
serialized_name: 'name',
type: {
name: 'String'
}
},
description: {
client_side_validation: true,
required: false,
serialized_name: 'description',
type: {
name: 'String'
}
},
source_addresses: {
client_side_validation: true,
required: false,
serialized_name: 'sourceAddresses',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
protocols: {
client_side_validation: true,
required: false,
serialized_name: 'protocols',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'AzureFirewallApplicationRuleProtocolElementType',
type: {
name: 'Composite',
class_name: 'AzureFirewallApplicationRuleProtocol'
}
}
}
},
target_fqdns: {
client_side_validation: true,
required: false,
serialized_name: 'targetFqdns',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
fqdn_tags: {
client_side_validation: true,
required: false,
serialized_name: 'fqdnTags',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
source_ip_groups: {
client_side_validation: true,
required: false,
serialized_name: 'sourceIpGroups',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
}
}
}
}
end
end
end
end
| 31.529032 | 89 | 0.448742 |
e8bea52ddfe0b9a084665af35eccba5fe66a3f87 | 3,004 | require 'nokogiri'
module Canvas
module Workflow
module Filters
module InlineColorFilter
CSS = {
vb: {
c: 'color: #008000', # Comment
k: 'color: #0000ff', # Keyword
ch: 'color: #008000', # Comment.Hashbang
cm: 'color: #008000', # Comment.Multiline
cp: 'color: #0000ff', # Comment.Preproc
cpf: 'color: #008000', # Comment.PreprocFile
c1: 'color: #008000', # Comment.Single
cs: 'color: #008000', # Comment.Special
ge: 'font-style: italic', # Generic.Emph
gh: 'font-weight: bold', # Generic.Heading
gp: 'font-weight: bold', # Generic.Prompt
gs: 'font-weight: bold', # Generic.Strong
gu: 'font-weight: bold', # Generic.Subheading
kc: 'color: #0000ff', # Keyword.Constant
kd: 'color: #0000ff', # Keyword.Declaration
kn: 'color: #0000ff', # Keyword.Namespace
kp: 'color: #0000ff', # Keyword.Pseudo
kr: 'color: #0000ff', # Keyword.Reserved
kt: 'color: #2b91af', # Keyword.Type
s: 'color: #a31515', # Literal.String
nc: 'color: #2b91af', # Name.Class
ow: 'color: #0000ff', # Operator.Word
sa: 'color: #a31515', # Literal.String.Affix
sb: 'color: #a31515', # Literal.String.Backtick
sc: 'color: #a31515', # Literal.String.Char
dl: 'color: #a31515', # Literal.String.Delimiter
sd: 'color: #a31515', # Literal.String.Doc
s2: 'color: #a31515', # Literal.String.Double
se: 'color: #a31515', # Literal.String.Escape
sh: 'color: #a31515', # Literal.String.Heredoc
si: 'color: #a31515', # Literal.String.Interpol
sx: 'color: #a31515', # Literal.String.Other
sr: 'color: #a31515', # Literal.String.Regex
s1: 'color: #a31515', # Literal.String.Single
ss: 'color: #a31515' # Literal.String.Symbol
}
}.freeze
def inline_color(raw, lang = 'vb')
doc = Nokogiri::HTML.fragment(raw.encode('UTF-8', :invalid => :replace, :undef => :replace, :replace => ''))
inline = CSS[lang.to_sym]
# convert class to inline color
doc.search("div.language-#{lang} div.highlight span").each do |span|
span['style'] = inline[span['class'].to_sym]
end
doc.inner_html
end
end
end
end
end
Liquid::Template.register_filter(Canvas::Workflow::Filters::InlineColorFilter)
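# A usage sketch: once registered with Liquid as above, the filter can be
# applied in a template to HTML already class-annotated by a highlighter,
# e.g. {{ include.code | inline_color: "vb" }} (the `include.code` variable
# is illustrative, not part of this plugin).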
| 46.215385 | 118 | 0.470373 |
4ada1a862de9dbcb0140e0d290de15ba5adfa3e8 | 889 | cask 'now' do
version '4.0.16'
sha256 'cabf8faf7493f8a0711ceee1cf0b528b9893ef57db0d5d25fc38a2b4ca503eaa'
# github.com/zeit/now-desktop was verified as official when first introduced to the cask
url "https://github.com/zeit/now-desktop/releases/download/#{version}/Now-#{version}-mac.zip"
appcast 'https://github.com/zeit/now-desktop/releases.atom'
name 'Now'
homepage 'https://zeit.co/now'
app 'Now.app'
uninstall delete: '/usr/local/bin/now',
login_item: 'Now',
signal: ['TERM', 'co.zeit.now']
zap trash: [
'~/.now.json',
'~/Library/Application Support/Now',
'~/Library/Caches/co.zeit.now',
'~/Library/Caches/co.zeit.now.ShipIt',
'~/Library/Preferences/co.zeit.now.plist',
'~/Library/Preferences/co.zeit.now.helper.plist',
]
end
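# A brief note: with the legacy cask DSL used above, the token is typically
# installed via `brew cask install now`; the exact command form depends on
# the Homebrew version in use.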
| 34.192308 | 95 | 0.611924 |
bb7f87b40c5682429f67f1ee4d0066cdd21bc4f9 | 177 | class AddDatapackageResourceToDatasources < ActiveRecord::Migration
def change
add_reference :datasources, :datapackage_resource, index: true, foreign_key: true
end
end
| 29.5 | 85 | 0.813559 |
38a3c0d803897c512707f7ffe2e39c1dbf731b35 | 1,123 | class Classifier
def initialize(company)
@raw_data = company
@company_categories = %w[id name website_url category contact_email mailing_list irc_channel tag_line precis
description tags primary_open_source_license image_url image_bg_color gplus_url
twitter_url blog_url application_instructions topic_tags technology_tags proposal_tags
ideas_list contact_method program_year]
end
def dictionary(keywords)
    return 'Please enter an array' unless keywords.is_a? Array
hash = {}
keywords.each do |keyword|
hash[keyword.to_sym] = extractor(keyword)
end
hash
end
private
def extractor(category)
return 'Please enter a string' unless category.is_a? String
index = @company_categories.index(category)
keyword = "\"#{@company_categories[index]}\":"
keyword_index = @raw_data.index(keyword) + keyword.length
next_keyword = "\"#{@company_categories[index + 1]}\":"
next_keyword_index = @raw_data.index(next_keyword) - 2
@raw_data[keyword_index..next_keyword_index]
end
end
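# A minimal usage sketch (the input is hypothetical; it only assumes the raw
# company string contains the keys from @company_categories, in that order):
#
#   classifier = Classifier.new(raw_company_string)
#   classifier.dictionary(%w[name website_url])
#   # => a Hash mapping :name and :website_url to the raw substrings found
#   #    between those keys in the input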
| 34.030303 | 115 | 0.694568 |
acc7c1c26525c3179d42ac02f11a522b37b05365 | 3,443 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20170731211237) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "board_memberships", force: :cascade do |t|
t.integer "member_id"
t.integer "board_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["board_id"], name: "index_board_memberships_on_board_id"
t.index ["member_id"], name: "index_board_memberships_on_member_id"
end
create_table "boards", force: :cascade do |t|
t.integer "company_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["company_id"], name: "index_boards_on_company_id"
end
create_table "companies", force: :cascade do |t|
t.string "name"
t.string "website"
t.float "market_cap"
t.string "ticker_symbol"
t.integer "exchange_id"
t.integer "parent_company_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["exchange_id"], name: "index_companies_on_exchange_id"
t.index ["parent_company_id"], name: "index_companies_on_parent_company_id"
end
create_table "exchanges", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "executives", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "prices", force: :cascade do |t|
t.integer "company_id"
t.float "open"
t.float "high"
t.float "low"
t.float "close"
t.date "price_date"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["company_id"], name: "index_prices_on_company_id"
end
create_table "users", force: :cascade do |t|
t.string "username"
t.string "password"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "watch_list_items", force: :cascade do |t|
t.integer "company_id"
t.float "cost_basis"
t.float "return_basis"
t.date "start_date"
t.date "end_date"
t.integer "watch_list_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["company_id"], name: "index_watch_list_items_on_company_id"
t.index ["watch_list_id"], name: "index_watch_list_items_on_watch_list_id"
end
create_table "watch_lists", force: :cascade do |t|
t.string "name"
t.integer "user_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["user_id"], name: "index_watch_lists_on_user_id"
end
end
| 34.43 | 86 | 0.713622 |
217a2206bb35740bcaaefb3dd1d68900aea72c20 | 399 | require('spec_helper')
describe Venue do
  it { should have_and_belong_to_many(:bands).join_table('bands_venues') }
it ('returns a capitalized name') do
new_venue = Venue.create({name: "brooklyn bowl"})
expect(new_venue.name).to(eq("Brooklyn Bowl"))
end
it ('validates the presence of a name') do
expect(Venue.create({name: ""}).valid?).to(eq(false))
end
end
| 22.166667 | 57 | 0.674185 |
08f27676bf4de96fdc1fd3477736dfd0432a17f3 | 1,451 | require "spec_helper"
describe Array do
context "#all_empty?" do
it "returns true if all the elements of the Array are empty" do
expect(["", "", ""].all_empty?).to be true
end
it "returns false if some of the elements of the Array are not empty" do
expect([1, "", Object.new, "", :a].all_empty?).to be false
end
it "returns true for an empty Array" do
expect([].all_empty?).to be true
end
end
context "#all_same?" do
it "returns true if all the elements of the Array are the same" do
expect(["A", "A", "A"].all_same?).to be true
end
it "returns false if some of the elements of the Array are not the same" do
expect([1, Object.new, 1].all_same?).to be false
end
it "returns true for an empty Array" do
expect([].all_same?).to be true
end
end
context "#any_empty?" do
it "returns true if any element of the Array is empty" do
expect(["", 1, Object.new].any_empty?).to be true
end
it "returns false if any element of the Array is not empty" do
expect(["A", :a, 1].any_empty?).to be false
end
end
context "#none_empty?" do
it "returns true if no element of the Array is empty" do
expect(["A", 1, Object.new].none_empty?).to be true
end
it "returns false if any element of the Array is empty" do
expect(["", :a, 1].none_empty?).to be false
end
end
end
| 27.903846 | 79 | 0.611992 |
0132dcae66763f2c68b50f430ec2c048dc9b741d | 122 | name 'codedeploy-agent'
recipe 'codedeploy-agent::default', 'Fetches, installs, and starts the AWS CodeDeploy host agent'
| 40.666667 | 97 | 0.786885 |
2800df390e9e5017908da8eb4c1c2a6efa41f835 | 1,702 | module Vagrant
# This represents the state of a given machine. This is a very basic
# class that simply stores a short and long description of the state
# of a machine.
#
# The state also stores a state "id" which can be used as a unique
# identifier for a state. This should be a symbol. This allows internal
# code to compare state such as ":not_created" instead of using
# string comparison.
#
# The short description should be a single word description of the
# state of the machine such as "running" or "not created".
#
# The long description can span multiple lines describing what the
# state actually means.
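  #
  # A brief construction sketch (the values are illustrative only):
  #
  #   state = Vagrant::MachineState.new(:running, "running",
  #     "The machine is up and running.")
  #   state.id                # => :running
  #   state.short_description # => "running"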
class MachineState
# This is a special ID that can be set for the state ID that
# tells Vagrant that the machine is not created. If this is the
# case, then Vagrant will set the ID to nil which will automatically
# clean out the machine data directory.
NOT_CREATED_ID = :not_created
# Unique ID for this state.
#
# @return [Symbol]
attr_reader :id
# Short description for this state.
#
# @return [String]
attr_reader :short_description
# Long description for this state.
#
# @return [String]
attr_reader :long_description
# Creates a new instance to represent the state of a machine.
#
# @param [Symbol] id Unique identifier for this state.
# @param [String] short Short (preferably one-word) description of
# the state.
# @param [String] long Long description (can span multiple lines)
# of the state.
def initialize(id, short, long)
@id = id
@short_description = short
@long_description = long
end
end
end
| 32.730769 | 73 | 0.679788 |
01f6bdbcbf871b5526171a7a1b5cc802a247c6f7 | 109 | 5.times do
Channel.create({
# title: Faker::Book.title,
# body: Faker::Lorem.sentence
})
end
| 15.571429 | 35 | 0.59633 |
6a51b316dd1eda23c589c3290596038f82b3fc16 | 13,194 | require 'spec_helper'
require 'ddtrace'
require 'mongo'
RSpec.describe 'Mongo::Client instrumentation' do
let(:tracer) { get_test_tracer }
let(:client) { Mongo::Client.new(["#{host}:#{port}"], client_options) }
let(:client_options) { { database: database } }
let(:host) { ENV.fetch('TEST_MONGODB_HOST', '127.0.0.1') }
let(:port) { ENV.fetch('TEST_MONGODB_PORT', 27017) }
let(:database) { 'test' }
let(:collection) { :artists }
let(:pin) { Datadog::Pin.get_from(client) }
let(:spans) { tracer.writer.spans(:keep) }
let(:span) { spans.first }
def discard_spans!
tracer.writer.spans
end
before(:each) do
# Disable Mongo logging
Mongo::Logger.logger.level = ::Logger::WARN
Datadog.configure do |c|
c.use :mongo
end
    # Have to manually update this because it's still
# using global pin instead of configuration.
# Remove this when we remove the pin.
pin.tracer = tracer
end
# Clear data between tests
let(:drop_database?) { true }
after(:each) do
client.database.drop if drop_database?
end
it 'evaluates the block given to the constructor' do
expect { |b| Mongo::Client.new(["#{host}:#{port}"], client_options, &b) }.to yield_control
end
context 'pin' do
it 'has the correct attributes' do
expect(pin.service).to eq('mongodb')
expect(pin.app).to eq('mongodb')
expect(pin.app_type).to eq('db')
end
context 'when the service is changed' do
let(:service) { 'mongodb-primary' }
before(:each) { pin.service = service }
it 'produces spans with the correct service' do
client[collection].insert_one(name: 'FKA Twigs')
expect(spans).to have(1).items
expect(spans.first.service).to eq(service)
end
end
context 'when the tracer is disabled' do
before(:each) { pin.tracer.enabled = false }
      it 'does not produce spans' do
client[collection].insert_one(name: 'FKA Twigs')
expect(spans).to be_empty
end
end
end
# rubocop:disable Metrics/LineLength
describe 'tracing' do
shared_examples_for 'a MongoDB trace' do
it 'has basic properties' do
expect(spans).to have(1).items
expect(span.service).to eq(pin.service)
expect(span.span_type).to eq('mongodb')
expect(span.get_tag('mongodb.db')).to eq(database)
expect(span.get_tag('mongodb.collection')).to eq(collection.to_s)
expect(span.get_tag('out.host')).to eq(host)
expect(span.get_tag('out.port')).to eq(port.to_s)
end
end
describe '#insert_one operation' do
before(:each) { client[collection].insert_one(params) }
context 'for a basic document' do
let(:params) { { name: 'FKA Twigs' } }
it_behaves_like 'a MongoDB trace'
it 'has operation-specific properties' do
expect(span.resource).to eq("{\"operation\"=>:insert, \"database\"=>\"#{database}\", \"collection\"=>\"#{collection}\", \"documents\"=>[{:name=>\"?\"}], \"ordered\"=>\"?\"}")
expect(span.get_tag('mongodb.rows')).to eq('1')
end
end
context 'for a document with an array' do
let(:params) { { name: 'Steve', hobbies: ['hiking', 'tennis', 'fly fishing'] } }
let(:collection) { :people }
it_behaves_like 'a MongoDB trace'
it 'has operation-specific properties' do
expect(span.resource).to eq("{\"operation\"=>:insert, \"database\"=>\"#{database}\", \"collection\"=>\"#{collection}\", \"documents\"=>[{:name=>\"?\", :hobbies=>[\"?\"]}], \"ordered\"=>\"?\"}")
expect(span.get_tag('mongodb.rows')).to eq('1')
end
end
end
describe '#insert_many operation' do
before(:each) { client[collection].insert_many(params) }
context 'for documents with arrays' do
let(:params) do
[
{ name: 'Steve', hobbies: ['hiking', 'tennis', 'fly fishing'] },
{ name: 'Sally', hobbies: ['skiing', 'stamp collecting'] }
]
end
let(:collection) { :people }
it_behaves_like 'a MongoDB trace'
it 'has operation-specific properties' do
expect(span.resource).to eq("{\"operation\"=>:insert, \"database\"=>\"#{database}\", \"collection\"=>\"#{collection}\", \"documents\"=>[{:name=>\"?\", :hobbies=>[\"?\"]}, \"?\"], \"ordered\"=>\"?\"}")
expect(span.get_tag('mongodb.rows')).to eq('2')
end
end
end
describe '#find_all operation' do
let(:collection) { :people }
before(:each) do
# Insert a document
client[collection].insert_one(name: 'Steve', hobbies: ['hiking', 'tennis', 'fly fishing'])
discard_spans!
# Do #find_all operation
client[collection].find.each do |document|
# => Yields a BSON::Document.
end
end
it_behaves_like 'a MongoDB trace'
it 'has operation-specific properties' do
expect(span.resource).to eq("{\"operation\"=>\"find\", \"database\"=>\"#{database}\", \"collection\"=>\"#{collection}\", \"filter\"=>{}}")
expect(span.get_tag('mongodb.rows')).to be nil
end
end
describe '#find operation' do
let(:collection) { :people }
before(:each) do
# Insert a document
client[collection].insert_one(name: 'Steve', hobbies: ['hiking'])
discard_spans!
# Do #find operation
result = client[collection].find(name: 'Steve').first[:hobbies]
expect(result).to eq(['hiking'])
end
it_behaves_like 'a MongoDB trace'
it 'has operation-specific properties' do
expect(span.resource).to eq("{\"operation\"=>\"find\", \"database\"=>\"#{database}\", \"collection\"=>\"#{collection}\", \"filter\"=>{\"name\"=>\"?\"}}")
expect(span.get_tag('mongodb.rows')).to be nil
end
end
describe '#update_one operation' do
let(:collection) { :people }
before(:each) do
# Insert a document
client[collection].insert_one(name: 'Sally', hobbies: ['skiing', 'stamp collecting'])
discard_spans!
# Do #update_one operation
client[collection].update_one({ name: 'Sally' }, '$set' => { 'phone_number' => '555-555-5555' })
end
after(:each) do
# Verify correctness of the operation
expect(client[collection].find(name: 'Sally').first[:phone_number]).to eq('555-555-5555')
end
it_behaves_like 'a MongoDB trace'
it 'has operation-specific properties' do
expect(span.resource).to eq("{\"operation\"=>:update, \"database\"=>\"#{database}\", \"collection\"=>\"#{collection}\", \"updates\"=>[{\"q\"=>{\"name\"=>\"?\"}, \"u\"=>{\"$set\"=>{\"phone_number\"=>\"?\"}}, \"multi\"=>\"?\", \"upsert\"=>\"?\"}], \"ordered\"=>\"?\"}")
expect(span.get_tag('mongodb.rows')).to eq('1')
end
end
describe '#update_many operation' do
let(:collection) { :people }
let(:documents) do
[
{ name: 'Steve', hobbies: ['hiking', 'tennis', 'fly fishing'] },
{ name: 'Sally', hobbies: ['skiing', 'stamp collecting'] }
]
end
before(:each) do
# Insert documents
client[collection].insert_many(documents)
discard_spans!
# Do #update_many operation
client[collection].update_many({}, '$set' => { 'phone_number' => '555-555-5555' })
end
after(:each) do
# Verify correctness of the operation
documents.each do |d|
expect(client[collection].find(name: d[:name]).first[:phone_number]).to eq('555-555-5555')
end
end
it_behaves_like 'a MongoDB trace'
it 'has operation-specific properties' do
expect(span.resource).to eq("{\"operation\"=>:update, \"database\"=>\"#{database}\", \"collection\"=>\"#{collection}\", \"updates\"=>[{\"q\"=>{}, \"u\"=>{\"$set\"=>{\"phone_number\"=>\"?\"}}, \"multi\"=>\"?\", \"upsert\"=>\"?\"}], \"ordered\"=>\"?\"}")
expect(span.get_tag('mongodb.rows')).to eq('2')
end
end
describe '#delete_one operation' do
let(:collection) { :people }
before(:each) do
# Insert a document
client[collection].insert_one(name: 'Sally', hobbies: ['skiing', 'stamp collecting'])
discard_spans!
# Do #delete_one operation
client[collection].delete_one(name: 'Sally')
end
after(:each) do
# Verify correctness of the operation
expect(client[collection].find(name: 'Sally').count).to eq(0)
end
it_behaves_like 'a MongoDB trace'
it 'has operation-specific properties' do
expect(span.resource).to eq("{\"operation\"=>:delete, \"database\"=>\"#{database}\", \"collection\"=>\"#{collection}\", \"deletes\"=>[{\"q\"=>{\"name\"=>\"?\"}, \"limit\"=>\"?\"}], \"ordered\"=>\"?\"}")
expect(span.get_tag('mongodb.rows')).to eq('1')
end
end
describe '#delete_many operation' do
let(:collection) { :people }
let(:documents) do
[
{ name: 'Steve', hobbies: ['hiking', 'tennis', 'fly fishing'] },
{ name: 'Sally', hobbies: ['skiing', 'stamp collecting'] }
]
end
before(:each) do
# Insert documents
client[collection].insert_many(documents)
discard_spans!
# Do #delete_many operation
client[collection].delete_many(name: /$S*/)
end
after(:each) do
# Verify correctness of the operation
documents.each do |d|
expect(client[collection].find(name: d[:name]).count).to eq(0)
end
end
it_behaves_like 'a MongoDB trace'
it 'has operation-specific properties' do
expect(span.resource).to eq("{\"operation\"=>:delete, \"database\"=>\"#{database}\", \"collection\"=>\"#{collection}\", \"deletes\"=>[{\"q\"=>{\"name\"=>\"?\"}, \"limit\"=>\"?\"}], \"ordered\"=>\"?\"}")
expect(span.get_tag('mongodb.rows')).to eq('2')
end
end
describe '#drop operation' do
let(:collection) { 1 } # Because drop operation doesn't have a collection
before(:each) { client.database.drop }
it_behaves_like 'a MongoDB trace'
it 'has operation-specific properties' do
expect(span.resource).to eq("{\"operation\"=>:dropDatabase, \"database\"=>\"#{database}\", \"collection\"=>1}")
expect(span.get_tag('mongodb.rows')).to be nil
end
end
describe 'a failed query' do
before(:each) { client[:artists].drop }
it_behaves_like 'a MongoDB trace'
it 'has operation-specific properties' do
expect(span.resource).to eq("{\"operation\"=>:drop, \"database\"=>\"#{database}\", \"collection\"=>\"#{collection}\"}")
expect(span.get_tag('mongodb.rows')).to be nil
expect(span.status).to eq(1)
expect(span.get_tag('error.msg')).to eq('ns not found (26)')
end
context 'that triggers #failed before #started' do
subject(:failed_event) { subscriber.failed(event) }
let(:event) { instance_double(Mongo::Monitoring::Event::CommandFailed, request_id: double('request_id')) }
let(:subscriber) { Datadog::Contrib::MongoDB::MongoCommandSubscriber.new }
# Clear the thread variable out, as if #started has never run.
before(:each) { Thread.current[:datadog_mongo_span] = nil }
it { expect { failed_event }.to_not raise_error }
end
end
describe 'with LDAP/SASL authentication' do
let(:client_options) do
super().merge(auth_mech: :plain)
end
context 'which fails' do
let(:insert_span) { spans.first }
let(:auth_span) { spans.last }
let(:drop_database?) { false }
before(:each) do
begin
# Insert a document
client[collection].insert_one(name: 'Steve', hobbies: ['hiking'])
rescue Mongo::Auth::Unauthorized
# Expect this to create an unauthorized error
nil
end
end
it 'produces spans for command and authentication' do
# With LDAP/SASL, Mongo will run a "saslStart" command
# after the original command starts but before it finishes.
# Thus we should expect it to create an authentication span
# that is a child of the original command span.
expect(spans).to have(2).items
expect(insert_span.name).to eq('mongo.cmd')
expect(insert_span.resource).to match(/"operation"\s*=>\s*:insert/)
expect(insert_span.status).to eq(1)
expect(insert_span.get_tag('error.type')).to eq('Mongo::Monitoring::Event::CommandFailed')
expect(insert_span.get_tag('error.msg')).to eq('User is not authorized to access test.')
expect(auth_span.name).to eq('mongo.cmd')
expect(auth_span.resource).to match(/"operation"\s*=>\s*:saslStart/)
expect(auth_span.status).to eq(1)
expect(auth_span.get_tag('error.type')).to eq('Mongo::Monitoring::Event::CommandFailed')
expect(auth_span.get_tag('error.msg')).to eq('Unsupported mechanism PLAIN (2)')
end
end
end
end
end
| 34.904762 | 275 | 0.58898 |
280fded9493b862ba456c0d78adfe1d7e7082c51 | 1,314 | # frozen_string_literal: true
# Copyright 2016-2019 New Context, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Kitchen
module Terraform
module CommandFlag
# LockTimeout is the class of objects which control the duration in which to retry the acquisition of the state
# lock.
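      #
      # A brief usage sketch (the duration value is arbitrary):
      #
      #   Kitchen::Terraform::CommandFlag::LockTimeout.new(duration: 10).to_s
      #   # => "-lock-timeout=10s"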
class LockTimeout
# #initialize prepares a new instance of the class.
#
# @param duration [Integer] the duration in seconds.
# @return [Kitchen::Terraform::CommandFlag::LockTimeout]
def initialize(duration:)
self.duration = duration
end
        # @return [String] the lock timeout flag.
def to_s
"-lock-timeout=#{duration}s"
end
private
attr_accessor :duration
end
end
end
end
| 30.55814 | 117 | 0.687976 |
ff345bf7a368d104a2cc49ff54a413c1db394547 | 71,671 | # frozen_string_literal: true
# -*- coding: utf-8 -*-
#--
# Copyright 2006 by Chad Fowler, Rich Kilmer, Jim Weirich and others.
# All rights reserved.
# See LICENSE.txt for permissions.
#++
require 'rubygems/deprecate'
require 'rubygems/basic_specification'
require 'rubygems/stub_specification'
require 'rubygems/specification_policy'
require 'rubygems/util/list'
##
# The Specification class contains the information for a gem. Typically
# defined in a .gemspec file or a Rakefile, and looks like this:
#
# Gem::Specification.new do |s|
# s.name = 'example'
# s.version = '0.1.0'
# s.licenses = ['MIT']
# s.summary = "This is an example!"
# s.description = "Much longer explanation of the example!"
# s.authors = ["Ruby Coder"]
# s.email = '[email protected]'
# s.files = ["lib/example.rb"]
# s.homepage = 'https://rubygems.org/gems/example'
# s.metadata = { "source_code_uri" => "https://github.com/example/example" }
# end
#
# Starting in RubyGems 2.0, a Specification can hold arbitrary
# metadata. See #metadata for restrictions on the format and size of metadata
# items you may add to a specification.
class Gem::Specification < Gem::BasicSpecification
extend Gem::Deprecate
# REFACTOR: Consider breaking out this version stuff into a separate
# module. There's enough special stuff around it that it may justify
# a separate class.
##
# The version number of a specification that does not specify one
# (i.e. RubyGems 0.7 or earlier).
NONEXISTENT_SPECIFICATION_VERSION = -1
##
# The specification version applied to any new Specification instances
# created. This should be bumped whenever something in the spec format
# changes.
#
# Specification Version History:
#
# spec ruby
# ver ver yyyy-mm-dd description
# -1 <0.8.0 pre-spec-version-history
# 1 0.8.0 2004-08-01 Deprecated "test_suite_file" for "test_files"
# "test_file=x" is a shortcut for "test_files=[x]"
# 2 0.9.5 2007-10-01 Added "required_rubygems_version"
# Now forward-compatible with future versions
# 3 1.3.2 2009-01-03 Added Fixnum validation to specification_version
# 4 1.9.0 2011-06-07 Added metadata
#--
# When updating this number, be sure to also update #to_ruby.
#
# NOTE RubyGems < 1.2 cannot load specification versions > 2.
CURRENT_SPECIFICATION_VERSION = 4 # :nodoc:
##
# An informal list of changes to the specification. The highest-valued
# key should be equal to the CURRENT_SPECIFICATION_VERSION.
SPECIFICATION_VERSION_HISTORY = { # :nodoc:
-1 => ['(RubyGems versions up to and including 0.7 did not have versioned specifications)'],
1 => [
'Deprecated "test_suite_file" in favor of the new, but equivalent, "test_files"',
'"test_file=x" is a shortcut for "test_files=[x]"',
],
2 => [
'Added "required_rubygems_version"',
'Now forward-compatible with future versions',
],
3 => [
'Added Fixnum validation to the specification_version',
],
4 => [
'Added sandboxed freeform metadata to the specification version.',
],
}.freeze
MARSHAL_FIELDS = { # :nodoc:
-1 => 16,
1 => 16,
2 => 16,
3 => 17,
4 => 18,
}.freeze
today = Time.now.utc
TODAY = Time.utc(today.year, today.month, today.day) # :nodoc:
# rubocop:disable Style/MutableConstant
LOAD_CACHE = {} # :nodoc:
# rubocop:enable Style/MutableConstant
LOAD_CACHE_MUTEX = Thread::Mutex.new
private_constant :LOAD_CACHE if defined? private_constant
VALID_NAME_PATTERN = /\A[a-zA-Z0-9\.\-\_]+\z/.freeze # :nodoc:
# :startdoc:
##
# List of attribute names: [:name, :version, ...]
@@required_attributes = [:rubygems_version,
:specification_version,
:name,
:version,
:date,
:summary,
:require_paths]
##
# Map of attribute names to default values.
@@default_value = {
:authors => [],
:autorequire => nil,
:bindir => 'bin',
:cert_chain => [],
:date => nil,
:dependencies => [],
:description => nil,
:email => nil,
:executables => [],
:extensions => [],
:extra_rdoc_files => [],
:files => [],
:homepage => nil,
:licenses => [],
:metadata => {},
:name => nil,
:platform => Gem::Platform::RUBY,
:post_install_message => nil,
:rdoc_options => [],
:require_paths => ['lib'],
:required_ruby_version => Gem::Requirement.default,
:required_rubygems_version => Gem::Requirement.default,
:requirements => [],
:rubygems_version => Gem::VERSION,
:signing_key => nil,
:specification_version => CURRENT_SPECIFICATION_VERSION,
:summary => nil,
:test_files => [],
:version => nil,
}.freeze
# rubocop:disable Style/MutableConstant
INITIALIZE_CODE_FOR_DEFAULTS = { } # :nodoc:
# rubocop:enable Style/MutableConstant
@@default_value.each do |k,v|
INITIALIZE_CODE_FOR_DEFAULTS[k] = case v
when [], {}, true, false, nil, Numeric, Symbol
v.inspect
when String
v.dump
when Numeric
"default_value(:#{k})"
else
"default_value(:#{k}).dup"
end
end
@@attributes = @@default_value.keys.sort_by {|s| s.to_s }
@@array_attributes = @@default_value.reject {|k,v| v != [] }.keys
@@nil_attributes, @@non_nil_attributes = @@default_value.keys.partition do |k|
@@default_value[k].nil?
end
@@stubs = nil
@@stubs_by_name = {}
# Sentinel object to represent "not found" stubs
NOT_FOUND = Struct.new(:to_spec, :this).new # :nodoc:
@@spec_with_requirable_file = {}
@@active_stub_with_requirable_file = {}
# Tracking removed method calls to warn users during build time.
REMOVED_METHODS = [:rubyforge_project=].freeze # :nodoc:
def removed_method_calls
@removed_method_calls ||= []
end
######################################################################
# :section: Required gemspec attributes
##
# This gem's name.
#
# Usage:
#
# spec.name = 'rake'
attr_accessor :name
##
# This gem's version.
#
# The version string can contain numbers and periods, such as +1.0.0+.
# A gem is a 'prerelease' gem if the version has a letter in it, such as
# +1.0.0.pre+.
#
# Usage:
#
# spec.version = '0.4.1'
attr_reader :version
##
# A short summary of this gem's description. Displayed in `gem list -d`.
#
# The #description should be more detailed than the summary.
#
# Usage:
#
# spec.summary = "This is a small summary of my gem"
attr_reader :summary
##
# Files included in this gem. You cannot append to this accessor, you must
# assign to it.
#
# Only add files you can require to this list, not directories, etc.
#
# Directories are automatically stripped from this list when building a gem,
# other non-files cause an error.
#
# Usage:
#
# require 'rake'
# spec.files = FileList['lib/**/*.rb',
# 'bin/*',
# '[A-Z]*'].to_a
#
# # or without Rake...
# spec.files = Dir['lib/**/*.rb'] + Dir['bin/*']
# spec.files += Dir['[A-Z]*']
# spec.files.reject! { |fn| fn.include? "CVS" }
def files
# DO NOT CHANGE TO ||= ! This is not a normal accessor. (yes, it sucks)
# DOC: Why isn't it normal? Why does it suck? How can we fix this?
@files = [@files,
@test_files,
add_bindir(@executables),
@extra_rdoc_files,
@extensions,
].flatten.compact.uniq.sort
end
##
# A list of authors for this gem.
#
# Alternatively, a single author can be specified by assigning a string to
# `spec.author`
#
# Usage:
#
# spec.authors = ['John Jones', 'Mary Smith']
def authors=(value)
@authors = Array(value).flatten.grep(String)
end
######################################################################
# :section: Recommended gemspec attributes
##
# A long description of this gem
#
# The description should be more detailed than the summary but not
# excessively long. A few paragraphs is a recommended length with no
# examples or formatting.
#
# Usage:
#
# spec.description = <<-EOF
# Rake is a Make-like program implemented in Ruby. Tasks and
# dependencies are specified in standard Ruby syntax.
# EOF
attr_reader :description
##
# A contact email address (or addresses) for this gem
#
# Usage:
#
# spec.email = '[email protected]'
# spec.email = ['[email protected]', '[email protected]']
attr_accessor :email
##
# The URL of this gem's home page
#
# Usage:
#
# spec.homepage = 'https://github.com/ruby/rake'
attr_accessor :homepage
##
# The license for this gem.
#
# The license must be no more than 64 characters.
#
# This should just be the name of your license. The full text of the license
# should be inside of the gem (at the top level) when you build it.
#
# The simplest way, is to specify the standard SPDX ID
# https://spdx.org/licenses/ for the license.
# Ideally you should pick one that is OSI (Open Source Initiative)
# http://opensource.org/licenses/alphabetical approved.
#
# The most commonly used OSI approved licenses are MIT and Apache-2.0.
# GitHub also provides a license picker at http://choosealicense.com/.
#
# You should specify a license for your gem so that people know how they are
# permitted to use it, and any restrictions you're placing on it. Not
# specifying a license means all rights are reserved; others have no rights
# to use the code for any purpose.
#
# You can set multiple licenses with #licenses=
#
# Usage:
# spec.license = 'MIT'
def license=(o)
self.licenses = [o]
end
##
# The license(s) for the library.
#
# Each license must be a short name, no more than 64 characters.
#
# This should just be the name of your license. The full
# text of the license should be inside of the gem when you build it.
#
# See #license= for more discussion
#
# Usage:
# spec.licenses = ['MIT', 'GPL-2.0']
def licenses=(licenses)
@licenses = Array licenses
end
##
# The metadata holds extra data for this gem that may be useful to other
# consumers and is settable by gem authors.
#
# Metadata items have the following restrictions:
#
# * The metadata must be a Hash object
# * All keys and values must be Strings
# * Keys can be a maximum of 128 bytes and values can be a maximum of 1024
# bytes
# * All strings must be UTF-8, no binary data is allowed
#
# You can use metadata to specify links to your gem's homepage, codebase,
# documentation, wiki, mailing list, issue tracker and changelog.
#
# s.metadata = {
# "bug_tracker_uri" => "https://example.com/user/bestgemever/issues",
# "changelog_uri" => "https://example.com/user/bestgemever/CHANGELOG.md",
# "documentation_uri" => "https://www.example.info/gems/bestgemever/0.0.1",
# "homepage_uri" => "https://bestgemever.example.io",
# "mailing_list_uri" => "https://groups.example.com/bestgemever",
# "source_code_uri" => "https://example.com/user/bestgemever",
# "wiki_uri" => "https://example.com/user/bestgemever/wiki"
# "funding_uri" => "https://example.com/donate"
# }
#
# These links will be used on your gem's page on rubygems.org and must pass
# validation against following regex.
#
# %r{\Ahttps?:\/\/([^\s:@]+:[^\s:@]*@)?[A-Za-z\d\-]+(\.[A-Za-z\d\-]+)+\.?(:\d{1,5})?([\/?]\S*)?\z}
attr_accessor :metadata
######################################################################
# :section: Optional gemspec attributes
##
# Singular (alternative) writer for #authors
#
# Usage:
#
# spec.author = 'John Jones'
def author=(o)
self.authors = [o]
end
##
# The path in the gem for executable scripts. Usually 'bin'
#
# Usage:
#
# spec.bindir = 'bin'
attr_accessor :bindir
##
# The certificate chain used to sign this gem. See Gem::Security for
# details.
attr_accessor :cert_chain
##
# A message that gets displayed after the gem is installed.
#
# Usage:
#
# spec.post_install_message = "Thanks for installing!"
attr_accessor :post_install_message
##
# The platform this gem runs on.
#
# This is usually Gem::Platform::RUBY or Gem::Platform::CURRENT.
#
# Most gems contain pure Ruby code; they should simply leave the default
# value in place. Some gems contain C (or other) code to be compiled into a
# Ruby "extension". The gem should leave the default value in place unless
# the code will only compile on a certain type of system. Some gems consist
# of pre-compiled code ("binary gems"). It's especially important that they
# set the platform attribute appropriately. A shortcut is to set the
# platform to Gem::Platform::CURRENT, which will cause the gem builder to set
# the platform to the appropriate value for the system on which the build is
# being performed.
#
# If this attribute is set to a non-default value, it will be included in
# the filename of the gem when it is built such as:
# nokogiri-1.6.0-x86-mingw32.gem
#
# Usage:
#
# spec.platform = Gem::Platform.local
def platform=(platform)
if @original_platform.nil? or
@original_platform == Gem::Platform::RUBY
@original_platform = platform
end
case platform
when Gem::Platform::CURRENT then
@new_platform = Gem::Platform.local
@original_platform = @new_platform.to_s
when Gem::Platform then
@new_platform = platform
# legacy constants
when nil, Gem::Platform::RUBY then
@new_platform = Gem::Platform::RUBY
when 'mswin32' then # was Gem::Platform::WIN32
@new_platform = Gem::Platform.new 'x86-mswin32'
when 'i586-linux' then # was Gem::Platform::LINUX_586
@new_platform = Gem::Platform.new 'x86-linux'
when 'powerpc-darwin' then # was Gem::Platform::DARWIN
@new_platform = Gem::Platform.new 'ppc-darwin'
else
@new_platform = Gem::Platform.new platform
end
@platform = @new_platform.to_s
invalidate_memoized_attributes
@new_platform
end
##
# Paths in the gem to add to <code>$LOAD_PATH</code> when this gem is
# activated.
#--
# See also #require_paths
#++
# If you have an extension you do not need to add <code>"ext"</code> to the
# require path, the extension build process will copy the extension files
# into "lib" for you.
#
# The default value is <code>"lib"</code>
#
# Usage:
#
# # If all library files are in the root directory...
# spec.require_paths = ['.']
def require_paths=(val)
@require_paths = Array(val)
end
##
# The version of Ruby required by this gem
attr_reader :required_ruby_version
##
# The RubyGems version required by this gem
attr_reader :required_rubygems_version
##
# The version of RubyGems used to create this gem.
#
# Do not set this, it is set automatically when the gem is packaged.
attr_accessor :rubygems_version
##
# The key used to sign this gem. See Gem::Security for details.
attr_accessor :signing_key
##
# Adds a development dependency named +gem+ with +requirements+ to this
# gem.
#
# Usage:
#
# spec.add_development_dependency 'example', '~> 1.1', '>= 1.1.4'
#
# Development dependencies aren't installed by default and aren't
# activated when a gem is required.
def add_development_dependency(gem, *requirements)
add_dependency_with_type(gem, :development, requirements)
end
##
# Adds a runtime dependency named +gem+ with +requirements+ to this gem.
#
# Usage:
#
# spec.add_runtime_dependency 'example', '~> 1.1', '>= 1.1.4'
def add_runtime_dependency(gem, *requirements)
if requirements.uniq.size != requirements.size
warn "WARNING: duplicated #{gem} dependency #{requirements}"
end
add_dependency_with_type(gem, :runtime, requirements)
end
##
# Executables included in the gem.
#
# For example, the rake gem has rake as an executable. You don’t specify the
# full path (as in bin/rake); all application-style files are expected to be
# found in bindir. These files must be executable Ruby files. Files that
# use bash or other interpreters will not work.
#
# Executables included may only be ruby scripts, not scripts for other
# languages or compiled binaries.
#
# Usage:
#
# spec.executables << 'rake'
def executables
@executables ||= []
end
##
# Extensions to build when installing the gem, specifically the paths to
# extconf.rb-style files used to compile extensions.
#
# These files will be run when the gem is installed, causing the C (or
# whatever) code to be compiled on the user’s machine.
#
# Usage:
#
# spec.extensions << 'ext/rmagic/extconf.rb'
#
# See Gem::Ext::Builder for information about writing extensions for gems.
def extensions
@extensions ||= []
end
##
# Extra files to add to RDoc such as README or doc/examples.txt
#
# When the user elects to generate the RDoc documentation for a gem (typically
# at install time), all the library files are sent to RDoc for processing.
# This option allows you to have some non-code files included for a more
# complete set of documentation.
#
# Usage:
#
# spec.extra_rdoc_files = ['README', 'doc/user-guide.txt']
def extra_rdoc_files
@extra_rdoc_files ||= []
end
##
# The version of RubyGems that installed this gem. Returns
# <code>Gem::Version.new(0)</code> for gems installed by versions earlier
# than RubyGems 2.2.0.
def installed_by_version # :nodoc:
@installed_by_version ||= Gem::Version.new(0)
end
##
# Sets the version of RubyGems that installed this gem. See also
# #installed_by_version.
def installed_by_version=(version) # :nodoc:
@installed_by_version = Gem::Version.new version
end
##
# Specifies the rdoc options to be used when generating API documentation.
#
# Usage:
#
# spec.rdoc_options << '--title' << 'Rake -- Ruby Make' <<
# '--main' << 'README' <<
# '--line-numbers'
def rdoc_options
@rdoc_options ||= []
end
##
# The version of Ruby required by this gem. The ruby version can be
# specified to the patch-level:
#
# $ ruby -v -e 'p Gem.ruby_version'
# ruby 2.0.0p247 (2013-06-27 revision 41674) [x86_64-darwin12.4.0]
# #<Gem::Version "2.0.0.247">
#
# Prereleases can also be specified.
#
# Usage:
#
# # This gem will work with 1.8.6 or greater...
# spec.required_ruby_version = '>= 1.8.6'
#
# # Only with final releases of major version 2 where minor version is at least 3
# spec.required_ruby_version = '~> 2.3'
#
# # Only prereleases or final releases after 2.6.0.preview2
# spec.required_ruby_version = '> 2.6.0.preview2'
#
# # This gem will work with 2.3.0 or greater, including major version 3, but lesser than 4.0.0
# spec.required_ruby_version = '>= 2.3', '< 4'
def required_ruby_version=(req)
@required_ruby_version = Gem::Requirement.create req
end
##
# The RubyGems version required by this gem
def required_rubygems_version=(req)
@required_rubygems_version = Gem::Requirement.create req
end
##
# Lists the external (to RubyGems) requirements that must be met for this gem
# to work. It's simply information for the user.
#
# Usage:
#
# spec.requirements << 'libmagick, v6.0'
# spec.requirements << 'A good graphics card'
def requirements
@requirements ||= []
end
##
# A collection of unit test files. They will be loaded as unit tests when
# the user requests a gem to be unit tested.
#
# Usage:
# spec.test_files = Dir.glob('test/tc_*.rb')
# spec.test_files = ['tests/test-suite.rb']
def test_files=(files) # :nodoc:
@test_files = Array files
end
######################################################################
# :section: Specification internals
##
# True when this gemspec has been activated. This attribute is not persisted.
attr_accessor :activated
alias :activated? :activated
##
# Autorequire was used by old RubyGems to automatically require a file.
#
# Deprecated: It is neither supported nor functional.
attr_accessor :autorequire # :nodoc:
##
# Sets the default executable for this gem.
#
# Deprecated: You must now specify the executable name to Gem.bin_path.
attr_writer :default_executable
rubygems_deprecate :default_executable=
##
# Allows deinstallation of gems with legacy platforms.
attr_writer :original_platform # :nodoc:
##
# The Gem::Specification version of this gemspec.
#
# Do not set this, it is set automatically when the gem is packaged.
attr_accessor :specification_version
def self._all # :nodoc:
unless defined?(@@all) && @@all
@@all = stubs.map(&:to_spec)
# After a reset, make sure already loaded specs
# are still marked as activated.
specs = {}
Gem.loaded_specs.each_value{|s| specs[s] = true }
@@all.each{|s| s.activated = true if specs[s] }
end
@@all
end
def self._clear_load_cache # :nodoc:
LOAD_CACHE_MUTEX.synchronize do
LOAD_CACHE.clear
end
end
def self.each_gemspec(dirs) # :nodoc:
dirs.each do |dir|
Gem::Util.glob_files_in_dir("*.gemspec", dir).each do |path|
yield path.tap(&Gem::UNTAINT)
end
end
end
def self.gemspec_stubs_in(dir, pattern)
Gem::Util.glob_files_in_dir(pattern, dir).map {|path| yield path }.select(&:valid?)
end
private_class_method :gemspec_stubs_in
def self.installed_stubs(dirs, pattern)
map_stubs(dirs, pattern) do |path, base_dir, gems_dir|
Gem::StubSpecification.gemspec_stub(path, base_dir, gems_dir)
end
end
private_class_method :installed_stubs
def self.map_stubs(dirs, pattern) # :nodoc:
dirs.flat_map do |dir|
base_dir = File.dirname dir
gems_dir = File.join base_dir, "gems"
gemspec_stubs_in(dir, pattern) {|path| yield path, base_dir, gems_dir }
end
end
private_class_method :map_stubs
def self.each_spec(dirs) # :nodoc:
each_gemspec(dirs) do |path|
spec = self.load path
yield spec if spec
end
end
##
# Returns a Gem::StubSpecification for every installed gem
def self.stubs
@@stubs ||= begin
pattern = "*.gemspec"
stubs = stubs_for_pattern(pattern, false)
@@stubs_by_name = stubs.select {|s| Gem::Platform.match_spec? s }.group_by(&:name)
stubs
end
end
##
# Returns a Gem::StubSpecification for default gems
def self.default_stubs(pattern = "*.gemspec")
base_dir = Gem.default_dir
gems_dir = File.join base_dir, "gems"
gemspec_stubs_in(Gem.default_specifications_dir, pattern) do |path|
Gem::StubSpecification.default_gemspec_stub(path, base_dir, gems_dir)
end
end
##
# Returns a Gem::StubSpecification for installed gem named +name+
# only returns stubs that match Gem.platforms
def self.stubs_for(name)
if @@stubs
@@stubs_by_name[name] || []
else
@@stubs_by_name[name] ||= stubs_for_pattern("#{name}-*.gemspec").select do |s|
s.name == name
end
end
end
##
# Finds stub specifications matching a pattern from the standard locations,
# optionally filtering out specs not matching the current platform
#
def self.stubs_for_pattern(pattern, match_platform = true) # :nodoc:
installed_stubs = installed_stubs(Gem::Specification.dirs, pattern)
installed_stubs.select! {|s| Gem::Platform.match_spec? s } if match_platform
stubs = installed_stubs + default_stubs(pattern)
stubs = stubs.uniq {|stub| stub.full_name }
_resort!(stubs)
stubs
end
def self._resort!(specs) # :nodoc:
specs.sort! do |a, b|
names = a.name <=> b.name
next names if names.nonzero?
versions = b.version <=> a.version
next versions if versions.nonzero?
b.platform == Gem::Platform::RUBY ? -1 : 1
end
end
##
# Loads the default specifications. It should be called only once.
def self.load_defaults
each_spec([Gem.default_specifications_dir]) do |spec|
# #load returns nil if the spec is bad, so we just ignore
# it at this stage
Gem.register_default_spec(spec)
end
end
##
# Returns all specifications. This method is discouraged from use.
# You probably want to use one of the Enumerable methods instead.
def self.all
warn "NOTE: Specification.all called from #{caller.first}" unless
Gem::Deprecate.skip
_all
end
##
# Sets the known specs to +specs+. Not guaranteed to work for you in
# the future. Use at your own risk. Caveat emptor. Doomy doom doom.
# Etc etc.
#
#--
# Makes +specs+ the known specs
# Listen, time is a river
# Winter comes, code breaks
#
# -- wilsonb
def self.all=(specs)
@@stubs_by_name = specs.group_by(&:name)
@@all = @@stubs = specs
end
##
# Return full names of all specs in sorted order.
def self.all_names
self._all.map(&:full_name)
end
##
# Return the list of all array-oriented instance variables.
#--
# Not sure why we need to use so much stupid reflection in here...
def self.array_attributes
@@array_attributes.dup
end
##
# Return the list of all instance variables.
#--
# Not sure why we need to use so much stupid reflection in here...
def self.attribute_names
@@attributes.dup
end
##
# Return the directories that Specification uses to find specs.
def self.dirs
@@dirs ||= Gem.path.collect do |dir|
File.join dir.dup.tap(&Gem::UNTAINT), "specifications"
end
end
##
# Set the directories that Specification uses to find specs. Setting
# this resets the list of known specs.
def self.dirs=(dirs)
self.reset
@@dirs = Array(dirs).map {|dir| File.join dir, "specifications" }
end
extend Enumerable
##
# Enumerate every known spec. See ::dirs= and ::add_spec to set the list of
# specs.
def self.each
return enum_for(:each) unless block_given?
self._all.each do |x|
yield x
end
end
##
# Returns every spec that matches +name+ and optional +requirements+.
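  #
  # For example (assuming at least one version of the gem is installed):
  #
  #   Gem::Specification.find_all_by_name 'rake', '>= 12.0'
  #   # => an Array of the matching Gem::Specification objects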
def self.find_all_by_name(name, *requirements)
requirements = Gem::Requirement.default if requirements.empty?
# TODO: maybe try: find_all { |s| spec === dep }
Gem::Dependency.new(name, *requirements).matching_specs
end
##
# Returns every spec that has the given +full_name+
def self.find_all_by_full_name(full_name)
stubs.select {|s| s.full_name == full_name }.map(&:to_spec)
end
##
# Find the best specification matching a +name+ and +requirements+. Raises
# if the dependency doesn't resolve to a valid specification.
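  #
  # For example (assuming the gem is installed):
  #
  #   Gem::Specification.find_by_name 'rake'
  #   # => the best installed rake Gem::Specification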
def self.find_by_name(name, *requirements)
requirements = Gem::Requirement.default if requirements.empty?
# TODO: maybe try: find { |s| spec === dep }
Gem::Dependency.new(name, *requirements).to_spec
end
##
# Return the best specification that contains the file matching +path+.
def self.find_by_path(path)
path = path.dup.freeze
spec = @@spec_with_requirable_file[path] ||= (stubs.find do |s|
next unless Gem::BundlerVersionFinder.compatible?(s)
s.contains_requirable_file? path
end || NOT_FOUND)
spec.to_spec
end
##
# Return the best specification that contains the file matching +path+
# amongst the specs that are not activated.
def self.find_inactive_by_path(path)
stub = stubs.find do |s|
next if s.activated?
next unless Gem::BundlerVersionFinder.compatible?(s)
s.contains_requirable_file? path
end
stub && stub.to_spec
end
def self.find_active_stub_by_path(path)
stub = @@active_stub_with_requirable_file[path] ||= (stubs.find do |s|
s.activated? and s.contains_requirable_file? path
end || NOT_FOUND)
stub.this
end
##
# Return currently unresolved specs that contain the file matching +path+.
def self.find_in_unresolved(path)
unresolved_specs.find_all {|spec| spec.contains_requirable_file? path }
end
##
# Search through all unresolved deps and sub-dependencies and return
# specs that contain the file matching +path+.
def self.find_in_unresolved_tree(path)
unresolved_specs.each do |spec|
spec.traverse do |from_spec, dep, to_spec, trail|
if to_spec.has_conflicts? || to_spec.conficts_when_loaded_with?(trail)
:next
else
return trail.reverse if to_spec.contains_requirable_file? path
end
end
end
[]
end
def self.unresolved_specs
unresolved_deps.values.map {|dep| dep.to_specs }.flatten
end
private_class_method :unresolved_specs
##
# Special loader for YAML files. When a Specification object is loaded
# from a YAML file, it bypasses the normal Ruby object initialization
# routine (#initialize). This method makes up for that and deals with
# gems of different ages.
#
# +input+ can be anything that YAML.load() accepts: String or IO.
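  #
  # For example (the source of the YAML text is illustrative):
  #
  #   spec = Gem::Specification.from_yaml File.read('example.yml')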
def self.from_yaml(input)
Gem.load_yaml
input = normalize_yaml_input input
spec = Gem::SafeYAML.safe_load input
if spec && spec.class == FalseClass
raise Gem::EndOfYAMLException
end
unless Gem::Specification === spec
raise Gem::Exception, "YAML data doesn't evaluate to gem specification"
end
spec.specification_version ||= NONEXISTENT_SPECIFICATION_VERSION
spec.reset_nil_attributes_to_default
spec
end
##
# Return the latest specs, optionally including prerelease specs if
# +prerelease+ is true.
def self.latest_specs(prerelease = false)
_latest_specs Gem::Specification._all, prerelease
end
##
# Return the latest installed spec for gem +name+.
def self.latest_spec_for(name)
latest_specs(true).find {|installed_spec| installed_spec.name == name }
end
def self._latest_specs(specs, prerelease = false) # :nodoc:
result = {}
specs.reverse_each do |spec|
next if spec.version.prerelease? unless prerelease
result[spec.name] = spec
end
result.map(&:last).flatten.sort_by{|tup| tup.name }
end
##
# Loads Ruby format gemspec from +file+.
def self.load(file)
return unless file
_spec = LOAD_CACHE_MUTEX.synchronize { LOAD_CACHE[file] }
return _spec if _spec
file = file.dup.tap(&Gem::UNTAINT)
return unless File.file?(file)
code = File.read file, :mode => 'r:UTF-8:-'
code.tap(&Gem::UNTAINT)
begin
_spec = eval code, binding, file
if Gem::Specification === _spec
_spec.loaded_from = File.expand_path file.to_s
LOAD_CACHE_MUTEX.synchronize do
prev = LOAD_CACHE[file]
if prev
_spec = prev
else
LOAD_CACHE[file] = _spec
end
end
return _spec
end
warn "[#{file}] isn't a Gem::Specification (#{_spec.class} instead)."
rescue SignalException, SystemExit
raise
rescue SyntaxError, Exception => e
warn "Invalid gemspec in [#{file}]: #{e}"
end
nil
end
##
# Specification attributes that must be non-nil
def self.non_nil_attributes
@@non_nil_attributes.dup
end
##
# Make sure the YAML specification is properly formatted with dashes
def self.normalize_yaml_input(input)
result = input.respond_to?(:read) ? input.read : input
result = "--- " + result unless result.start_with?("--- ")
result = result.dup
result.gsub!(/ !!null \n/, " \n")
# date: 2011-04-26 00:00:00.000000000Z
# date: 2011-04-26 00:00:00.000000000 Z
result.gsub!(/^(date: \d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}\.\d+?)Z/, '\1 Z')
result
end
##
# Return a list of all outdated local gem names. This method is HEAVY
# as it must go fetch specifications from the server.
#
# Use outdated_and_latest_version if you wish to retrieve the latest remote
# version as well.
def self.outdated
outdated_and_latest_version.map {|local, _| local.name }
end
##
# Enumerates the outdated local gems yielding the local specification and
# the latest remote version.
#
# This method may take some time to return as it must check each local gem
# against the server's index.
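  #
  # A usage sketch (this performs network calls against the gem source):
  #
  #   Gem::Specification.outdated_and_latest_version.each do |local, remote|
  #     puts "#{local.name}: #{local.version} -> #{remote}"
  #   end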
def self.outdated_and_latest_version
return enum_for __method__ unless block_given?
# TODO: maybe we should switch to rubygems' version service?
fetcher = Gem::SpecFetcher.fetcher
latest_specs(true).each do |local_spec|
dependency =
Gem::Dependency.new local_spec.name, ">= #{local_spec.version}"
remotes, = fetcher.search_for_dependency dependency
remotes = remotes.map {|n, _| n.version }
latest_remote = remotes.sort.last
yield [local_spec, latest_remote] if
latest_remote and local_spec.version < latest_remote
end
nil
end
##
# Is +name+ a required attribute?
def self.required_attribute?(name)
@@required_attributes.include? name.to_sym
end
##
# Required specification attributes
def self.required_attributes
@@required_attributes.dup
end
##
# Reset the list of known specs, running pre and post reset hooks
# registered in Gem.
def self.reset
@@dirs = nil
Gem.pre_reset_hooks.each {|hook| hook.call }
@@all = nil
@@stubs = nil
@@stubs_by_name = {}
@@spec_with_requirable_file = {}
@@active_stub_with_requirable_file = {}
_clear_load_cache
unresolved = unresolved_deps
unless unresolved.empty?
w = "W" + "ARN"
warn "#{w}: Unresolved or ambiguous specs during Gem::Specification.reset:"
unresolved.values.each do |dep|
warn " #{dep}"
versions = find_all_by_name(dep.name)
unless versions.empty?
warn " Available/installed versions of this gem:"
versions.each {|s| warn " - #{s.version}" }
end
end
warn "#{w}: Clearing out unresolved specs. Try 'gem cleanup <gem>'"
warn "Please report a bug if this causes problems."
unresolved.clear
end
Gem.post_reset_hooks.each {|hook| hook.call }
end
# DOC: This method needs documented or nodoc'd
def self.unresolved_deps
@unresolved_deps ||= Hash.new {|h, n| h[n] = Gem::Dependency.new n }
end
##
# Load custom marshal format, re-initializing defaults as needed
def self._load(str)
Gem.load_yaml
array = Marshal.load str
spec = Gem::Specification.new
spec.instance_variable_set :@specification_version, array[1]
current_version = CURRENT_SPECIFICATION_VERSION
field_count = if spec.specification_version > current_version
spec.instance_variable_set :@specification_version,
current_version
MARSHAL_FIELDS[current_version]
else
MARSHAL_FIELDS[spec.specification_version]
end
if array.size < field_count
raise TypeError, "invalid Gem::Specification format #{array.inspect}"
end
# Cleanup any YAML::PrivateType. They only show up for an old bug
# where nil => null, so just convert them to nil based on the type.
array.map! {|e| e.kind_of?(YAML::PrivateType) ? nil : e }
spec.instance_variable_set :@rubygems_version, array[0]
# spec version
spec.instance_variable_set :@name, array[2]
spec.instance_variable_set :@version, array[3]
spec.date = array[4]
spec.instance_variable_set :@summary, array[5]
spec.instance_variable_set :@required_ruby_version, array[6]
spec.instance_variable_set :@required_rubygems_version, array[7]
spec.instance_variable_set :@original_platform, array[8]
spec.instance_variable_set :@dependencies, array[9]
# offset due to rubyforge_project removal
spec.instance_variable_set :@email, array[11]
spec.instance_variable_set :@authors, array[12]
spec.instance_variable_set :@description, array[13]
spec.instance_variable_set :@homepage, array[14]
spec.instance_variable_set :@has_rdoc, array[15]
spec.instance_variable_set :@new_platform, array[16]
spec.instance_variable_set :@platform, array[16].to_s
spec.instance_variable_set :@license, array[17]
spec.instance_variable_set :@metadata, array[18]
spec.instance_variable_set :@loaded, false
spec.instance_variable_set :@activated, false
spec
end
def <=>(other) # :nodoc:
sort_obj <=> other.sort_obj
end
def ==(other) # :nodoc:
self.class === other &&
name == other.name &&
version == other.version &&
platform == other.platform
end
##
# Dump only crucial instance variables.
#--
# MAINTAIN ORDER!
# (down with the man)
def _dump(limit)
Marshal.dump [
@rubygems_version,
@specification_version,
@name,
@version,
date,
@summary,
@required_ruby_version,
@required_rubygems_version,
@original_platform,
@dependencies,
'', # rubyforge_project
@email,
@authors,
@description,
@homepage,
true, # has_rdoc
@new_platform,
@licenses,
@metadata,
]
end
##
# Activate this spec, registering it as a loaded spec and adding
  # its lib paths to $LOAD_PATH. Returns true if the spec was
# activated, false if it was previously activated. Freaks out if
# there are conflicts upon activation.
def activate
other = Gem.loaded_specs[self.name]
if other
check_version_conflict other
return false
end
raise_if_conflicts
activate_dependencies
add_self_to_load_path
Gem.loaded_specs[self.name] = self
@activated = true
@loaded = true
return true
end
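  # Sketch of a typical activation (assumes a gem named "rake" is installed):
  #
  #   spec = Gem::Specification.find_by_name "rake"
  #   spec.activate # => true on the first call, false if already activated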
##
# Activate all unambiguously resolved runtime dependencies of this
# spec. Add any ambiguous dependencies to the unresolved list to be
# resolved later, as needed.
def activate_dependencies
unresolved = Gem::Specification.unresolved_deps
self.runtime_dependencies.each do |spec_dep|
if loaded = Gem.loaded_specs[spec_dep.name]
next if spec_dep.matches_spec? loaded
msg = "can't satisfy '#{spec_dep}', already activated '#{loaded.full_name}'"
e = Gem::LoadError.new msg
e.name = spec_dep.name
raise e
end
begin
specs = spec_dep.to_specs
rescue Gem::MissingSpecError => e
raise Gem::MissingSpecError.new(e.name, e.requirement, "at: #{self.spec_file}")
end
if specs.size == 1
specs.first.activate
else
name = spec_dep.name
unresolved[name] = unresolved[name].merge spec_dep
end
end
unresolved.delete self.name
end
##
# Abbreviate the spec for downloading. Abbreviated specs are only used for
# searching, downloading and related activities and do not need deployment
# specific information (e.g. list of files). So we abbreviate the spec,
# making it much smaller for quicker downloads.
def abbreviate
self.files = []
self.test_files = []
self.rdoc_options = []
self.extra_rdoc_files = []
self.cert_chain = []
end
##
# Sanitize the descriptive fields in the spec. Sometimes non-ASCII
# characters will garble the site index. Non-ASCII characters will
# be replaced by their XML entity equivalent.
def sanitize
self.summary = sanitize_string(summary)
self.description = sanitize_string(description)
self.post_install_message = sanitize_string(post_install_message)
self.authors = authors.collect {|a| sanitize_string(a) }
end
##
# Sanitize a single string.
def sanitize_string(string)
return string unless string
# HACK the #to_s is in here because RSpec has an Array of Arrays of
# Strings for authors. Need a way to disallow bad values on gemspec
# generation. (Probably won't happen.)
string.to_s
end
##
# Returns an array with bindir attached to each executable in the
# +executables+ list
def add_bindir(executables)
return nil if executables.nil?
if @bindir
Array(executables).map {|e| File.join(@bindir, e) }
else
executables
end
rescue
return nil
end
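  # For example, with the default bindir of "bin" (values illustrative):
  #
  #   spec.add_bindir %w[rake rdoc] # => ["bin/rake", "bin/rdoc"]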
##
# Adds a dependency on gem +dependency+ with type +type+ that requires
# +requirements+. Valid types are currently <tt>:runtime</tt> and
# <tt>:development</tt>.
def add_dependency_with_type(dependency, type, requirements)
requirements = if requirements.empty?
Gem::Requirement.default
else
requirements.flatten
end
unless dependency.respond_to?(:name) &&
dependency.respond_to?(:requirement)
dependency = Gem::Dependency.new(dependency.to_s, requirements, type)
end
dependencies << dependency
end
private :add_dependency_with_type
alias add_dependency add_runtime_dependency
##
# Adds this spec's require paths to LOAD_PATH, in the proper location.
def add_self_to_load_path
return if default_gem?
paths = full_require_paths
Gem.add_to_load_path(*paths)
end
##
# Singular reader for #authors. Returns the first author in the list
def author
val = authors and val.first
end
##
# The list of author names who wrote this gem.
#
# spec.authors = ['Chad Fowler', 'Jim Weirich', 'Rich Kilmer']
def authors
@authors ||= []
end
##
# Returns the full path to installed gem's bin directory.
#
# NOTE: do not confuse this with +bindir+, which is just 'bin', not
# a full path.
def bin_dir
@bin_dir ||= File.join gem_dir, bindir
end
##
# Returns the full path to an executable named +name+ in this gem.
def bin_file(name)
File.join bin_dir, name
end
##
# Returns the build_args used to install the gem
def build_args
if File.exist? build_info_file
build_info = File.readlines build_info_file
build_info = build_info.map {|x| x.strip }
build_info.delete ""
build_info
else
[]
end
end
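  # For instance, if the gem was installed with extension flags, the .info file
  # might contain "--with-foo-dir=/usr/local" (a hypothetical flag), in which case:
  #
  #   spec.build_args # => ["--with-foo-dir=/usr/local"]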
##
# Builds extensions for this platform if the gem has extensions listed and
# the gem.build_complete file is missing.
def build_extensions # :nodoc:
return if default_gem?
return if extensions.empty?
return if File.exist? gem_build_complete_path
return if !File.writable?(base_dir)
return if !File.exist?(File.join(base_dir, 'extensions'))
begin
# We need to require things in $LOAD_PATH without looking for the
# extension we are about to build.
unresolved_deps = Gem::Specification.unresolved_deps.dup
Gem::Specification.unresolved_deps.clear
require 'rubygems/config_file'
require 'rubygems/ext'
require 'rubygems/user_interaction'
ui = Gem::SilentUI.new
Gem::DefaultUserInteraction.use_ui ui do
builder = Gem::Ext::Builder.new self
builder.build_extensions
end
ensure
ui.close if ui
Gem::Specification.unresolved_deps.replace unresolved_deps
end
end
##
# Returns the full path to the build info directory
def build_info_dir
File.join base_dir, "build_info"
end
##
# Returns the full path to the file containing the build
# information generated when the gem was installed
def build_info_file
File.join build_info_dir, "#{full_name}.info"
end
##
# Returns the full path to the cache directory containing this
# spec's cached gem.
def cache_dir
@cache_dir ||= File.join base_dir, "cache"
end
##
# Returns the full path to the cached gem for this spec.
def cache_file
@cache_file ||= File.join cache_dir, "#{full_name}.gem"
end
##
# Return any possible conflicts against the currently loaded specs.
def conflicts
conflicts = {}
self.runtime_dependencies.each do |dep|
spec = Gem.loaded_specs[dep.name]
if spec and not spec.satisfies_requirement? dep
(conflicts[spec] ||= []) << dep
end
end
env_req = Gem.env_requirement(name)
(conflicts[self] ||= []) << env_req unless env_req.satisfied_by? version
conflicts
end
##
  # Return true if there will be a conflict when this spec is loaded together with the given list of specs.
def conficts_when_loaded_with?(list_of_specs) # :nodoc:
result = list_of_specs.any? do |spec|
spec.dependencies.any? {|dep| dep.runtime? && (dep.name == name) && !satisfies_requirement?(dep) }
end
result
end
##
# Return true if there are possible conflicts against the currently loaded specs.
def has_conflicts?
return true unless Gem.env_requirement(name).satisfied_by?(version)
self.dependencies.any? do |dep|
if dep.runtime?
spec = Gem.loaded_specs[dep.name]
spec and not spec.satisfies_requirement? dep
else
false
end
end
end
# The date this gem was created.
#
# If SOURCE_DATE_EPOCH is set as an environment variable, use that to support
# reproducible builds; otherwise, default to the current UTC date.
#
# Details on SOURCE_DATE_EPOCH:
# https://reproducible-builds.org/specs/source-date-epoch/
def date
@date ||= Time.utc(*Gem.source_date_epoch.utc.to_a[3..5].reverse)
end
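  # A reproducible build might pin the date when packaging (value illustrative):
  #
  #   SOURCE_DATE_EPOCH=1577836800 gem build example.gemspec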
DateLike = Object.new # :nodoc:
def DateLike.===(obj) # :nodoc:
defined?(::Date) and Date === obj
end
DateTimeFormat = # :nodoc:
/\A
(\d{4})-(\d{2})-(\d{2})
(\s+ \d{2}:\d{2}:\d{2}\.\d+ \s* (Z | [-+]\d\d:\d\d) )?
\Z/x.freeze
##
# The date this gem was created
#
# DO NOT set this, it is set automatically when the gem is packaged.
def date=(date)
# We want to end up with a Time object with one-day resolution.
# This is the cleanest, most-readable, faster-than-using-Date
# way to do it.
@date = case date
when String then
if DateTimeFormat =~ date
Time.utc($1.to_i, $2.to_i, $3.to_i)
else
raise(Gem::InvalidSpecificationException,
"invalid date format in specification: #{date.inspect}")
end
when Time, DateLike then
Time.utc(date.year, date.month, date.day)
else
TODAY
end
end
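  # Accepted inputs (illustrative):
  #
  #   spec.date = "2020-01-15"                 # date-only string
  #   spec.date = Time.utc(2020, 1, 15, 9, 30) # truncated to the day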
##
# The default executable for this gem.
#
# Deprecated: The name of the gem is assumed to be the name of the
# executable now. See Gem.bin_path.
def default_executable # :nodoc:
if defined?(@default_executable) and @default_executable
result = @default_executable
elsif @executables and @executables.size == 1
result = Array(@executables).first
else
result = nil
end
result
end
rubygems_deprecate :default_executable
##
# The default value for specification attribute +name+
def default_value(name)
@@default_value[name]
end
##
# A list of Gem::Dependency objects this gem depends on.
#
# Use #add_dependency or #add_development_dependency to add dependencies to
# a gem.
def dependencies
@dependencies ||= []
end
##
# Return a list of all gems that have a dependency on this gemspec. The
# list is structured with entries that conform to:
#
# [depending_gem, dependency, [list_of_gems_that_satisfy_dependency]]
def dependent_gems(check_dev=true)
out = []
Gem::Specification.each do |spec|
deps = check_dev ? spec.dependencies : spec.runtime_dependencies
deps.each do |dep|
if self.satisfies_requirement?(dep)
sats = []
find_all_satisfiers(dep) do |sat|
sats << sat
end
out << [spec, dep, sats]
end
end
end
out
end
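  # Each returned entry pairs a dependent spec with the dependency it declares
  # and the installed specs that satisfy it, e.g. (names hypothetical):
  #
  #   [[rails_spec, Gem::Dependency.new("rack"), [rack_spec]], ...]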
##
# Returns all specs that matches this spec's runtime dependencies.
def dependent_specs
runtime_dependencies.map {|dep| dep.to_specs }.flatten
end
##
# A detailed description of this gem. See also #summary
def description=(str)
@description = str.to_s
end
##
# List of dependencies that are used for development
def development_dependencies
dependencies.select {|d| d.type == :development }
end
##
# Returns the full path to this spec's documentation directory. If +type+
# is given it will be appended to the end. For example:
#
# spec.doc_dir # => "/path/to/gem_repo/doc/a-1"
#
# spec.doc_dir 'ri' # => "/path/to/gem_repo/doc/a-1/ri"
def doc_dir(type = nil)
@doc_dir ||= File.join base_dir, 'doc', full_name
if type
File.join @doc_dir, type
else
@doc_dir
end
end
def encode_with(coder) # :nodoc:
mark_version
coder.add 'name', @name
coder.add 'version', @version
platform = case @original_platform
when nil, '' then
'ruby'
when String then
@original_platform
else
@original_platform.to_s
end
coder.add 'platform', platform
attributes = @@attributes.map(&:to_s) - %w[name version platform]
attributes.each do |name|
coder.add name, instance_variable_get("@#{name}")
end
end
def eql?(other) # :nodoc:
self.class === other && same_attributes?(other)
end
##
# Singular accessor for #executables
def executable
val = executables and val.first
end
##
# Singular accessor for #executables
def executable=(o)
self.executables = [o]
end
##
# Sets executables to +value+, ensuring it is an array.
def executables=(value)
@executables = Array(value)
end
##
# Sets extensions to +extensions+, ensuring it is an array.
def extensions=(extensions)
@extensions = Array extensions
end
##
# Sets extra_rdoc_files to +files+, ensuring it is an array.
def extra_rdoc_files=(files)
@extra_rdoc_files = Array files
end
##
# The default (generated) file name of the gem. See also #spec_name.
#
# spec.file_name # => "example-1.0.gem"
def file_name
"#{full_name}.gem"
end
##
# Sets files to +files+, ensuring it is an array.
def files=(files)
@files = Array files
end
##
# Finds all gems that satisfy +dep+
def find_all_satisfiers(dep)
Gem::Specification.each do |spec|
yield spec if spec.satisfies_requirement? dep
end
end
private :find_all_satisfiers
##
# Creates a duplicate spec without large blobs that aren't used at runtime.
def for_cache
spec = dup
spec.files = nil
spec.test_files = nil
spec
end
def full_name
@full_name ||= super
end
##
# Work around bundler removing my methods
def gem_dir # :nodoc:
super
end
def gems_dir
@gems_dir ||= File.join(base_dir, "gems")
end
##
# Deprecated and ignored, defaults to true.
#
# Formerly used to indicate this gem was RDoc-capable.
def has_rdoc # :nodoc:
true
end
rubygems_deprecate :has_rdoc
##
# Deprecated and ignored.
#
# Formerly used to indicate this gem was RDoc-capable.
def has_rdoc=(ignored) # :nodoc:
@has_rdoc = true
end
rubygems_deprecate :has_rdoc=
alias :has_rdoc? :has_rdoc # :nodoc:
rubygems_deprecate :has_rdoc?
##
# True if this gem has files in test_files
def has_unit_tests? # :nodoc:
not test_files.empty?
end
# :stopdoc:
alias has_test_suite? has_unit_tests?
# :startdoc:
def hash # :nodoc:
name.hash ^ version.hash
end
def init_with(coder) # :nodoc:
@installed_by_version ||= nil
yaml_initialize coder.tag, coder.map
end
eval <<-RUBY, binding, __FILE__, __LINE__ + 1
# frozen_string_literal: true
def set_nil_attributes_to_nil
#{@@nil_attributes.map {|key| "@#{key} = nil" }.join "; "}
end
private :set_nil_attributes_to_nil
def set_not_nil_attributes_to_default_values
#{@@non_nil_attributes.map {|key| "@#{key} = #{INITIALIZE_CODE_FOR_DEFAULTS[key]}" }.join ";"}
end
private :set_not_nil_attributes_to_default_values
RUBY
##
# Specification constructor. Assigns the default values to the attributes
# and yields itself for further initialization. Optionally takes +name+ and
# +version+.
def initialize(name = nil, version = nil)
super()
@gems_dir = nil
@base_dir = nil
@loaded = false
@activated = false
@loaded_from = nil
@original_platform = nil
@installed_by_version = nil
set_nil_attributes_to_nil
set_not_nil_attributes_to_default_values
@new_platform = Gem::Platform::RUBY
self.name = name if name
self.version = version if version
if platform = Gem.platforms.last and platform != Gem::Platform::RUBY and platform != Gem::Platform.local
self.platform = platform
end
yield self if block_given?
end
##
# Duplicates array_attributes from +other_spec+ so state isn't shared.
def initialize_copy(other_spec)
self.class.array_attributes.each do |name|
name = :"@#{name}"
next unless other_spec.instance_variable_defined? name
begin
val = other_spec.instance_variable_get(name)
if val
instance_variable_set name, val.dup
elsif Gem.configuration.really_verbose
warn "WARNING: #{full_name} has an invalid nil value for #{name}"
end
rescue TypeError
e = Gem::FormatException.new \
"#{full_name} has an invalid value for #{name}"
e.file_path = loaded_from
raise e
end
end
end
def base_dir
return Gem.dir unless loaded_from
@base_dir ||= if default_gem?
File.dirname File.dirname File.dirname loaded_from
else
File.dirname File.dirname loaded_from
end
end
##
# Expire memoized instance variables that can incorrectly generate, replace
  # or miss files due to changes in certain attributes used to compute them.
def invalidate_memoized_attributes
@full_name = nil
@cache_file = nil
end
private :invalidate_memoized_attributes
def inspect # :nodoc:
if $DEBUG
super
else
"#{super[0..-2]} #{full_name}>"
end
end
##
# Files in the Gem under one of the require_paths
def lib_files
@files.select do |file|
require_paths.any? do |path|
file.start_with? path
end
end
end
##
# Singular accessor for #licenses
def license
licenses.first
end
##
# Plural accessor for setting licenses
#
# See #license= for details
def licenses
@licenses ||= []
end
def internal_init # :nodoc:
super
@bin_dir = nil
@cache_dir = nil
@cache_file = nil
@doc_dir = nil
@ri_dir = nil
@spec_dir = nil
@spec_file = nil
end
##
# Sets the rubygems_version to the current RubyGems version.
def mark_version
@rubygems_version = Gem::VERSION
end
##
# Track removed method calls to warn about during build time.
# Warn about unknown attributes while loading a spec.
def method_missing(sym, *a, &b) # :nodoc:
if REMOVED_METHODS.include?(sym)
removed_method_calls << sym
return
end
if @specification_version > CURRENT_SPECIFICATION_VERSION and
sym.to_s.end_with?("=")
warn "ignoring #{sym} loading #{full_name}" if $DEBUG
else
super
end
end
##
# Is this specification missing its extensions? When this returns true you
# probably want to build_extensions
def missing_extensions?
return false if default_gem?
return false if extensions.empty?
return false if File.exist? gem_build_complete_path
true
end
##
# Normalize the list of files so that:
# * All file lists have redundancies removed.
# * Files referenced in the extra_rdoc_files are included in the package
# file list.
def normalize
if defined?(@extra_rdoc_files) and @extra_rdoc_files
@extra_rdoc_files.uniq!
@files ||= []
@files.concat(@extra_rdoc_files)
end
@files = @files.uniq if @files
@extensions = @extensions.uniq if @extensions
@test_files = @test_files.uniq if @test_files
@executables = @executables.uniq if @executables
@extra_rdoc_files = @extra_rdoc_files.uniq if @extra_rdoc_files
end
##
# Return a NameTuple that represents this Specification
def name_tuple
Gem::NameTuple.new name, version, original_platform
end
##
# Returns the full name (name-version) of this gemspec using the original
# platform. For use with legacy gems.
def original_name # :nodoc:
if platform == Gem::Platform::RUBY or platform.nil?
"#{@name}-#{@version}"
else
"#{@name}-#{@version}-#{@original_platform}"
end
end
##
# Cruft. Use +platform+.
def original_platform # :nodoc:
@original_platform ||= platform
end
##
# The platform this gem runs on. See Gem::Platform for details.
def platform
@new_platform ||= Gem::Platform::RUBY
end
def pretty_print(q) # :nodoc:
q.group 2, 'Gem::Specification.new do |s|', 'end' do
q.breakable
attributes = @@attributes - [:name, :version]
attributes.unshift :installed_by_version
attributes.unshift :version
attributes.unshift :name
attributes.each do |attr_name|
current_value = self.send attr_name
current_value = current_value.sort if %i[files test_files].include? attr_name
if current_value != default_value(attr_name) or
self.class.required_attribute? attr_name
q.text "s.#{attr_name} = "
if attr_name == :date
current_value = current_value.utc
q.text "Time.utc(#{current_value.year}, #{current_value.month}, #{current_value.day})"
else
q.pp current_value
end
q.breakable
end
end
end
end
##
# Raise an exception if the version of this spec conflicts with the one
# that is already loaded (+other+)
def check_version_conflict(other) # :nodoc:
return if self.version == other.version
# This gem is already loaded. If the currently loaded gem is not in the
# list of candidate gems, then we have a version conflict.
msg = "can't activate #{full_name}, already activated #{other.full_name}"
e = Gem::LoadError.new msg
e.name = self.name
raise e
end
private :check_version_conflict
##
# Check the spec for possible conflicts and freak out if there are any.
def raise_if_conflicts # :nodoc:
if has_conflicts?
raise Gem::ConflictError.new self, conflicts
end
end
##
# Sets rdoc_options to +value+, ensuring it is an array.
def rdoc_options=(options)
@rdoc_options = Array options
end
##
# Singular accessor for #require_paths
def require_path
val = require_paths and val.first
end
##
# Singular accessor for #require_paths
def require_path=(path)
self.require_paths = Array(path)
end
##
# Set requirements to +req+, ensuring it is an array.
def requirements=(req)
@requirements = Array req
end
def respond_to_missing?(m, include_private = false) # :nodoc:
false
end
##
# Returns the full path to this spec's ri directory.
def ri_dir
@ri_dir ||= File.join base_dir, 'ri', full_name
end
##
# Return a string containing a Ruby code representation of the given
# object.
def ruby_code(obj)
case obj
when String then obj.dump + ".freeze"
when Array then '[' + obj.map {|x| ruby_code x }.join(", ") + ']'
when Hash then
seg = obj.keys.sort.map {|k| "#{k.to_s.dump} => #{obj[k].to_s.dump}" }
"{ #{seg.join(', ')} }"
when Gem::Version then obj.to_s.dump
when DateLike then obj.strftime('%Y-%m-%d').dump
when Time then obj.strftime('%Y-%m-%d').dump
when Numeric then obj.inspect
when true, false, nil then obj.inspect
when Gem::Platform then "Gem::Platform.new(#{obj.to_a.inspect})"
when Gem::Requirement then
list = obj.as_list
"Gem::Requirement.new(#{ruby_code(list.size == 1 ? obj.to_s : list)})"
else raise Gem::Exception, "ruby_code case not handled: #{obj.class}"
end
end
private :ruby_code
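  # Example conversions performed by ruby_code (illustrative):
  #
  #   ruby_code("abc")                   # => "\"abc\".freeze"
  #   ruby_code(Gem::Version.new("1.0")) # => "\"1.0\""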
##
# List of dependencies that will automatically be activated at runtime.
def runtime_dependencies
dependencies.select(&:runtime?)
end
##
# True if this gem has the same attributes as +other+.
def same_attributes?(spec)
@@attributes.all? {|name, default| self.send(name) == spec.send(name) }
end
private :same_attributes?
##
# Checks if this specification meets the requirement of +dependency+.
def satisfies_requirement?(dependency)
return @name == dependency.name &&
dependency.requirement.satisfied_by?(@version)
end
##
# Returns an object you can use to sort specifications in #sort_by.
def sort_obj
[@name, @version, @new_platform == Gem::Platform::RUBY ? -1 : 1]
end
##
# Used by Gem::Resolver to order Gem::Specification objects
def source # :nodoc:
Gem::Source::Installed.new
end
##
# Returns the full path to the directory containing this spec's
# gemspec file. eg: /usr/local/lib/ruby/gems/1.8/specifications
def spec_dir
@spec_dir ||= File.join base_dir, "specifications"
end
##
# Returns the full path to this spec's gemspec file.
# eg: /usr/local/lib/ruby/gems/1.8/specifications/mygem-1.0.gemspec
def spec_file
@spec_file ||= File.join spec_dir, "#{full_name}.gemspec"
end
##
# The default name of the gemspec. See also #file_name
#
# spec.spec_name # => "example-1.0.gemspec"
def spec_name
"#{full_name}.gemspec"
end
##
# A short summary of this gem's description.
def summary=(str)
@summary = str.to_s.strip.
gsub(/(\w-)\n[ \t]*(\w)/, '\1\2').gsub(/\n[ \t]*/, " ") # so. weird.
end
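  # Hard-wrapped text is collapsed onto a single line, e.g. (illustrative):
  #
  #   spec.summary = "A short\n  summary"
  #   spec.summary # => "A short summary"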
##
# Singular accessor for #test_files
def test_file # :nodoc:
val = test_files and val.first
end
##
# Singular mutator for #test_files
def test_file=(file) # :nodoc:
self.test_files = [file]
end
##
# Test files included in this gem. You cannot append to this accessor, you
# must assign to it.
def test_files # :nodoc:
# Handle the possibility that we have @test_suite_file but not
# @test_files. This will happen when an old gem is loaded via
# YAML.
if defined? @test_suite_file
@test_files = [@test_suite_file].flatten
@test_suite_file = nil
end
if defined?(@test_files) and @test_files
@test_files
else
@test_files = []
end
end
##
# Returns a Ruby code representation of this specification, such that it can
# be eval'ed and reconstruct the same specification later. Attributes that
# still have their default values are omitted.
def to_ruby
mark_version
result = []
result << "# -*- encoding: utf-8 -*-"
result << "#{Gem::StubSpecification::PREFIX}#{name} #{version} #{platform} #{raw_require_paths.join("\0")}"
result << "#{Gem::StubSpecification::PREFIX}#{extensions.join "\0"}" unless
extensions.empty?
result << nil
result << "Gem::Specification.new do |s|"
result << " s.name = #{ruby_code name}"
result << " s.version = #{ruby_code version}"
unless platform.nil? or platform == Gem::Platform::RUBY
result << " s.platform = #{ruby_code original_platform}"
end
result << ""
result << " s.required_rubygems_version = #{ruby_code required_rubygems_version} if s.respond_to? :required_rubygems_version="
if metadata and !metadata.empty?
result << " s.metadata = #{ruby_code metadata} if s.respond_to? :metadata="
end
result << " s.require_paths = #{ruby_code raw_require_paths}"
handled = [
:dependencies,
:name,
:platform,
:require_paths,
:required_rubygems_version,
:specification_version,
:version,
:has_rdoc,
:default_executable,
:metadata,
:signing_key,
]
@@attributes.each do |attr_name|
next if handled.include? attr_name
current_value = self.send(attr_name)
if current_value != default_value(attr_name) || self.class.required_attribute?(attr_name)
result << " s.#{attr_name} = #{ruby_code current_value}"
end
end
if String === signing_key
result << " s.signing_key = #{signing_key.dump}.freeze"
end
if @installed_by_version
result << nil
result << " s.installed_by_version = \"#{Gem::VERSION}\" if s.respond_to? :installed_by_version"
end
unless dependencies.empty?
result << nil
result << " if s.respond_to? :specification_version then"
result << " s.specification_version = #{specification_version}"
result << " end"
result << nil
result << " if s.respond_to? :add_runtime_dependency then"
dependencies.each do |dep|
req = dep.requirements_list.inspect
dep.instance_variable_set :@type, :runtime if dep.type.nil? # HACK
result << " s.add_#{dep.type}_dependency(%q<#{dep.name}>.freeze, #{req})"
end
result << " else"
dependencies.each do |dep|
version_reqs_param = dep.requirements_list.inspect
result << " s.add_dependency(%q<#{dep.name}>.freeze, #{version_reqs_param})"
end
result << " end"
end
result << "end"
result << nil
result.join "\n"
end
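  # The generated source is meant to round-trip (sketch):
  #
  #   same_spec = eval spec.to_ruby
  #   same_spec.name == spec.name # => true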
##
# Returns a Ruby lighter-weight code representation of this specification,
# used for indexing only.
#
# See #to_ruby.
def to_ruby_for_cache
for_cache.to_ruby
end
def to_s # :nodoc:
"#<Gem::Specification name=#{@name} version=#{@version}>"
end
##
# Returns self
def to_spec
self
end
def to_yaml(opts = {}) # :nodoc:
Gem.load_yaml
# Because the user can switch the YAML engine behind our
# back, we have to check again here to make sure that our
# psych code was properly loaded, and load it if not.
unless Gem.const_defined?(:NoAliasYAMLTree)
require 'rubygems/psych_tree'
end
builder = Gem::NoAliasYAMLTree.create
builder << self
ast = builder.tree
require 'stringio'
io = StringIO.new
io.set_encoding Encoding::UTF_8
Psych::Visitors::Emitter.new(io).accept(ast)
io.string.gsub(/ !!null \n/, " \n")
end
##
# Recursively walk dependencies of this spec, executing the +block+ for each
# hop.
def traverse(trail = [], visited = {}, &block)
trail.push(self)
begin
dependencies.each do |dep|
next unless dep.runtime?
dep.matching_specs(true).each do |dep_spec|
next if visited.has_key?(dep_spec)
visited[dep_spec] = true
trail.push(dep_spec)
begin
result = block[self, dep, dep_spec, trail]
ensure
trail.pop
end
unless result == :next
spec_name = dep_spec.name
dep_spec.traverse(trail, visited, &block) unless
trail.any? {|s| s.name == spec_name }
end
end
end
ensure
trail.pop
end
end
##
# Checks that the specification contains all required fields, and does a
# very basic sanity check.
#
# Raises InvalidSpecificationException if the spec does not pass the
  # checks.
def validate(packaging = true, strict = false)
normalize
validation_policy = Gem::SpecificationPolicy.new(self)
validation_policy.packaging = packaging
validation_policy.validate(strict)
end
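  # Typically invoked during gem build; an invalid spec raises, e.g. (sketch):
  #
  #   spec.validate # raises Gem::InvalidSpecificationException when, say,
  #                 # required fields such as summary or authors are missing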
def keep_only_files_and_directories
@executables.delete_if {|x| File.directory?(File.join(@bindir, x)) }
@extensions.delete_if {|x| File.directory?(x) && !File.symlink?(x) }
@extra_rdoc_files.delete_if {|x| File.directory?(x) && !File.symlink?(x) }
@files.delete_if {|x| File.directory?(x) && !File.symlink?(x) }
@test_files.delete_if {|x| File.directory?(x) && !File.symlink?(x) }
end
def validate_metadata
Gem::SpecificationPolicy.new(self).validate_metadata
end
rubygems_deprecate :validate_metadata
def validate_dependencies
Gem::SpecificationPolicy.new(self).validate_dependencies
end
rubygems_deprecate :validate_dependencies
def validate_permissions
Gem::SpecificationPolicy.new(self).validate_permissions
end
rubygems_deprecate :validate_permissions
##
# Set the version to +version+, potentially also setting
# required_rubygems_version if +version+ indicates it is a
# prerelease.
def version=(version)
@version = Gem::Version.create(version)
    # Skip setting required_rubygems_version when the gem is a prerelease of
    # RubyGems itself; doing so used to raise a CircularDependencyError.
if @version.prerelease? && (@name.nil? || @name.strip != "rubygems")
self.required_rubygems_version = '> 1.3.1'
end
invalidate_memoized_attributes
return @version
end
def stubbed?
false
end
def yaml_initialize(tag, vals) # :nodoc:
vals.each do |ivar, val|
case ivar
when "date"
# Force Date to go through the extra coerce logic in date=
self.date = val.tap(&Gem::UNTAINT)
else
instance_variable_set "@#{ivar}", val.tap(&Gem::UNTAINT)
end
end
@original_platform = @platform # for backwards compatibility
self.platform = Gem::Platform.new @platform
end
##
# Reset nil attributes to their default values to make the spec valid
def reset_nil_attributes_to_default
nil_attributes = self.class.non_nil_attributes.find_all do |name|
!instance_variable_defined?("@#{name}") || instance_variable_get("@#{name}").nil?
end
nil_attributes.each do |attribute|
default = self.default_value attribute
value = case default
when Time, Numeric, Symbol, true, false, nil then default
else default.dup
end
instance_variable_set "@#{attribute}", value
end
@installed_by_version ||= nil
end
def raw_require_paths # :nodoc:
@require_paths
end
end
| 26.802917 | 131 | 0.640845 |
f7915c8523e733a9de7215286aac30b82ea9e6d8 | 928 | require 'nokogiri'
require 'rails/generators/testing/behaviour'
require 'generators/camunda/bpmn_classes/bpmn_classes_generator.rb'
describe Camunda::Generators::BpmnClassesGenerator do
include FileUtils
let(:dummy_app_root) { File.expand_path('../dummy', __dir__) }
describe 'runs sample with success' do
before { described_class.start([File.expand_path("spec/bpmn_test_files/sample.bpmn")], destination_root: dummy_app_root) }
after { remove_dir File.expand_path("../dummy/app/bpmn", __dir__) }
it("has the module") { expect(File).to exist(File.join(dummy_app_root, "app/bpmn/camunda_workflow.rb")) }
it("has the class") { expect(File).to exist(File.join(dummy_app_root, "app/bpmn/camunda_workflow/do_something.rb")) }
end
  it 'runs sample with failure' do
described_class.start([File.expand_path("spec/bpmn_test_files/sample_fail.bpmn")], destination_root: dummy_app_root)
end
end
| 40.347826 | 126 | 0.760776 |
ac5283e6bb477c154b7a4aa4eefac5daee162ad1 | 1,277 | # frozen_string_literal: true
require_relative "lib/dannugjr_view_tool/version"
Gem::Specification.new do |spec|
spec.name = "dannugjr_view_tool"
spec.version = DannugjrViewTool::VERSION
spec.authors = ["Kerbs"]
spec.email = ["[email protected]"]
spec.summary = "Various view specific methods for applications I use."
spec.description = "Provides generated Html data for Rails applications."
spec.homepage = "https://devcamp.com"
spec.license = "MIT"
spec.required_ruby_version = Gem::Requirement.new(">= 2.4.0")
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path(__dir__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{\A(?:test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
# Uncomment to register a new dependency of your gem
# spec.add_dependency "example-gem", "~> 1.0"
# For more information and examples about making a new gem, checkout our
# guide at: https://bundler.io/guides/creating_gem.html
end
| 38.69697 | 88 | 0.677369 |
2632826b14ed43e08fc75a99f5c6365b443a05a1 | 3,587 | # frozen_string_literal: true
require 'bootsnap'
require 'optparse'
require 'fileutils'
module Bootsnap
class CLI
unless Regexp.method_defined?(:match?)
module RegexpMatchBackport
refine Regexp do
def match?(string)
!!match(string)
end
end
end
using RegexpMatchBackport
end
attr_reader :cache_dir, :argv
attr_accessor :compile_gemfile, :exclude
def initialize(argv)
@argv = argv
self.cache_dir = ENV.fetch('BOOTSNAP_CACHE_DIR', 'tmp/cache')
self.compile_gemfile = false
self.exclude = nil
end
def precompile_command(*sources)
require 'bootsnap/compile_cache/iseq'
fix_default_encoding do
Bootsnap::CompileCache::ISeq.cache_dir = self.cache_dir
if compile_gemfile
sources += $LOAD_PATH
end
sources.map { |d| File.expand_path(d) }.each do |path|
if !exclude || !exclude.match?(path)
list_ruby_files(path).each do |ruby_file|
if !exclude || !exclude.match?(ruby_file)
CompileCache::ISeq.fetch(ruby_file, cache_dir: cache_dir)
end
end
end
end
end
0
end
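    # Typical invocations from the command line (illustrative):
    #
    #   bootsnap precompile app/ lib/
    #   bootsnap precompile --gemfile .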
dir_sort = begin
Dir['.', sort: false]
true
rescue ArgumentError, TypeError
false
end
if dir_sort
def list_ruby_files(path)
if File.directory?(path)
Dir[File.join(path, '**/*.rb'), sort: false]
elsif File.exist?(path)
[path]
else
[]
end
end
else
def list_ruby_files(path)
if File.directory?(path)
Dir[File.join(path, '**/*.rb')]
elsif File.exist?(path)
[path]
else
[]
end
end
end
def run
parser.parse!(argv)
command = argv.shift
method = "#{command}_command"
if respond_to?(method)
public_send(method, *argv)
else
invalid_usage!("Unknown command: #{command}")
end
end
private
def fix_default_encoding
if Encoding.default_external == Encoding::US_ASCII
Encoding.default_external = Encoding::UTF_8
begin
yield
ensure
Encoding.default_external = Encoding::US_ASCII
end
else
yield
end
end
def invalid_usage!(message)
STDERR.puts message
STDERR.puts
STDERR.puts parser
1
end
def cache_dir=(dir)
@cache_dir = File.expand_path(File.join(dir, 'bootsnap/compile-cache'))
end
def parser
@parser ||= OptionParser.new do |opts|
opts.banner = "Usage: bootsnap COMMAND [ARGS]"
opts.separator ""
opts.separator "GLOBAL OPTIONS"
opts.separator ""
help = <<~EOS
Path to the bootsnap cache directory. Defaults to tmp/cache
EOS
opts.on('--cache-dir DIR', help.strip) do |dir|
self.cache_dir = dir
end
opts.separator ""
opts.separator "COMMANDS"
opts.separator ""
opts.separator " precompile [DIRECTORIES...]: Precompile all .rb files in the passed directories"
help = <<~EOS
Precompile the gems in Gemfile
EOS
opts.on('--gemfile', help) { self.compile_gemfile = true }
help = <<~EOS
Path pattern to not precompile. e.g. --exclude 'aws-sdk|google-api'
EOS
opts.on('--exclude PATTERN', help) { |pattern| self.exclude = Regexp.new(pattern) }
end
end
end
end
| 23.598684 | 108 | 0.571787 |
4a0f56c650a340439f580a9ab8d67e2e485b86c4 | 543 | require_relative '../../test_helper'
class MachineWithOtherStates < StateMachinesTest
def setup
@klass = Class.new
@machine = StateMachines::Machine.new(@klass, initial: :parked)
@parked, @idling = @machine.other_states(:parked, :idling)
end
def test_should_include_other_states_in_known_states
assert_equal [@parked, @idling], @machine.states.to_a
end
def test_should_use_default_value
assert_equal 'idling', @idling.value
end
def test_should_not_create_matcher
assert_nil @idling.matcher
end
end
| 23.608696 | 67 | 0.74954 |
bb2023bdd33e2a77c08b282b0fd13e970fe6475e | 462 | module TicketNotifier
extend ActiveSupport::Concern
included do
after_create -> { notify_patron_new_ticket }
after_update -> { notify_patron_resolved_status }
end
def notify_patron_new_ticket
TicketMailer.new_ticket(self).deliver_later
end
def notify_patron_resolved_status
if self.saved_change_to_status? && self[:status] == SearchTicket::STATUS_RESOLVED
TicketMailer.ticket_resolved(self).deliver_later
end
end
end
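# Usage sketch (the including model is assumed; the concern expects a status
# column and the TicketMailer referenced above):
#
#   class SearchTicket < ApplicationRecord
#     include TicketNotifier
#   end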
| 22 | 85 | 0.768398 |
79232f86e2be56defe8403a91dfdf18223113a99 | 228 | class User < ActiveRecord::Base
has_many :reviews
has_secure_password
validates :email, presence: true, uniqueness: true
validates :username, presence: true, uniqueness: true
validates :password, presence: true
end | 25.333333 | 55 | 0.763158 |
f8d1529c52964bff95da2bdba5922834609e784d | 758 | # frozen_string_literal: true
module API
module Entities
class PendingMember < Grape::Entity
expose :id
expose :user_name, as: :name, if: -> (_) { user.present? }
expose :user_username, as: :username, if: -> (_) { user.present? }
expose :email
expose :web_url, if: -> (_) { user.present? }
expose :invite?, as: :invited
expose :avatar_url do |_|
user&.avatar_url || GravatarService.new.execute(email)
end
expose :approved do |member|
member.active?
end
def email
object.invite_email || object.user.email
end
def web_url
Gitlab::Routing.url_helpers.user_url(user)
end
def user
object.user
end
end
end
end
| 21.657143 | 72 | 0.58971 |
ac47ae99e0382cd51b490ad6340f2493c255b68f | 4,778 | require 'spec_helper'
describe FbGraph::Connections::Insights do
describe '#insights' do
context 'when included by FbGraph::Page' do
context 'when no access_token given' do
it 'should raise FbGraph::Unauthorized' do
mock_graph :get, 'FbGraph/insights', 'pages/insights/FbGraph_public', :status => [401, 'Unauthorized'] do
lambda do
FbGraph::Page.new('FbGraph').insights
end.should raise_exception(FbGraph::Unauthorized)
end
end
end
context 'when access_token is given' do
it 'should return insights as FbGraph::Insight' do
mock_graph :get, 'FbGraph/insights', 'pages/insights/FbGraph_private', :access_token => 'access_token' do
insights = FbGraph::Page.new('FbGraph').insights(:access_token => 'access_token')
insights.class.should == FbGraph::Connection
insights.first.should == FbGraph::Insight.new(
'117513961602338/insights/page_fan_adds_unique/day',
:access_token => 'access_token',
:name => 'page_fan_adds_unique',
:description => 'Daily New Likes of your Page (Unique Users)',
:period => 'day',
:values => [{
:value => 1,
:end_time => '2010-11-27T08:00:00+0000'
}]
)
insights.each do |insight|
insight.should be_instance_of(FbGraph::Insight)
end
end
end
end
context 'when metrics is given' do
it 'should treat metrics as connection scope' do
mock_graph :get, 'FbGraph/insights/page_like_adds', 'pages/insights/page_like_adds/FbGraph_private', :access_token => 'access_token' do
insights = FbGraph::Page.new('FbGraph').insights(:access_token => 'access_token', :metrics => :page_like_adds)
insights.options.should == {
:connection_scope => 'page_like_adds',
:access_token => 'access_token'
}
insights.first.should == FbGraph::Insight.new(
'117513961602338/insights/page_like_adds/day',
:access_token => 'access_token',
:name => 'page_like_adds',
:description => 'Daily Likes of your Page\'s content (Total Count)',
:period => 'day',
:values => [{
:value => 0,
:end_time => '2010-12-09T08:00:00+0000'
}, {
:value => 0,
:end_time => '2010-12-10T08:00:00+0000'
}, {
:value => 0,
:end_time => '2010-12-11T08:00:00+0000'
}]
)
end
end
it 'should support period also' do
mock_graph :get, 'FbGraph/insights/page_like_adds/day', 'pages/insights/page_like_adds/day/FbGraph_private', :access_token => 'access_token' do
insights = FbGraph::Page.new('FbGraph').insights(:access_token => 'access_token', :metrics => :page_like_adds, :period => :day)
insights.options.should == {
:connection_scope => 'page_like_adds/day',
:access_token => 'access_token'
}
insights.first.should == FbGraph::Insight.new(
'117513961602338/insights/page_like_adds/day',
:access_token => 'access_token',
:name => 'page_like_adds',
:description => 'Daily Likes of your Page\'s content (Total Count)',
:period => 'day',
:values => [{
:value => 1,
:end_time => '2010-12-09T08:00:00+0000'
}, {
:value => 1,
:end_time => '2010-12-10T08:00:00+0000'
}, {
:value => 1,
:end_time => '2010-12-11T08:00:00+0000'
}]
)
end
end
it 'should used for pagination' do
mock_graph :get, 'FbGraph/insights/page_like_adds/day', 'pages/insights/page_like_adds/day/FbGraph_private', :access_token => 'access_token' do
insights = FbGraph::Page.new('FbGraph').insights(:access_token => 'access_token', :metrics => :page_like_adds, :period => :day)
expect { insights.next }.to request_to 'FbGraph/insights/page_like_adds/day?access_token=134145643294322%7C9b95ab3141be69aff9766c90-579612276%7C9UA_-V98QdZDfoX4MSS-DdwTFFk&since=1292065709&until=1292324909'
expect { insights.previous }.to request_to 'FbGraph/insights/page_like_adds/day?access_token=134145643294322%7C9b95ab3141be69aff9766c90-579612276%7C9UA_-V98QdZDfoX4MSS-DdwTFFk&since=1291547309&until=1291806509'
end
end
end
end
end
end
| 45.504762 | 222 | 0.570113 |
39fa383d4f470e83ec2c8076aa4bb22f21017776 | 38,046 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::BatchAI::Mgmt::V2018_05_01
#
# The Azure BatchAI Management API.
#
class Experiments
include MsRestAzure
#
# Creates and initializes a new instance of the Experiments class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [BatchAIManagementClient] reference to the BatchAIManagementClient
attr_reader :client
#
# Gets a list of Experiments within the specified Workspace.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiments_list_by_workspace_options
# [ExperimentsListByWorkspaceOptions] Additional parameters for the operation
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<Experiment>] operation results.
#
def list_by_workspace(resource_group_name, workspace_name, experiments_list_by_workspace_options:nil, custom_headers:nil)
first_page = list_by_workspace_as_lazy(resource_group_name, workspace_name, experiments_list_by_workspace_options:experiments_list_by_workspace_options, custom_headers:custom_headers)
first_page.get_all_items
end
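    # Illustrative call (client, resource group and workspace names are
    # hypothetical):
    #
    #   experiments = client.experiments.list_by_workspace('my-rg', 'my-workspace')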
#
# Gets a list of Experiments within the specified Workspace.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiments_list_by_workspace_options
# [ExperimentsListByWorkspaceOptions] Additional parameters for the operation
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_workspace_with_http_info(resource_group_name, workspace_name, experiments_list_by_workspace_options:nil, custom_headers:nil)
list_by_workspace_async(resource_group_name, workspace_name, experiments_list_by_workspace_options:experiments_list_by_workspace_options, custom_headers:custom_headers).value!
end
#
# Gets a list of Experiments within the specified Workspace.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiments_list_by_workspace_options
# [ExperimentsListByWorkspaceOptions] Additional parameters for the operation
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_workspace_async(resource_group_name, workspace_name, experiments_list_by_workspace_options:nil, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
fail ArgumentError, 'workspace_name is nil' if workspace_name.nil?
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'MaxLength': '64'" if !workspace_name.nil? && workspace_name.length > 64
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'MinLength': '1'" if !workspace_name.nil? && workspace_name.length < 1
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'Pattern': '^[-\w_]+$'" if !workspace_name.nil? && workspace_name.match(Regexp.new('^^[-\w_]+$$')).nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
max_results = nil
unless experiments_list_by_workspace_options.nil?
max_results = experiments_list_by_workspace_options.maxResults
end
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'workspaceName' => workspace_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version,'maxresults' => max_results},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::BatchAI::Mgmt::V2018_05_01::Models::ExperimentListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Creates an Experiment.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Experiment] operation results.
#
def create(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
response = create_async(resource_group_name, workspace_name, experiment_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
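    # For example (names are hypothetical):
    #
    #   experiment = client.experiments.create('my-rg', 'my-workspace', 'exp1')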
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def create_async(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
# Send request
promise = begin_create_async(resource_group_name, workspace_name, experiment_name, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
result_mapper = Azure::BatchAI::Mgmt::V2018_05_01::Models::Experiment.mapper()
parsed_response = @client.deserialize(result_mapper, parsed_response)
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Deletes an Experiment.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
def delete(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
response = delete_async(resource_group_name, workspace_name, experiment_name, custom_headers:custom_headers).value!
nil
end
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def delete_async(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
# Send request
promise = begin_delete_async(resource_group_name, workspace_name, experiment_name, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Gets information about an Experiment.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Experiment] operation results.
#
def get(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
response = get_async(resource_group_name, workspace_name, experiment_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets information about an Experiment.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
get_async(resource_group_name, workspace_name, experiment_name, custom_headers:custom_headers).value!
end
#
# Gets information about an Experiment.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
fail ArgumentError, 'workspace_name is nil' if workspace_name.nil?
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'MaxLength': '64'" if !workspace_name.nil? && workspace_name.length > 64
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'MinLength': '1'" if !workspace_name.nil? && workspace_name.length < 1
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'Pattern': '^[-\w_]+$'" if !workspace_name.nil? && workspace_name.match(Regexp.new('^^[-\w_]+$$')).nil?
fail ArgumentError, 'experiment_name is nil' if experiment_name.nil?
fail ArgumentError, "'experiment_name' should satisfy the constraint - 'MaxLength': '64'" if !experiment_name.nil? && experiment_name.length > 64
fail ArgumentError, "'experiment_name' should satisfy the constraint - 'MinLength': '1'" if !experiment_name.nil? && experiment_name.length < 1
fail ArgumentError, "'experiment_name' should satisfy the constraint - 'Pattern': '^[-\w_]+$'" if !experiment_name.nil? && experiment_name.match(Regexp.new('^^[-\w_]+$$')).nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'workspaceName' => workspace_name,'experimentName' => experiment_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::BatchAI::Mgmt::V2018_05_01::Models::Experiment.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Creates an Experiment.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Experiment] operation results.
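    #
    # Illustrative usage (a sketch, not generated code; assumes `experiments` is
    # an instance of this operations class obtained from a configured
    # BatchAIManagementClient):
    #
    #   experiment = experiments.begin_create('my-resource-group', 'my-workspace', 'my-experiment')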
#
def begin_create(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
response = begin_create_async(resource_group_name, workspace_name, experiment_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Creates an Experiment.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_create_with_http_info(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
begin_create_async(resource_group_name, workspace_name, experiment_name, custom_headers:custom_headers).value!
end
#
# Creates an Experiment.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_create_async(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
fail ArgumentError, 'workspace_name is nil' if workspace_name.nil?
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'MaxLength': '64'" if !workspace_name.nil? && workspace_name.length > 64
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'MinLength': '1'" if !workspace_name.nil? && workspace_name.length < 1
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'Pattern': '^[-\w_]+$'" if !workspace_name.nil? && workspace_name.match(Regexp.new('^^[-\w_]+$$')).nil?
fail ArgumentError, 'experiment_name is nil' if experiment_name.nil?
fail ArgumentError, "'experiment_name' should satisfy the constraint - 'MaxLength': '64'" if !experiment_name.nil? && experiment_name.length > 64
fail ArgumentError, "'experiment_name' should satisfy the constraint - 'MinLength': '1'" if !experiment_name.nil? && experiment_name.length < 1
fail ArgumentError, "'experiment_name' should satisfy the constraint - 'Pattern': '^[-\w_]+$'" if !experiment_name.nil? && experiment_name.match(Regexp.new('^^[-\w_]+$$')).nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'workspaceName' => workspace_name,'experimentName' => experiment_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200 || status_code == 202
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::BatchAI::Mgmt::V2018_05_01::Models::Experiment.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes an Experiment.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
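    # Illustrative usage (a sketch, not generated code; assumes `experiments` is
    # an instance of this operations class obtained from a configured
    # BatchAIManagementClient):
    #
    #   experiments.begin_delete('my-resource-group', 'my-workspace', 'my-experiment')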
#
def begin_delete(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
response = begin_delete_async(resource_group_name, workspace_name, experiment_name, custom_headers:custom_headers).value!
nil
end
#
# Deletes an Experiment.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_delete_with_http_info(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
begin_delete_async(resource_group_name, workspace_name, experiment_name, custom_headers:custom_headers).value!
end
#
# Deletes an Experiment.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiment_name [String] The name of the experiment. Experiment names
# can only contain a combination of alphanumeric characters along with dash (-)
# and underscore (_). The name must be from 1 through 64 characters long.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def begin_delete_async(resource_group_name, workspace_name, experiment_name, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, "'resource_group_name' should satisfy the constraint - 'Pattern': '^[-\w\._]+$'" if !resource_group_name.nil? && resource_group_name.match(Regexp.new('^^[-\w\._]+$$')).nil?
fail ArgumentError, 'workspace_name is nil' if workspace_name.nil?
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'MaxLength': '64'" if !workspace_name.nil? && workspace_name.length > 64
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'MinLength': '1'" if !workspace_name.nil? && workspace_name.length < 1
fail ArgumentError, "'workspace_name' should satisfy the constraint - 'Pattern': '^[-\w_]+$'" if !workspace_name.nil? && workspace_name.match(Regexp.new('^^[-\w_]+$$')).nil?
fail ArgumentError, 'experiment_name is nil' if experiment_name.nil?
fail ArgumentError, "'experiment_name' should satisfy the constraint - 'MaxLength': '64'" if !experiment_name.nil? && experiment_name.length > 64
fail ArgumentError, "'experiment_name' should satisfy the constraint - 'MinLength': '1'" if !experiment_name.nil? && experiment_name.length < 1
fail ArgumentError, "'experiment_name' should satisfy the constraint - 'Pattern': '^[-\w_]+$'" if !experiment_name.nil? && experiment_name.match(Regexp.new('^^[-\w_]+$$')).nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.BatchAI/workspaces/{workspaceName}/experiments/{experimentName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'workspaceName' => workspace_name,'experimentName' => experiment_name,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:delete, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200 || status_code == 202 || status_code == 204
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
#
# Gets a list of Experiments within the specified Workspace.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ExperimentListResult] operation results.
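    #
    # Illustrative usage (a sketch, not generated code; assumes `experiments` is
    # an instance of this operations class and that the paged result exposes the
    # next link as `next_link`, which is an assumption about the generated model):
    #
    #   next_page = experiments.list_by_workspace_next(previous_page.next_link)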
#
def list_by_workspace_next(next_page_link, custom_headers:nil)
response = list_by_workspace_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets a list of Experiments within the specified Workspace.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_workspace_next_with_http_info(next_page_link, custom_headers:nil)
list_by_workspace_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# Gets a list of Experiments within the specified Workspace.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_workspace_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::BatchAI::Mgmt::V2018_05_01::Models::ExperimentListResult.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets a list of Experiments within the specified Workspace.
#
# @param resource_group_name [String] Name of the resource group to which the
# resource belongs.
# @param workspace_name [String] The name of the workspace. Workspace names can
# only contain a combination of alphanumeric characters along with dash (-) and
# underscore (_). The name must be from 1 through 64 characters long.
# @param experiments_list_by_workspace_options
# [ExperimentsListByWorkspaceOptions] Additional parameters for the operation
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
    # @return [ExperimentListResult] which provides lazy access to pages of the
# response.
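    #
    # Illustrative usage (a sketch, not generated code; assumes `experiments` is
    # an instance of this operations class and that the generated paged model
    # exposes `get_all_items`, which is an assumption about this SDK family):
    #
    #   first_page = experiments.list_by_workspace_as_lazy('my-resource-group', 'my-workspace')
    #   all_experiments = first_page.get_all_items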
#
def list_by_workspace_as_lazy(resource_group_name, workspace_name, experiments_list_by_workspace_options:nil, custom_headers:nil)
response = list_by_workspace_async(resource_group_name, workspace_name, experiments_list_by_workspace_options:experiments_list_by_workspace_options, custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_by_workspace_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
end
end
| 53.360449 | 198 | 0.714687 |
1c1aca39b27729a483bb8359269569cf2ef606b5 | 2,044 | ENV["RAILS_ENV"] ||= 'test'
require 'spec_helper'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
require 'capybara/rspec'
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
Dir[Rails.root.join("spec/support/**/*.rb")].each { |f| require f }
# Checks for pending migrations before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
config.include FactoryGirl::Syntax::Methods
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, :type => :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
config.include(EmailSpec::Helpers)
config.include(EmailSpec::Matchers)
end
| 41.714286 | 79 | 0.740705 |
91a33ca18d38b3b0b8f987d6c801829f8f9bb4e5 | 931 | # frozen_string_literal: true
require "rails_helper"
module Renalware
describe "pd regime bag's days assigned are by default set all to true", type: :system do
it "days can be deleselected when creating a new pd regime", js: true do
create(:bag_type, manufacturer: "CompanyA", description: "BagDescription")
patient = create(:patient)
login_as_clinical
visit new_patient_pd_regime_path(patient, type: "PD::CAPDRegime")
fill_in "Start date", with: "25/05/2015"
select "CAPD 3 exchanges per day", from: "Treatment"
find("a.add-bag").click
select "CompanyA BagDescription", from: "* Bag type"
select "2500", from: "Volume (ml)"
uncheck "Tue"
uncheck "Thu"
within ".patient-content" do
click_on t("btn.save")
end
within ".current-regime" do
expect(page).to have_content("Sun, Mon, Wed, Fri, Sat")
end
end
end
end
| 31.033333 | 91 | 0.654135 |
f77641dbadb3e82098d8c1dedebf84d5fafdd4c2 | 2,613 | require 'spec_helper'
require 'r10k/action/puppetfile/install'
describe R10K::Action::Puppetfile::Install do
let(:default_opts) { {root: "/some/nonexistent/path"} }
let(:puppetfile) { R10K::Puppetfile.new('/some/nonexistent/path', nil, nil) }
def installer(opts = {}, argv = [], settings = {})
opts = default_opts.merge(opts)
return described_class.new(opts, argv, settings)
end
before(:each) do
allow(puppetfile).to receive(:load!).and_return(nil)
allow(R10K::Puppetfile).to receive(:new).with("/some/nonexistent/path", nil, nil, nil, nil).and_return(puppetfile)
end
it_behaves_like "a puppetfile install action"
describe "installing modules" do
let(:modules) do
(1..4).map do |idx|
R10K::Module::Base.new("author/modname#{idx}", "/some/nonexistent/path/modname#{idx}", nil)
end
end
before do
allow(puppetfile).to receive(:purge!)
allow(puppetfile).to receive(:modules).and_return(modules)
end
it "syncs each module in the Puppetfile" do
modules.each { |m| expect(m).to receive(:sync) }
expect(installer.call).to eq true
end
it "returns false if a module failed to install" do
modules[0..2].each { |m| expect(m).to receive(:sync) }
expect(modules[3]).to receive(:sync).and_raise
expect(installer.call).to eq false
end
end
describe "purging" do
before do
allow(puppetfile).to receive(:modules).and_return([])
end
it "purges the moduledir after installation" do
expect(puppetfile).to receive(:purge!)
installer.call
end
end
describe "using custom paths" do
it "can use a custom puppetfile path" do
expect(R10K::Puppetfile).to receive(:new).with("/some/nonexistent/path", nil, "/some/other/path/Puppetfile", nil, nil).and_return(puppetfile)
installer({puppetfile: "/some/other/path/Puppetfile"}).call
end
it "can use a custom moduledir path" do
expect(R10K::Puppetfile).to receive(:new).with("/some/nonexistent/path", "/some/other/path/site-modules", nil, nil, nil).and_return(puppetfile)
installer({moduledir: "/some/other/path/site-modules"}).call
end
end
describe "forcing to overwrite local changes" do
before do
allow(puppetfile).to receive(:modules).and_return([])
end
it "can use the force overwrite option" do
subject = described_class.new({root: "/some/nonexistent/path", force: true}, [])
expect(R10K::Puppetfile).to receive(:new).with("/some/nonexistent/path", nil, nil, nil, true).and_return(puppetfile)
subject.call
end
end
end
| 30.741176 | 149 | 0.672407 |
011ca193d8dcdb58e4ec0fcbdca2b146212a1a37 | 382 | maintainer "Logan Lowell - Infochimps, Inc"
maintainer_email "[email protected]"
license "Apache 2.0"
version "0.0.2"
description "ZeroMQ: The socket library that acts as a concurrency framework."
depends "silverware"
recipe "zeromq::install_from_release", "Installs ZeroMQ"
%w[ debian ubuntu ].each do |os|
supports os
end
| 25.466667 | 83 | 0.667539 |
2651d6ff738348b579f63bfd891f1a42dc690ea2 | 28,823 | require "abstract_unit"
require "controller/fake_models"
class TestController < ActionController::Base
end
module RenderTestCases
def setup_view(paths)
@assigns = { secret: "in the sauce" }
@view = Class.new(ActionView::Base) do
def view_cache_dependencies; end
def fragment_cache_key(key)
ActiveSupport::Cache.expand_cache_key(key, :views)
end
end.new(paths, @assigns)
@controller_view = TestController.new.view_context
    # Reload and register Danish language for testing
I18n.backend.store_translations "da", {}
I18n.backend.store_translations "pt-BR", {}
    # Ensure the original locales are still the same since we are reindexing view paths
assert_equal ORIGINAL_LOCALES, I18n.available_locales.map(&:to_s).sort
end
def test_render_without_options
e = assert_raises(ArgumentError) { @view.render() }
assert_match(/You invoked render but did not give any of (.+) option./, e.message)
end
def test_render_file
assert_equal "Hello world!", @view.render(file: "test/hello_world")
end
# Test if :formats, :locale etc. options are passed correctly to the resolvers.
def test_render_file_with_format
assert_match "<h1>No Comment</h1>", @view.render(file: "comments/empty", formats: [:html])
assert_match "<error>No Comment</error>", @view.render(file: "comments/empty", formats: [:xml])
assert_match "<error>No Comment</error>", @view.render(file: "comments/empty", formats: :xml)
end
def test_render_template_with_format
assert_match "<h1>No Comment</h1>", @view.render(template: "comments/empty", formats: [:html])
assert_match "<error>No Comment</error>", @view.render(template: "comments/empty", formats: [:xml])
end
def test_rendered_format_without_format
@view.render(inline: "test")
assert_equal :html, @view.lookup_context.rendered_format
end
def test_render_partial_implicitly_use_format_of_the_rendered_template
@view.lookup_context.formats = [:json]
assert_equal "Hello world", @view.render(template: "test/one", formats: [:html])
end
def test_render_partial_implicitly_use_format_of_the_rendered_partial
@view.lookup_context.formats = [:html]
assert_equal "Third level", @view.render(template: "test/html_template")
end
def test_render_partial_use_last_prepended_format_for_partials_with_the_same_names
@view.lookup_context.formats = [:html]
assert_equal "\nHTML Template, but JSON partial", @view.render(template: "test/change_priority")
end
def test_render_template_with_a_missing_partial_of_another_format
@view.lookup_context.formats = [:html]
e = assert_raise ActionView::Template::Error do
@view.render(template: "with_format", formats: [:json])
end
assert_includes(e.message, "Missing partial /_missing with {:locale=>[:en], :formats=>[:json], :variants=>[], :handlers=>[:raw, :erb, :html, :builder, :ruby]}.")
end
def test_render_file_with_locale
assert_equal "<h1>Kein Kommentar</h1>", @view.render(file: "comments/empty", locale: [:de])
assert_equal "<h1>Kein Kommentar</h1>", @view.render(file: "comments/empty", locale: :de)
end
def test_render_template_with_locale
assert_equal "<h1>Kein Kommentar</h1>", @view.render(template: "comments/empty", locale: [:de])
end
def test_render_file_with_handlers
assert_equal "<h1>No Comment</h1>\n", @view.render(file: "comments/empty", handlers: [:builder])
assert_equal "<h1>No Comment</h1>\n", @view.render(file: "comments/empty", handlers: :builder)
end
def test_render_template_with_handlers
assert_equal "<h1>No Comment</h1>\n", @view.render(template: "comments/empty", handlers: [:builder])
end
def test_render_raw_template_with_handlers
assert_equal "<%= hello_world %>\n", @view.render(template: "plain_text")
end
def test_render_raw_template_with_quotes
assert_equal %q;Here are some characters: !@#$%^&*()-="'}{`; + "\n", @view.render(template: "plain_text_with_characters")
end
def test_render_raw_is_html_safe_and_does_not_escape_output
buffer = ActiveSupport::SafeBuffer.new
buffer << @view.render(file: "plain_text")
assert_equal true, buffer.html_safe?
assert_equal buffer, "<%= hello_world %>\n"
end
def test_render_ruby_template_with_handlers
assert_equal "Hello from Ruby code", @view.render(template: "ruby_template")
end
def test_render_ruby_template_inline
assert_equal "4", @view.render(inline: "(2**2).to_s", type: :ruby)
end
def test_render_file_with_localization_on_context_level
old_locale, @view.locale = @view.locale, :da
assert_equal "Hey verden", @view.render(file: "test/hello_world")
ensure
@view.locale = old_locale
end
def test_render_file_with_dashed_locale
old_locale, @view.locale = @view.locale, :"pt-BR"
assert_equal "Ola mundo", @view.render(file: "test/hello_world")
ensure
@view.locale = old_locale
end
def test_render_file_at_top_level
assert_equal "Elastica", @view.render(file: "/shared")
end
def test_render_file_with_full_path
template_path = File.join(File.dirname(__FILE__), "../fixtures/test/hello_world")
assert_equal "Hello world!", @view.render(file: template_path)
end
def test_render_file_with_instance_variables
assert_equal "The secret is in the sauce\n", @view.render(file: "test/render_file_with_ivar")
end
def test_render_file_with_locals
locals = { secret: "in the sauce" }
assert_equal "The secret is in the sauce\n", @view.render(file: "test/render_file_with_locals", locals: locals)
end
def test_render_file_not_using_full_path_with_dot_in_path
assert_equal "The secret is in the sauce\n", @view.render(file: "test/dot.directory/render_file_with_ivar")
end
def test_render_partial_from_default
assert_equal "only partial", @view.render("test/partial_only")
end
def test_render_outside_path
assert File.exist?(File.join(File.dirname(__FILE__), "../../test/abstract_unit.rb"))
assert_raises ActionView::MissingTemplate do
@view.render(template: "../\\../test/abstract_unit.rb")
end
end
def test_render_partial
assert_equal "only partial", @view.render(partial: "test/partial_only")
end
def test_render_partial_with_format
assert_equal "partial html", @view.render(partial: "test/partial")
end
def test_render_partial_with_selected_format
assert_equal "partial html", @view.render(partial: "test/partial", formats: :html)
assert_equal "partial js", @view.render(partial: "test/partial", formats: [:js])
end
def test_render_partial_at_top_level
# file fixtures/_top_level_partial_only (not fixtures/test)
assert_equal "top level partial", @view.render(partial: "/top_level_partial_only")
end
def test_render_partial_with_format_at_top_level
# file fixtures/_top_level_partial.html (not fixtures/test, with format extension)
assert_equal "top level partial html", @view.render(partial: "/top_level_partial")
end
def test_render_partial_with_locals
assert_equal "5", @view.render(partial: "test/counter", locals: { counter_counter: 5 })
end
def test_render_partial_with_locals_from_default
assert_equal "only partial", @view.render("test/partial_only", counter_counter: 5)
end
def test_render_partial_with_number
assert_nothing_raised { @view.render(partial: "test/200") }
end
def test_render_partial_with_missing_filename
assert_raises(ActionView::MissingTemplate) { @view.render(partial: "test/") }
end
def test_render_partial_with_incompatible_object
e = assert_raises(ArgumentError) { @view.render(partial: nil) }
assert_equal "'#{nil.inspect}' is not an ActiveModel-compatible object. It must implement :to_partial_path.", e.message
end
def test_render_partial_starting_with_a_capital
assert_nothing_raised { @view.render(partial: "test/FooBar") }
end
def test_render_partial_with_hyphen
assert_nothing_raised { @view.render(partial: "test/a-in") }
end
def test_render_partial_with_unicode_text
assert_nothing_raised { @view.render(partial: "test/🍣") }
end
def test_render_partial_with_invalid_option_as
e = assert_raises(ArgumentError) { @view.render(partial: "test/partial_only", as: "a-in") }
assert_equal "The value (a-in) of the option `as` is not a valid Ruby identifier; " +
"make sure it starts with lowercase letter, " +
"and is followed by any combination of letters, numbers and underscores.", e.message
end
def test_render_partial_with_hyphen_and_invalid_option_as
e = assert_raises(ArgumentError) { @view.render(partial: "test/a-in", as: "a-in") }
assert_equal "The value (a-in) of the option `as` is not a valid Ruby identifier; " +
"make sure it starts with lowercase letter, " +
"and is followed by any combination of letters, numbers and underscores.", e.message
end
def test_render_partial_with_errors
e = assert_raises(ActionView::Template::Error) { @view.render(partial: "test/raise") }
assert_match %r!method.*doesnt_exist!, e.message
assert_equal "", e.sub_template_message
assert_equal "1", e.line_number
assert_equal "1: <%= doesnt_exist %>", e.annoted_source_code[0].strip
assert_equal File.expand_path("#{FIXTURE_LOAD_PATH}/test/_raise.html.erb"), e.file_name
end
def test_render_error_indentation
e = assert_raises(ActionView::Template::Error) { @view.render(partial: "test/raise_indentation") }
error_lines = e.annoted_source_code
assert_match %r!error\shere!, e.message
assert_equal "11", e.line_number
assert_equal " 9: <p>Ninth paragraph</p>", error_lines.second
assert_equal " 10: <p>Tenth paragraph</p>", error_lines.third
end
def test_render_sub_template_with_errors
e = assert_raises(ActionView::Template::Error) { @view.render(template: "test/sub_template_raise") }
assert_match %r!method.*doesnt_exist!, e.message
assert_equal "Trace of template inclusion: #{File.expand_path("#{FIXTURE_LOAD_PATH}/test/sub_template_raise.html.erb")}", e.sub_template_message
assert_equal "1", e.line_number
assert_equal File.expand_path("#{FIXTURE_LOAD_PATH}/test/_raise.html.erb"), e.file_name
end
def test_render_file_with_errors
e = assert_raises(ActionView::Template::Error) { @view.render(file: File.expand_path("test/_raise", FIXTURE_LOAD_PATH)) }
assert_match %r!method.*doesnt_exist!, e.message
assert_equal "", e.sub_template_message
assert_equal "1", e.line_number
assert_equal "1: <%= doesnt_exist %>", e.annoted_source_code[0].strip
assert_equal File.expand_path("#{FIXTURE_LOAD_PATH}/test/_raise.html.erb"), e.file_name
end
def test_render_object
assert_equal "Hello: david", @view.render(partial: "test/customer", object: Customer.new("david"))
assert_equal "FalseClass", @view.render(partial: "test/klass", object: false)
assert_equal "NilClass", @view.render(partial: "test/klass", object: nil)
end
def test_render_object_with_array
assert_equal "[1, 2, 3]", @view.render(partial: "test/object_inspector", object: [1, 2, 3])
end
def test_render_partial_collection
assert_equal "Hello: davidHello: mary", @view.render(partial: "test/customer", collection: [ Customer.new("david"), Customer.new("mary") ])
end
def test_render_partial_collection_with_partial_name_containing_dot
assert_equal "Hello: davidHello: mary",
@view.render(partial: "test/customer.mobile", collection: [ Customer.new("david"), Customer.new("mary") ])
end
def test_render_partial_collection_as_by_string
assert_equal "david david davidmary mary mary",
@view.render(partial: "test/customer_with_var", collection: [ Customer.new("david"), Customer.new("mary") ], as: "customer")
end
def test_render_partial_collection_as_by_symbol
assert_equal "david david davidmary mary mary",
@view.render(partial: "test/customer_with_var", collection: [ Customer.new("david"), Customer.new("mary") ], as: :customer)
end
def test_render_partial_collection_without_as
assert_equal "local_inspector,local_inspector_counter,local_inspector_iteration",
@view.render(partial: "test/local_inspector", collection: [ Customer.new("mary") ])
end
def test_render_partial_with_empty_collection_should_return_nil
assert_nil @view.render(partial: "test/customer", collection: [])
end
def test_render_partial_with_nil_collection_should_return_nil
assert_nil @view.render(partial: "test/customer", collection: nil)
end
def test_render_partial_collection_for_non_array
customers = Enumerator.new do |y|
y.yield(Customer.new("david"))
y.yield(Customer.new("mary"))
end
assert_equal "Hello: davidHello: mary", @view.render(partial: "test/customer", collection: customers)
end
def test_render_partial_without_object_does_not_put_partial_name_to_local_assigns
assert_equal "false", @view.render(partial: "test/partial_name_in_local_assigns")
end
def test_render_partial_with_nil_object_puts_partial_name_to_local_assigns
assert_equal "true", @view.render(partial: "test/partial_name_in_local_assigns", object: nil)
end
def test_render_partial_with_nil_values_in_collection
assert_equal "Hello: davidHello: Anonymous", @view.render(partial: "test/customer", collection: [ Customer.new("david"), nil ])
end
def test_render_partial_with_layout_using_collection_and_template
assert_equal "<b>Hello: Amazon</b><b>Hello: Yahoo</b>", @view.render(partial: "test/customer", layout: "test/b_layout_for_partial", collection: [ Customer.new("Amazon"), Customer.new("Yahoo") ])
end
def test_render_partial_with_layout_using_collection_and_template_makes_current_item_available_in_layout
assert_equal '<b class="amazon">Hello: Amazon</b><b class="yahoo">Hello: Yahoo</b>',
@view.render(partial: "test/customer", layout: "test/b_layout_for_partial_with_object", collection: [ Customer.new("Amazon"), Customer.new("Yahoo") ])
end
def test_render_partial_with_layout_using_collection_and_template_makes_current_item_counter_available_in_layout
assert_equal '<b data-counter="0">Hello: Amazon</b><b data-counter="1">Hello: Yahoo</b>',
@view.render(partial: "test/customer", layout: "test/b_layout_for_partial_with_object_counter", collection: [ Customer.new("Amazon"), Customer.new("Yahoo") ])
end
def test_render_partial_with_layout_using_object_and_template_makes_object_available_in_layout
assert_equal '<b class="amazon">Hello: Amazon</b>',
@view.render(partial: "test/customer", layout: "test/b_layout_for_partial_with_object", object: Customer.new("Amazon"))
end
def test_render_partial_with_empty_array_should_return_nil
assert_nil @view.render(partial: [])
end
def test_render_partial_using_string
assert_equal "Hello: Anonymous", @controller_view.render("customer")
end
def test_render_partial_with_locals_using_string
assert_equal "Hola: david", @controller_view.render("customer_greeting", greeting: "Hola", customer_greeting: Customer.new("david"))
end
def test_render_partial_with_object_uses_render_partial_path
assert_equal "Hello: lifo",
@controller_view.render(partial: Customer.new("lifo"), locals: { greeting: "Hello" })
end
def test_render_partial_with_object_and_format_uses_render_partial_path
assert_equal "<greeting>Hello</greeting><name>lifo</name>",
@controller_view.render(partial: Customer.new("lifo"), formats: :xml, locals: { greeting: "Hello" })
end
def test_render_partial_using_object
assert_equal "Hello: lifo",
@controller_view.render(Customer.new("lifo"), greeting: "Hello")
end
def test_render_partial_using_collection
customers = [ Customer.new("Amazon"), Customer.new("Yahoo") ]
assert_equal "Hello: AmazonHello: Yahoo",
@controller_view.render(customers, greeting: "Hello")
end
def test_render_partial_using_collection_without_path
assert_equal "hi good customer: david0", @controller_view.render([ GoodCustomer.new("david") ], greeting: "hi")
end
def test_render_partial_without_object_or_collection_does_not_generate_partial_name_local_variable
exception = assert_raises ActionView::Template::Error do
@controller_view.render("partial_name_local_variable")
end
assert_instance_of NameError, exception.cause
assert_equal :partial_name_local_variable, exception.cause.name
end
# TODO: The reason for this test is unclear, improve documentation
def test_render_partial_and_fallback_to_layout
assert_equal "Before (Josh)\n\nAfter", @view.render(partial: "test/layout_for_partial", locals: { name: "Josh" })
end
# TODO: The reason for this test is unclear, improve documentation
def test_render_missing_xml_partial_and_raise_missing_template
@view.formats = [:xml]
assert_raises(ActionView::MissingTemplate) { @view.render(partial: "test/layout_for_partial") }
ensure
@view.formats = nil
end
def test_render_layout_with_block_and_other_partial_inside
render = @view.render(layout: "test/layout_with_partial_and_yield") { "Yield!" }
assert_equal "Before\npartial html\nYield!\nAfter\n", render
end
def test_render_inline
assert_equal "Hello, World!", @view.render(inline: "Hello, World!")
end
def test_render_inline_with_locals
assert_equal "Hello, Josh!", @view.render(inline: "Hello, <%= name %>!", locals: { name: "Josh" })
end
def test_render_fallbacks_to_erb_for_unknown_types
assert_equal "Hello, World!", @view.render(inline: "Hello, World!", type: :bar)
end
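  # A minimal template handler used by the tests below: given a template, it
  # returns Ruby source that initializes @output_buffer and appends the
  # template's own source (inspected) to it, so rendering through this handler
  # produces a string like 'source: "Hello, World!"'.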
CustomHandler = lambda do |template|
"@output_buffer = ''\n" +
"@output_buffer << 'source: #{template.source.inspect}'\n"
end
def test_render_inline_with_render_from_to_proc
ActionView::Template.register_template_handler :ruby_handler, :source.to_proc
assert_equal "3", @view.render(inline: "(1 + 2).to_s", type: :ruby_handler)
ensure
ActionView::Template.unregister_template_handler :ruby_handler
end
def test_render_inline_with_compilable_custom_type
ActionView::Template.register_template_handler :foo, CustomHandler
assert_equal 'source: "Hello, World!"', @view.render(inline: "Hello, World!", type: :foo)
ensure
ActionView::Template.unregister_template_handler :foo
end
def test_render_inline_with_locals_and_compilable_custom_type
ActionView::Template.register_template_handler :foo, CustomHandler
assert_equal 'source: "Hello, <%= name %>!"', @view.render(inline: "Hello, <%= name %>!", locals: { name: "Josh" }, type: :foo)
ensure
ActionView::Template.unregister_template_handler :foo
end
def test_render_body
assert_equal "some body", @view.render(body: "some body")
end
def test_render_plain
assert_equal "some plaintext", @view.render(plain: "some plaintext")
end
def test_render_knows_about_types_registered_when_extensions_are_checked_earlier_in_initialization
ActionView::Template::Handlers.extensions
ActionView::Template.register_template_handler :foo, CustomHandler
assert_includes ActionView::Template::Handlers.extensions, :foo
ensure
ActionView::Template.unregister_template_handler :foo
end
def test_render_does_not_use_unregistered_extension_and_template_handler
ActionView::Template.register_template_handler :foo, CustomHandler
ActionView::Template.unregister_template_handler :foo
assert_not ActionView::Template::Handlers.extensions.include?(:foo)
assert_equal "Hello, World!", @view.render(inline: "Hello, World!", type: :foo)
ensure
ActionView::Template::Handlers.class_variable_get(:@@template_handlers).delete(:foo)
end
def test_render_ignores_templates_with_malformed_template_handlers
%w(malformed malformed.erb malformed.html.erb malformed.en.html.erb).each do |name|
      assert File.exist?(File.expand_path("#{FIXTURE_LOAD_PATH}/test/malformed/#{name}~")), "Malformed file (#{name}~) which should be ignored does not exist"
assert_raises(ActionView::MissingTemplate) { @view.render(file: "test/malformed/#{name}") }
end
end
def test_render_with_layout
assert_equal %(<title></title>\nHello world!\n),
@view.render(file: "test/hello_world", layout: "layouts/yield")
end
def test_render_with_layout_which_has_render_inline
assert_equal %(welcome\nHello world!\n),
@view.render(file: "test/hello_world", layout: "layouts/yield_with_render_inline_inside")
end
def test_render_with_layout_which_renders_another_partial
assert_equal %(partial html\nHello world!\n),
@view.render(file: "test/hello_world", layout: "layouts/yield_with_render_partial_inside")
end
def test_render_partial_with_html_only_extension
assert_equal %(<h1>partial html</h1>\nHello world!\n),
@view.render(file: "test/hello_world", layout: "layouts/render_partial_html")
end
def test_render_layout_with_block_and_yield
assert_equal %(Content from block!\n),
@view.render(layout: "layouts/yield_only") { "Content from block!" }
end
def test_render_layout_with_block_and_yield_with_params
assert_equal %(Yield! Content from block!\n),
@view.render(layout: "layouts/yield_with_params") { |param| "#{param} Content from block!" }
end
def test_render_layout_with_block_which_renders_another_partial_and_yields
assert_equal %(partial html\nContent from block!\n),
@view.render(layout: "layouts/partial_and_yield") { "Content from block!" }
end
def test_render_partial_and_layout_without_block_with_locals
assert_equal %(Before (Foo!)\npartial html\nAfter),
@view.render(partial: "test/partial", layout: "test/layout_for_partial", locals: { name: "Foo!" })
end
def test_render_partial_and_layout_without_block_with_locals_and_rendering_another_partial
assert_equal %(Before (Foo!)\npartial html\npartial with partial\n\nAfter),
@view.render(partial: "test/partial_with_partial", layout: "test/layout_for_partial", locals: { name: "Foo!" })
end
def test_render_partial_shortcut_with_block_content
assert_equal %(Before (shortcut test)\nBefore\n\n Yielded: arg1/arg2\n\nAfter\nAfter),
@view.render(partial: "test/partial_shortcut_with_block_content", layout: "test/layout_for_partial", locals: { name: "shortcut test" })
end
def test_render_layout_with_a_nested_render_layout_call
assert_equal %(Before (Foo!)\nBefore (Bar!)\npartial html\nAfter\npartial with layout\n\nAfter),
@view.render(partial: "test/partial_with_layout", layout: "test/layout_for_partial", locals: { name: "Foo!" })
end
def test_render_layout_with_a_nested_render_layout_call_using_block_with_render_partial
assert_equal %(Before (Foo!)\nBefore (Bar!)\n\n partial html\n\nAfterpartial with layout\n\nAfter),
@view.render(partial: "test/partial_with_layout_block_partial", layout: "test/layout_for_partial", locals: { name: "Foo!" })
end
def test_render_layout_with_a_nested_render_layout_call_using_block_with_render_content
assert_equal %(Before (Foo!)\nBefore (Bar!)\n\n Content from inside layout!\n\nAfterpartial with layout\n\nAfter),
@view.render(partial: "test/partial_with_layout_block_content", layout: "test/layout_for_partial", locals: { name: "Foo!" })
end
def test_render_partial_with_layout_raises_descriptive_error
e = assert_raises(ActionView::MissingTemplate) { @view.render(partial: "test/partial", layout: true) }
assert_match "Missing partial /_true with", e.message
end
def test_render_with_nested_layout
assert_equal %(<title>title</title>\n\n<div id="column">column</div>\n<div id="content">content</div>\n),
@view.render(file: "test/nested_layout", layout: "layouts/yield")
end
def test_render_with_file_in_layout
assert_equal %(\n<title>title</title>\n\n),
@view.render(file: "test/layout_render_file")
end
def test_render_layout_with_object
assert_equal %(<title>David</title>),
@view.render(file: "test/layout_render_object")
end
def test_render_with_passing_couple_extensions_to_one_register_template_handler_function_call
ActionView::Template.register_template_handler :foo1, :foo2, CustomHandler
assert_equal @view.render(inline: "Hello, World!", type: :foo1), @view.render(inline: "Hello, World!", type: :foo2)
ensure
ActionView::Template.unregister_template_handler :foo1, :foo2
end
def test_render_throws_exception_when_no_extensions_passed_to_register_template_handler_function_call
assert_raises(ArgumentError) { ActionView::Template.register_template_handler CustomHandler }
end
end
class CachedViewRenderTest < ActiveSupport::TestCase
include RenderTestCases
# Ensure view path cache is primed
def setup
view_paths = ActionController::Base.view_paths
assert_equal ActionView::OptimizedFileSystemResolver, view_paths.first.class
setup_view(view_paths)
end
def teardown
GC.start
I18n.reload!
end
end
class LazyViewRenderTest < ActiveSupport::TestCase
include RenderTestCases
# Test the same thing as above, but make sure the view path
# is not eager loaded
def setup
path = ActionView::FileSystemResolver.new(FIXTURE_LOAD_PATH)
view_paths = ActionView::PathSet.new([path])
assert_equal ActionView::FileSystemResolver.new(FIXTURE_LOAD_PATH), view_paths.first
setup_view(view_paths)
end
def teardown
GC.start
I18n.reload!
end
def test_render_utf8_template_with_magic_comment
with_external_encoding Encoding::ASCII_8BIT do
result = @view.render(file: "test/utf8_magic", formats: [:html], layouts: "layouts/yield")
assert_equal Encoding::UTF_8, result.encoding
assert_equal "\nРусский \nтекст\n\nUTF-8\nUTF-8\nUTF-8\n", result
end
end
def test_render_utf8_template_with_default_external_encoding
with_external_encoding Encoding::UTF_8 do
result = @view.render(file: "test/utf8", formats: [:html], layouts: "layouts/yield")
assert_equal Encoding::UTF_8, result.encoding
assert_equal "Русский текст\n\nUTF-8\nUTF-8\nUTF-8\n", result
end
end
def test_render_utf8_template_with_incompatible_external_encoding
with_external_encoding Encoding::SHIFT_JIS do
e = assert_raises(ActionView::Template::Error) { @view.render(file: "test/utf8", formats: [:html], layouts: "layouts/yield") }
assert_match "Your template was not saved as valid Shift_JIS", e.cause.message
end
end
def test_render_utf8_template_with_partial_with_incompatible_encoding
with_external_encoding Encoding::SHIFT_JIS do
e = assert_raises(ActionView::Template::Error) { @view.render(file: "test/utf8_magic_with_bare_partial", formats: [:html], layouts: "layouts/yield") }
assert_match "Your template was not saved as valid Shift_JIS", e.cause.message
end
end
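  # Temporarily swaps Encoding.default_external for the duration of the block
  # and restores the previous value afterwards; warnings are silenced because
  # reassigning the default external encoding emits a Ruby warning.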
def with_external_encoding(encoding)
old = Encoding.default_external
silence_warnings { Encoding.default_external = encoding }
yield
ensure
silence_warnings { Encoding.default_external = old }
end
end
class CachedCollectionViewRenderTest < ActiveSupport::TestCase
class CachedCustomer < Customer; end
include RenderTestCases
# Ensure view path cache is primed
setup do
view_paths = ActionController::Base.view_paths
assert_equal ActionView::OptimizedFileSystemResolver, view_paths.first.class
ActionView::PartialRenderer.collection_cache = ActiveSupport::Cache::MemoryStore.new
setup_view(view_paths)
end
teardown do
GC.start
I18n.reload!
end
test "collection caching does not cache by default" do
customer = Customer.new("david", 1)
key = cache_key(customer, "test/_customer")
ActionView::PartialRenderer.collection_cache.write(key, "Cached")
assert_not_equal "Cached",
@view.render(partial: "test/customer", collection: [customer])
end
test "collection caching with partial that doesn't use fragment caching" do
customer = Customer.new("david", 1)
key = cache_key(customer, "test/_customer")
ActionView::PartialRenderer.collection_cache.write(key, "Cached")
assert_equal "Cached",
@view.render(partial: "test/customer", collection: [customer], cached: true)
end
test "collection caching with cached true" do
customer = CachedCustomer.new("david", 1)
key = cache_key(customer, "test/_cached_customer")
ActionView::PartialRenderer.collection_cache.write(key, "Cached")
assert_equal "Cached",
@view.render(partial: "test/cached_customer", collection: [customer], cached: true)
end
private
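    # Builds the fragment cache key used by the collection-caching tests above:
    # the template identified by virtual_path is digested through
    # ActionView::Digestor with the view's lookup context, and the digest is
    # appended to the given names before expansion via the fragment_cache_key
    # helper defined in setup_view.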
def cache_key(*names, virtual_path)
digest = ActionView::Digestor.digest name: virtual_path, finder: @view.lookup_context, dependencies: []
@view.fragment_cache_key([ *names, digest ])
end
end
| 40.653032 | 198 | 0.750165 |
26660c9415f61cc104d01538e44ccea0373aeb30 | 41,545 | #
# Author:: Adam Jacob (<[email protected]>)
# Author:: Christopher Walters (<[email protected]>)
# Author:: Tim Hinderliter (<[email protected]>)
# Author:: Seth Chisamore (<[email protected]>)
# Copyright:: Copyright 2008-2018, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
describe Chef::Resource do
let(:cookbook_repo_path) { File.join(CHEF_SPEC_DATA, "cookbooks") }
let(:cookbook_collection) { Chef::CookbookCollection.new(Chef::CookbookLoader.new(cookbook_repo_path)) }
let(:node) { Chef::Node.new }
let(:events) { Chef::EventDispatch::Dispatcher.new }
let(:run_context) { Chef::RunContext.new(node, cookbook_collection, events) }
let(:resource) { resource_class.new("funk", run_context) }
let(:resource_class) { Chef::Resource }
it "should mixin shell_out" do
expect(resource.respond_to?(:shell_out)).to be true
end
it "should mixin shell_out!" do
expect(resource.respond_to?(:shell_out!)).to be true
end
describe "when inherited" do
it "adds an entry to a list of subclasses" do
subclass = Class.new(Chef::Resource)
expect(Chef::Resource.resource_classes).to include(subclass)
end
it "keeps track of subclasses of subclasses" do
subclass = Class.new(Chef::Resource)
subclass_of_subclass = Class.new(subclass)
expect(Chef::Resource.resource_classes).to include(subclass_of_subclass)
end
end
describe "when declaring the identity attribute" do
it "has :name as identity attribute by default" do
expect(Chef::Resource.identity_attr).to eq(:name)
end
it "sets an identity attribute" do
resource_class = Class.new(Chef::Resource)
resource_class.identity_attr(:path)
expect(resource_class.identity_attr).to eq(:path)
end
it "inherits an identity attribute from a superclass" do
resource_class = Class.new(Chef::Resource)
resource_subclass = Class.new(resource_class)
resource_class.identity_attr(:package_name)
expect(resource_subclass.identity_attr).to eq(:package_name)
end
it "overrides the identity attribute from a superclass when the identity attr is set" do
resource_class = Class.new(Chef::Resource)
resource_subclass = Class.new(resource_class)
resource_class.identity_attr(:package_name)
resource_subclass.identity_attr(:something_else)
expect(resource_subclass.identity_attr).to eq(:something_else)
end
end
describe "when no identity attribute has been declared" do
let(:resource_sans_id) { Chef::Resource.new("my-name") }
# Would rather force identity attributes to be set for everything,
# but that's not plausible for back compat reasons.
it "uses the name as the identity" do
expect(resource_sans_id.identity).to eq("my-name")
end
end
describe "when an identity attribute has been declared" do
let(:file_resource) do
file_resource_class = Class.new(Chef::Resource) do
identity_attr :path
attr_accessor :path
end
file_resource = file_resource_class.new("identity-attr-test")
file_resource.path = "/tmp/foo.txt"
file_resource
end
it "gives the value of its identity attribute" do
expect(file_resource.identity).to eq("/tmp/foo.txt")
end
end
describe "when declaring state attributes" do
it "has no state_attrs by default" do
expect(Chef::Resource.state_attrs).to be_empty
end
it "sets a list of state attributes" do
resource_class = Class.new(Chef::Resource)
resource_class.state_attrs(:checksum, :owner, :group, :mode)
expect(resource_class.state_attrs).to match_array([:checksum, :owner, :group, :mode])
end
it "inherits state attributes from the superclass" do
resource_class = Class.new(Chef::Resource)
resource_subclass = Class.new(resource_class)
resource_class.state_attrs(:checksum, :owner, :group, :mode)
expect(resource_subclass.state_attrs).to match_array([:checksum, :owner, :group, :mode])
end
it "combines inherited state attributes with non-inherited state attributes" do
resource_class = Class.new(Chef::Resource)
resource_subclass = Class.new(resource_class)
resource_class.state_attrs(:checksum, :owner)
resource_subclass.state_attrs(:group, :mode)
expect(resource_subclass.state_attrs).to match_array([:checksum, :owner, :group, :mode])
end
end
describe "when a set of state attributes has been declared" do
let(:file_resource) do
file_resource_class = Class.new(Chef::Resource) do
state_attrs :checksum, :owner, :group, :mode
attr_accessor :checksum
attr_accessor :owner
attr_accessor :group
attr_accessor :mode
end
file_resource = file_resource_class.new("describe-state-test")
file_resource.checksum = "abc123"
file_resource.owner = "root"
file_resource.group = "wheel"
file_resource.mode = "0644"
file_resource
end
it "describes its state" do
resource_state = file_resource.state_for_resource_reporter
expect(resource_state.keys).to match_array([:checksum, :owner, :group, :mode])
expect(resource_state[:checksum]).to eq("abc123")
expect(resource_state[:owner]).to eq("root")
expect(resource_state[:group]).to eq("wheel")
expect(resource_state[:mode]).to eq("0644")
end
end
describe "#state_for_resource_reporter" do
context "when a property is marked as sensitive" do
it "suppresses the sensitive property's value" do
resource_class = Class.new(Chef::Resource) { property :foo, String, sensitive: true }
resource = resource_class.new("sensitive_property_tests")
resource.foo = "some value"
expect(resource.state_for_resource_reporter[:foo]).to eq("*sensitive value suppressed*")
end
end
context "when a property is not marked as sensitive" do
it "does not suppress the property's value" do
resource_class = Class.new(Chef::Resource) { property :foo, String }
resource = resource_class.new("sensitive_property_tests")
resource.foo = "some value"
expect(resource.state_for_resource_reporter[:foo]).to eq("some value")
end
end
end
describe "load_from" do
let(:prior_resource) do
prior_resource = Chef::Resource.new("funk")
prior_resource.source_line
prior_resource.allowed_actions << :funkytown
prior_resource.action(:funkytown)
prior_resource
end
before(:each) do
resource.allowed_actions << :funkytown
run_context.resource_collection << prior_resource
end
it "should load the attributes of a prior resource" do
resource.load_from(prior_resource)
end
it "should not inherit the action from the prior resource" do
resource.load_from(prior_resource)
expect(resource.action).not_to eq(prior_resource.action)
end
end
describe "name" do
it "should have a name" do
expect(resource.name).to eql("funk")
end
it "should let you set a new name" do
resource.name "monkey"
expect(resource.name).to eql("monkey")
end
it "coerces arrays to names" do
expect(resource.name %w{a b}).to eql("a, b")
end
it "should coerce objects to a string" do
expect(resource.name Object.new).to be_a(String)
end
end
describe "notifies" do
it "should make notified resources appear in the actions hash" do
run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee")
resource.notifies :reload, run_context.resource_collection.find(:zen_master => "coffee")
expect(resource.delayed_notifications.detect { |e| e.resource.name == "coffee" && e.action == :reload }).not_to be_nil
end
it "should make notified resources be capable of acting immediately" do
run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee")
resource.notifies :reload, run_context.resource_collection.find(:zen_master => "coffee"), :immediate
expect(resource.immediate_notifications.detect { |e| e.resource.name == "coffee" && e.action == :reload }).not_to be_nil
end
it "should raise an exception if told to act in other than :delay or :immediate(ly)" do
run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee")
expect do
resource.notifies :reload, run_context.resource_collection.find(:zen_master => "coffee"), :someday
end.to raise_error(ArgumentError)
end
it "should allow multiple notified resources appear in the actions hash" do
run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee")
resource.notifies :reload, run_context.resource_collection.find(:zen_master => "coffee")
expect(resource.delayed_notifications.detect { |e| e.resource.name == "coffee" && e.action == :reload }).not_to be_nil
run_context.resource_collection << Chef::Resource::ZenMaster.new("beans")
resource.notifies :reload, run_context.resource_collection.find(:zen_master => "beans")
expect(resource.delayed_notifications.detect { |e| e.resource.name == "beans" && e.action == :reload }).not_to be_nil
end
it "creates a notification for a resource that is not yet in the resource collection" do
resource.notifies(:restart, :service => "apache")
expected_notification = Chef::Resource::Notification.new({ :service => "apache" }, :restart, resource)
expect(resource.delayed_notifications).to include(expected_notification)
end
it "notifies another resource immediately" do
resource.notifies_immediately(:restart, :service => "apache")
expected_notification = Chef::Resource::Notification.new({ :service => "apache" }, :restart, resource)
expect(resource.immediate_notifications).to include(expected_notification)
end
it "notifies a resource to take action at the end of the chef run" do
resource.notifies_delayed(:restart, :service => "apache")
expected_notification = Chef::Resource::Notification.new({ :service => "apache" }, :restart, resource)
expect(resource.delayed_notifications).to include(expected_notification)
end
it "notifies a resource with an array for its name via its prettified string name" do
run_context.resource_collection << Chef::Resource::ZenMaster.new(%w{coffee tea})
resource.notifies :reload, run_context.resource_collection.find(:zen_master => "coffee, tea")
expect(resource.delayed_notifications.detect { |e| e.resource.name == "coffee, tea" && e.action == :reload }).not_to be_nil
end
it "notifies a resource without a name via a string name with brackets" do
run_context.resource_collection << Chef::Resource::ZenMaster.new("")
resource.notifies :reload, "zen_master[]"
end
it "notifies a resource without a name via a string name without brackets" do
run_context.resource_collection << Chef::Resource::ZenMaster.new("")
resource.notifies :reload, "zen_master"
expect(resource.delayed_notifications.first.resource).to eql("zen_master")
end
it "notifies a resource without a name via a hash name with an empty string" do
run_context.resource_collection << Chef::Resource::ZenMaster.new("")
resource.notifies :reload, zen_master: ""
expect(resource.delayed_notifications.first.resource).to eql(zen_master: "")
end
end
describe "subscribes" do
it "should make resources appear in the actions hash of subscribed nodes" do
run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee")
zr = run_context.resource_collection.find(:zen_master => "coffee")
resource.subscribes :reload, zr
expect(zr.delayed_notifications.detect { |e| e.resource.name == "funk" && e.action == :reload }).not_to be_nil
end
it "should make resources appear in the actions hash of subscribed nodes" do
run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee")
zr = run_context.resource_collection.find(:zen_master => "coffee")
resource.subscribes :reload, zr
expect(zr.delayed_notifications.detect { |e| e.resource.name == resource.name && e.action == :reload }).not_to be_nil
run_context.resource_collection << Chef::Resource::ZenMaster.new("bean")
zrb = run_context.resource_collection.find(:zen_master => "bean")
zrb.subscribes :reload, zr
expect(zr.delayed_notifications.detect { |e| e.resource.name == resource.name && e.action == :reload }).not_to be_nil
end
it "should make subscribed resources be capable of acting immediately" do
run_context.resource_collection << Chef::Resource::ZenMaster.new("coffee")
zr = run_context.resource_collection.find(:zen_master => "coffee")
resource.subscribes :reload, zr, :immediately
expect(zr.immediate_notifications.detect { |e| e.resource.name == resource.name && e.action == :reload }).not_to be_nil
end
end
describe "defined_at" do
it "should correctly parse source_line on unix-like operating systems" do
resource.source_line = "/some/path/to/file.rb:80:in `wombat_tears'"
expect(resource.defined_at).to eq("/some/path/to/file.rb line 80")
end
it "should correctly parse source_line on Windows" do
resource.source_line = "C:/some/path/to/file.rb:80 in 1`wombat_tears'"
expect(resource.defined_at).to eq("C:/some/path/to/file.rb line 80")
end
it "should include the cookbook and recipe when it knows it" do
resource.source_line = "/some/path/to/file.rb:80:in `wombat_tears'"
resource.recipe_name = "wombats"
resource.cookbook_name = "animals"
expect(resource.defined_at).to eq("animals::wombats line 80")
end
it "should recognize dynamically defined resources" do
expect(resource.defined_at).to eq("dynamically defined")
end
end
describe "to_s" do
it "should become a string like resource_name[name]" do
zm = Chef::Resource::ZenMaster.new("coffee")
expect(zm.to_s).to eql("zen_master[coffee]")
end
end
describe "to_text" do
it "prints nice message" do
resource_class = Class.new(Chef::Resource) { property :foo, String }
resource = resource_class.new("sensitive_property_tests")
resource.foo = "some value"
expect(resource.to_text).to match(/foo "some value"/)
end
context "when property is sensitive" do
it "supresses that properties value" do
resource_class = Class.new(Chef::Resource) { property :foo, String, sensitive: true }
resource = resource_class.new("sensitive_property_tests")
resource.foo = "some value"
expect(resource.to_text).to match(/foo "\*sensitive value suppressed\*"/)
end
end
context "when property is required" do
it "does not propagate vailidation errors" do
resource_class = Class.new(Chef::Resource) { property :foo, String, required: true }
resource = resource_class.new("required_property_tests")
expect { resource.to_text }.to_not raise_error Chef::Exceptions::ValidationFailed
end
end
end
context "Documentation of resources" do
it "can have a description" do
c = Class.new(Chef::Resource) do
description "my description"
end
expect(c.description).to eq "my description"
end
it "can say when it was introduced" do
c = Class.new(Chef::Resource) do
introduced "14.0"
end
expect(c.introduced).to eq "14.0"
end
it "can have some examples" do
c = Class.new(Chef::Resource) do
examples <<-EOH
resource "foo" do
foo foo
end
EOH
end
expect(c.examples).to eq <<-EOH
resource "foo" do
foo foo
end
EOH
end
end
describe "self.resource_name" do
context "When resource_name is not set" do
it "and there are no provides lines, resource_name is nil" do
c = Class.new(Chef::Resource) do
end
r = c.new("hi")
r.declared_type = :d
expect(c.resource_name).to be_nil
expect(r.resource_name).to be_nil
expect(r.declared_type).to eq :d
end
it "and there are no provides lines, resource_name is used" do
c = Class.new(Chef::Resource) do
def initialize(*args, &block)
@resource_name = :blah
super
end
end
r = c.new("hi")
r.declared_type = :d
expect(c.resource_name).to be_nil
expect(r.resource_name).to eq :blah
expect(r.declared_type).to eq :d
end
it "and the resource class gets a late-bound name, resource_name is nil" do
c = Class.new(Chef::Resource) do
def self.name
"ResourceSpecNameTest"
end
end
r = c.new("hi")
r.declared_type = :d
expect(c.resource_name).to be_nil
expect(r.resource_name).to be_nil
expect(r.declared_type).to eq :d
end
end
it "resource_name without provides is honored" do
c = Class.new(Chef::Resource) do
resource_name "blah"
end
r = c.new("hi")
r.declared_type = :d
expect(c.resource_name).to eq :blah
expect(r.resource_name).to eq :blah
expect(r.declared_type).to eq :d
end
it "setting class.resource_name with 'resource_name = blah' overrides declared_type" do
c = Class.new(Chef::Resource) do
provides :self_resource_name_test_2
end
c.resource_name = :blah
r = c.new("hi")
r.declared_type = :d
expect(c.resource_name).to eq :blah
expect(r.resource_name).to eq :blah
expect(r.declared_type).to eq :d
end
it "setting class.resource_name with 'resource_name blah' overrides declared_type" do
c = Class.new(Chef::Resource) do
resource_name :blah
provides :self_resource_name_test_3
end
r = c.new("hi")
r.declared_type = :d
expect(c.resource_name).to eq :blah
expect(r.resource_name).to eq :blah
expect(r.declared_type).to eq :d
end
end
describe "to_json" do
it "should serialize to json" do
json = resource.to_json
expect(json).to match(/json_class/)
expect(json).to match(/instance_vars/)
end
include_examples "to_json equivalent to Chef::JSONCompat.to_json" do
let(:jsonable) { resource }
end
end
describe "to_hash" do
context "when the resource has a property with a default" do
let(:resource_class) { Class.new(Chef::Resource) { property :a, default: 1 } }
it "should include the default in the hash" do
expect(resource.to_hash.keys.sort).to eq([:a, :allowed_actions, :params, :provider, :updated,
:updated_by_last_action, :before,
:name, :source_line,
:action, :elapsed_time,
:default_guard_interpreter, :guard_interpreter].sort)
expect(resource.to_hash[:name]).to eq "funk"
expect(resource.to_hash[:a]).to eq 1
end
end
it "should convert to a hash" do
hash = resource.to_hash
expected_keys = [ :allowed_actions, :params, :provider, :updated,
:updated_by_last_action, :before,
:name, :source_line,
:action, :elapsed_time,
:default_guard_interpreter, :guard_interpreter ]
expect(hash.keys - expected_keys).to eq([])
expect(expected_keys - hash.keys).to eq([])
expect(hash[:name]).to eql("funk")
end
end
describe "self.json_create" do
it "should deserialize itself from json" do
json = Chef::JSONCompat.to_json(resource)
serialized_node = Chef::Resource.from_json(json)
expect(serialized_node).to be_a_kind_of(Chef::Resource)
expect(serialized_node.name).to eql(resource.name)
end
end
describe "ignore_failure" do
it "should default to throwing an error if a provider fails for a resource" do
expect(resource.ignore_failure).to eq(false)
end
it "should allow you to set whether a provider should throw exceptions with ignore_failure" do
resource.ignore_failure(true)
expect(resource.ignore_failure).to eq(true)
end
it "should allow you to set quiet ignore_failure as a symbol" do
resource.ignore_failure(:quiet)
expect(resource.ignore_failure).to eq(:quiet)
end
it "should allow you to set quiet ignore_failure as a string" do
resource.ignore_failure("quiet")
expect(resource.ignore_failure).to eq("quiet")
end
end
describe "retries" do
let(:retriable_resource) do
retriable_resource = Chef::Resource::Cat.new("precious", run_context)
retriable_resource.provider = Chef::Provider::SnakeOil
retriable_resource.action = :purr
retriable_resource
end
before do
node.automatic_attrs[:platform] = "fubuntu"
node.automatic_attrs[:platform_version] = "10.04"
end
it "should default to not retrying if a provider fails for a resource" do
expect(retriable_resource.retries).to eq(0)
end
it "should allow you to set how many retries a provider should attempt after a failure" do
retriable_resource.retries(2)
expect(retriable_resource.retries).to eq(2)
end
it "should default to a retry delay of 2 seconds" do
expect(retriable_resource.retry_delay).to eq(2)
end
it "should allow you to set the retry delay" do
retriable_resource.retry_delay(10)
expect(retriable_resource.retry_delay).to eq(10)
end
it "should keep given value of retries intact after the provider fails for a resource" do
retriable_resource.retries(3)
retriable_resource.retry_delay(0) # No need to wait.
provider = Chef::Provider::SnakeOil.new(retriable_resource, run_context)
allow(Chef::Provider::SnakeOil).to receive(:new).and_return(provider)
allow(provider).to receive(:action_purr).and_raise
expect(retriable_resource).to receive(:sleep).exactly(3).times
expect { retriable_resource.run_action(:purr) }.to raise_error(RuntimeError)
expect(retriable_resource.retries).to eq(3)
end
it "should not rescue from non-StandardError exceptions" do
retriable_resource.retries(3)
retriable_resource.retry_delay(0) # No need to wait.
provider = Chef::Provider::SnakeOil.new(retriable_resource, run_context)
allow(Chef::Provider::SnakeOil).to receive(:new).and_return(provider)
allow(provider).to receive(:action_purr).and_raise(LoadError)
expect(retriable_resource).not_to receive(:sleep)
expect { retriable_resource.run_action(:purr) }.to raise_error(LoadError)
end
end
it "runs an action by finding its provider, loading the current resource and then running the action" do
skip
end
describe "when updated by a provider" do
before do
resource.updated_by_last_action(true)
end
it "records that it was updated" do
expect(resource).to be_updated
end
it "records that the last action updated the resource" do
expect(resource).to be_updated_by_last_action
end
describe "and then run again without being updated" do
before do
resource.updated_by_last_action(false)
end
it "reports that it is updated" do
expect(resource).to be_updated
end
it "reports that it was not updated by the last action" do
expect(resource).not_to be_updated_by_last_action
end
end
end
describe "when invoking its action" do
let(:resource) do
resource = Chef::Resource.new("provided", run_context)
resource.provider = Chef::Provider::SnakeOil
resource
end
before do
node.automatic_attrs[:platform] = "fubuntu"
node.automatic_attrs[:platform_version] = "10.04"
end
it "does not run only_if if no only_if command is given" do
expect_any_instance_of(Chef::Resource::Conditional).not_to receive(:evaluate)
resource.only_if.clear
resource.run_action(:purr)
end
it "runs runs an only_if when one is given" do
snitch_variable = nil
resource.only_if { snitch_variable = true }
expect(resource.only_if.first.positivity).to eq(:only_if)
#Chef::Mixin::Command.should_receive(:only_if).with(true, {}).and_return(false)
resource.run_action(:purr)
expect(snitch_variable).to be_truthy
end
it "runs multiple only_if conditionals" do
snitch_var1, snitch_var2 = nil, nil
resource.only_if { snitch_var1 = 1 }
resource.only_if { snitch_var2 = 2 }
resource.run_action(:purr)
expect(snitch_var1).to eq(1)
expect(snitch_var2).to eq(2)
end
it "accepts command options for only_if conditionals" do
expect_any_instance_of(Chef::Resource::Conditional).to receive(:evaluate_command).at_least(1).times
resource.only_if("true", :cwd => "/tmp")
expect(resource.only_if.first.command_opts).to eq({ :cwd => "/tmp" })
resource.run_action(:purr)
end
it "runs not_if as a command when it is a string" do
expect_any_instance_of(Chef::Resource::Conditional).to receive(:evaluate_command).at_least(1).times
resource.not_if "pwd"
resource.run_action(:purr)
end
it "runs not_if as a block when it is a ruby block" do
expect_any_instance_of(Chef::Resource::Conditional).to receive(:evaluate_block).at_least(1).times
resource.not_if { puts "foo" }
resource.run_action(:purr)
end
it "does not run not_if if no not_if command is given" do
expect_any_instance_of(Chef::Resource::Conditional).not_to receive(:evaluate)
resource.not_if.clear
resource.run_action(:purr)
end
it "accepts command options for not_if conditionals" do
resource.not_if("pwd" , :cwd => "/tmp")
expect(resource.not_if.first.command_opts).to eq({ :cwd => "/tmp" })
end
it "accepts multiple not_if conditionals" do
snitch_var1, snitch_var2 = true, true
resource.not_if { snitch_var1 = nil }
resource.not_if { snitch_var2 = false }
resource.run_action(:purr)
expect(snitch_var1).to be_nil
expect(snitch_var2).to be_falsey
end
it "reports 0 elapsed time if actual elapsed time is < 0" do
expected = Time.now
allow(Time).to receive(:now).and_return(expected, expected - 1)
resource.run_action(:purr)
expect(resource.elapsed_time).to eq(0)
end
describe "guard_interpreter attribute" do
it "should be set to :default by default" do
expect(resource.guard_interpreter).to eq(:default)
end
it "if set to :default should return :default when read" do
resource.guard_interpreter(:default)
expect(resource.guard_interpreter).to eq(:default)
end
it "should raise Chef::Exceptions::ValidationFailed on an attempt to set the guard_interpreter attribute to something other than a Symbol" do
expect { resource.guard_interpreter("command_dot_com") }.to raise_error(Chef::Exceptions::ValidationFailed)
end
it "should not raise an exception when setting the guard interpreter attribute to a Symbol" do
allow(Chef::GuardInterpreter::ResourceGuardInterpreter).to receive(:new).and_return(nil)
expect { resource.guard_interpreter(:command_dot_com) }.not_to raise_error
end
end
end
describe "should_skip?" do
before do
resource = Chef::Resource::Cat.new("sugar", run_context)
end
it "should return false by default" do
expect(resource.should_skip?(:purr)).to be_falsey
end
it "should return false when only_if is met" do
resource.only_if { true }
expect(resource.should_skip?(:purr)).to be_falsey
end
it "should return true when only_if is not met" do
resource.only_if { false }
expect(resource.should_skip?(:purr)).to be_truthy
end
it "should return true when not_if is met" do
resource.not_if { true }
expect(resource.should_skip?(:purr)).to be_truthy
end
it "should return false when not_if is not met" do
resource.not_if { false }
expect(resource.should_skip?(:purr)).to be_falsey
end
it "should return true when only_if is met but also not_if is met" do
resource.only_if { true }
resource.not_if { true }
expect(resource.should_skip?(:purr)).to be_truthy
end
it "should return false when only_if is met and also not_if is not met" do
resource.only_if { true }
resource.not_if { false }
expect(resource.should_skip?(:purr)).to be_falsey
end
it "should return true when one of multiple only_if's is not met" do
resource.only_if { true }
resource.only_if { false }
resource.only_if { true }
expect(resource.should_skip?(:purr)).to be_truthy
end
it "should return true when one of multiple not_if's is met" do
resource.not_if { false }
resource.not_if { true }
resource.not_if { false }
expect(resource.should_skip?(:purr)).to be_truthy
end
it "should return false when all of multiple only_if's are met" do
resource.only_if { true }
resource.only_if { true }
resource.only_if { true }
expect(resource.should_skip?(:purr)).to be_falsey
end
it "should return false when all of multiple not_if's are not met" do
resource.not_if { false }
resource.not_if { false }
resource.not_if { false }
expect(resource.should_skip?(:purr)).to be_falsey
end
it "should return true when action is :nothing" do
expect(resource.should_skip?(:nothing)).to be_truthy
end
it "should return true when action is :nothing ignoring only_if/not_if conditionals" do
resource.only_if { true }
resource.not_if { false }
expect(resource.should_skip?(:nothing)).to be_truthy
end
it "should print \"skipped due to action :nothing\" message for doc formatter when action is :nothing" do
fdoc = Chef::Formatters.new(:doc, STDOUT, STDERR)
allow(run_context).to receive(:events).and_return(fdoc)
expect(fdoc).to receive(:puts).with(" (skipped due to action :nothing)", anything())
resource.should_skip?(:nothing)
end
end
describe "when resource action is :nothing" do
let(:resource1) do
resource1 = Chef::Resource::Cat.new("sugar", run_context)
resource1.action = :nothing
resource1
end
before do
node.automatic_attrs[:platform] = "fubuntu"
node.automatic_attrs[:platform_version] = "10.04"
end
it "should not run only_if/not_if conditionals (CHEF-972)" do
snitch_var1 = 0
resource1.only_if { snitch_var1 = 1 }
resource1.not_if { snitch_var1 = 2 }
resource1.run_action(:nothing)
expect(snitch_var1).to eq(0)
end
it "should run only_if/not_if conditionals when notified to run another action (CHEF-972)" do
snitch_var1 = snitch_var2 = 0
runner = Chef::Runner.new(run_context)
Chef::Provider::SnakeOil.provides :cat, __core_override__: true
resource1.only_if { snitch_var1 = 1 }
resource1.not_if { snitch_var2 = 2 }
resource2 = Chef::Resource::Cat.new("coffee", run_context)
resource2.notifies :purr, resource1
resource2.action = :purr
run_context.resource_collection << resource1
run_context.resource_collection << resource2
runner.converge
expect(snitch_var1).to eq(1)
expect(snitch_var2).to eq(2)
end
end
describe "building the platform map" do
let(:klz) { Class.new(Chef::Resource) }
before do
Chef::Resource::Klz = klz
end
after do
Chef::Resource.send(:remove_const, :Klz)
end
it "adds mappings for a single platform" do
expect(Chef.resource_handler_map).to receive(:set).with(
:dinobot, Chef::Resource::Klz, { platform: ["autobots"] }
)
klz.provides :dinobot, platform: ["autobots"]
end
it "adds mappings for multiple platforms" do
expect(Chef.resource_handler_map).to receive(:set).with(
:energy, Chef::Resource::Klz, { platform: %w{autobots decepticons} }
)
klz.provides :energy, platform: %w{autobots decepticons}
end
it "adds mappings for all platforms" do
expect(Chef.resource_handler_map).to receive(:set).with(
:tape_deck, Chef::Resource::Klz, {}
)
klz.provides :tape_deck
end
end
describe "resource_for_node" do
describe "lookups from the platform map" do
let(:klz1) { Class.new(Chef::Resource) }
before(:each) do
Chef::Resource::Klz1 = klz1
node = Chef::Node.new
node.name("bumblebee")
node.automatic[:platform] = "autobots"
node.automatic[:platform_version] = "6.1"
Object.const_set("Soundwave", klz1)
klz1.provides :soundwave
end
after(:each) do
Object.send(:remove_const, :Soundwave)
Chef::Resource.send(:remove_const, :Klz1)
end
it "returns a resource by short_name if nothing else matches" do
expect(Chef::Resource.resource_for_node(:soundwave, node)).to eql(klz1)
end
end
describe "lookups from the platform map" do
let(:klz2) { Class.new(Chef::Resource) }
before(:each) do
Chef::Resource::Klz2 = klz2
node.name("bumblebee")
node.automatic[:platform] = "autobots"
node.automatic[:platform_version] = "6.1"
klz2.provides :dinobot, :platform => ["autobots"]
Object.const_set("Grimlock", klz2)
klz2.provides :grimlock
end
after(:each) do
Object.send(:remove_const, :Grimlock)
Chef::Resource.send(:remove_const, :Klz2)
end
it "returns a resource by short_name and node" do
expect(Chef::Resource.resource_for_node(:dinobot, node)).to eql(klz2)
end
end
end
describe "when creating notifications" do
describe "with a string resource spec" do
it "creates a delayed notification when timing is not specified" do
resource.notifies(:run, "execute[foo]")
expect(run_context.delayed_notification_collection.size).to eq(1)
end
it "creates a delayed notification when :delayed is not specified" do
resource.notifies(:run, "execute[foo]", :delayed)
expect(run_context.delayed_notification_collection.size).to eq(1)
end
it "creates an immediate notification when :immediate is specified" do
resource.notifies(:run, "execute[foo]", :immediate)
expect(run_context.immediate_notification_collection.size).to eq(1)
end
it "creates an immediate notification when :immediately is specified" do
resource.notifies(:run, "execute[foo]", :immediately)
expect(run_context.immediate_notification_collection.size).to eq(1)
end
describe "with a syntax error in the resource spec" do
it "raises an exception immmediately" do
expect do
resource.notifies(:run, "typo[missing-closing-bracket")
end.to raise_error(Chef::Exceptions::InvalidResourceSpecification)
end
end
end
describe "with a resource reference" do
let(:notified_resource) { Chef::Resource.new("punk", run_context) }
it "creates a delayed notification when timing is not specified" do
resource.notifies(:run, notified_resource)
expect(run_context.delayed_notification_collection.size).to eq(1)
end
it "creates a delayed notification when :delayed is not specified" do
resource.notifies(:run, notified_resource, :delayed)
expect(run_context.delayed_notification_collection.size).to eq(1)
end
it "creates an immediate notification when :immediate is specified" do
resource.notifies(:run, notified_resource, :immediate)
expect(run_context.immediate_notification_collection.size).to eq(1)
end
it "creates an immediate notification when :immediately is specified" do
resource.notifies(:run, notified_resource, :immediately)
expect(run_context.immediate_notification_collection.size).to eq(1)
end
end
end
describe "resource sensitive attribute" do
let(:resource_file) { Chef::Resource::File.new("/nonexistent/CHEF-5098/file", run_context) }
let(:action) { :create }
def compiled_resource_data(resource, action, err)
error_inspector = Chef::Formatters::ErrorInspectors::ResourceFailureInspector.new(resource, action, err)
description = Chef::Formatters::ErrorDescription.new("test")
error_inspector.add_explanation(description)
Chef::Log.info("descrtiption: #{description.inspect},error_inspector: #{error_inspector}")
description.sections[1]["Compiled Resource:"]
end
it "set to false by default" do
expect(resource.sensitive).to be_falsey
end
it "when set to false should show compiled resource for failed resource" do
expect { resource_file.run_action(action) }.to raise_error { |err|
expect(compiled_resource_data(resource_file, action, err)).to match 'path "/nonexistent/CHEF-5098/file"'
}
end
it "when set to true should show compiled resource for failed resource" do
resource_file.sensitive true
expect { resource_file.run_action(action) }.to raise_error { |err|
expect(compiled_resource_data(resource_file, action, err)).to eql("suppressed sensitive resource output")
}
end
end
describe "#action" do
let(:resource_class) do
Class.new(described_class) do
allowed_actions(%i{one two})
end
end
let(:resource) { resource_class.new("test", nil) }
subject { resource.action }
context "with a no action" do
it { is_expected.to eq [:nothing] }
end
context "with a default action" do
let(:resource_class) do
Class.new(described_class) do
default_action(:one)
end
end
it { is_expected.to eq [:one] }
end
context "with a symbol action" do
before { resource.action(:one) }
it { is_expected.to eq [:one] }
end
context "with a string action" do
before { resource.action("two") }
it { is_expected.to eq [:two] }
end
context "with an array action" do
before { resource.action([:two, :one]) }
it { is_expected.to eq [:two, :one] }
end
context "with an assignment" do
before { resource.action = :one }
it { is_expected.to eq [:one] }
end
context "with an array assignment" do
before { resource.action = [:two, :one] }
it { is_expected.to eq [:two, :one] }
end
context "with an invalid action" do
it { expect { resource.action(:three) }.to raise_error Chef::Exceptions::ValidationFailed }
end
context "with an invalid assignment action" do
it { expect { resource.action = :three }.to raise_error Chef::Exceptions::ValidationFailed }
end
end
describe ".default_action" do
let(:default_action) {}
let(:resource_class) do
actions = default_action
Class.new(described_class) do
default_action(actions) if actions
end
end
subject { resource_class.default_action }
context "with no default actions" do
it { is_expected.to eq [:nothing] }
end
context "with a symbol default action" do
let(:default_action) { :one }
it { is_expected.to eq [:one] }
end
context "with a string default action" do
let(:default_action) { "one" }
it { is_expected.to eq [:one] }
end
context "with an array default action" do
let(:default_action) { [:two, :one] }
it { is_expected.to eq [:two, :one] }
end
end
describe ".preview_resource" do
let(:klass) { Class.new(Chef::Resource) }
before do
allow(Chef::DSL::Resources).to receive(:add_resource_dsl).with(:test_resource)
end
it "defaults to false" do
expect(klass.preview_resource).to eq false
end
it "can be set to true" do
klass.preview_resource(true)
expect(klass.preview_resource).to eq true
end
it "does not affect provides by default" do
expect(Chef.resource_handler_map).to receive(:set).with(:test_resource, klass, { canonical: true })
klass.resource_name(:test_resource)
end
it "adds allow_cookbook_override when true" do
expect(Chef.resource_handler_map).to receive(:set).with(:test_resource, klass, { canonical: true, allow_cookbook_override: true })
klass.preview_resource(true)
klass.resource_name(:test_resource)
end
it "allows manually overriding back to false" do
expect(Chef.resource_handler_map).to receive(:set).with(:test_resource, klass, { allow_cookbook_override: false })
klass.preview_resource(true)
klass.provides(:test_resource, allow_cookbook_override: false)
end
end
end
| 35.148054 | 147 | 0.680828 |
ffe0e45937d4e020c87ca9ba77f2d6aa434a2064 | 3,881 | #CLI controller
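# Drives the interactive game loop: scrapes the available classes, greets the
# player, walks them through class selection, then runs a simple fight-or-run
# encounter loop until the player chooses to exit.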
class RollForInitiative::CLI
attr_accessor :player_name, :klasses
@@site = "https://www.dndbeyond.com/classes"
def call
RollForInitiative::Scraper.new.klasses(@@site)
RollForInitiative::Klass.get_klass_names
welcome_msg
klass_list
end
def welcome_msg
puts "Welcome to The Dragon Province!"
puts "'exit' can be entered at any time to leave"
sleep(1)
puts "Please, enter your name:"
@player_name = gets.strip
if @player_name.downcase == 'exit'
goodbye
end
sleep(2)
puts "What a curious name..."
sleep(2)
end
def klass_list
puts "Choose your class mortal:"
sleep(1)
@klasses = RollForInitiative::Klass.all
@klasses.each do |klass|
puts "#{klass.name} - #{klass.short}"
end
sleep(1)
klass_choose
end
def klass_choose
puts "I'm a very particular god the class must be spelled right. OR ELSE!!!"
@klass_picked = nil
@klass_picked = gets.strip.downcase
if RollForInitiative::Klass.klass_name_list.include?(@klass_picked)
RollForInitiative::Klass.all.each do |type|
if type.name == @klass_picked.split.map(&:capitalize).join(' ')
RollForInitiative::Scraper.new.grab_encounter(@@site.gsub('/classes', type.klass_url))
puts "#{RollForInitiative::Scraper.encounter}"
sleep(2)
puts "Does this sound like you? (y/n)"
while input = gets.strip.downcase
case input
when "y"
@chosen_klass = type
picked
when "n"
klass_list
when "exit"
goodbye
else
puts "Only enter 'y' or 'n'."
end
end
end
end
elsif @klass_picked == 'exit'
goodbye
else
puts "You are trying my patience. Check your spelling!"
sleep(2)
klass_list
end
end
def picked
puts "Time for adventure #{@player_name} the #{@chosen_klass.name}."
sleep(1)
adventure
end
def adventure
RollForInitiative::Scraper.new.who_you_fight
puts "Let's see who you fight:"
sleep(1)
puts "#{RollForInitiative::Scraper.fight}"
puts "Will you 'Fight' or 'Run'?"
while input = gets.strip.downcase
case input
when "fight"
puts "you strike down your opponent. You win!"
sleep(1)
fight_again
when "run"
puts "Your enemy strikes you while your back is turned. You Lose"
sleep(1)
fight_again
when "exit"
goodbye
else
puts "I don't understand your command only enter 'Fight' or 'run'"
end
end
end
def fight_again
puts "would you like to fight again?(y/n)"
while input = gets.strip.downcase
case input
when 'y'
adventure
when 'n'
goodbye
when 'exit'
goodbye
else
puts "I don't understand the command only enter 'y' or 'n'."
end
end
end
def goodbye
puts "Goodbye...for now."
exit
end
end
| 28.536765 | 107 | 0.472043 |
5d281b6ed992228d8b6595c09d90a54b8bf8431b | 5,109 | # frozen_string_literal: true
require_relative '../../../step/buy_sell_par_shares'
require_relative '../../../step/share_buying'
require_relative '../../../action/buy_shares'
require_relative '../../../action/par'
module Engine
module Game
module G1873
module Step
class BuySellParShares < Engine::Step::BuySellParShares
def description
'Sell then Buy Certificates or Form Public Mine'
end
def setup
@reopened = nil
super
end
def purchasable_companies(_entity)
[]
end
def can_ipo_any?(entity)
!bought? && @game.corporations.any? do |c|
@game.can_par?(c, entity) && (@game.public_mine?(c) || can_buy?(entity, c.shares.first&.to_bundle))
end
end
def can_buy?(entity, bundle)
corp = bundle.corporation
return if corp.receivership? && [email protected]_restart?(corp, entity)
super
end
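          # A corporation reopened from receivership this turn may be bought up to a
          # 40% holding; a railway allows multiple buys only in the turn it was parred
          # and before any other share purchase (per the checks below).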
def can_buy_multiple?(entity, corporation, _owner)
return unless corporation.corporation?
if @reopened == corporation
entity.percent_of(corporation) < 40
elsif @game.railway?(corporation)
@round.current_actions.any? { |x| x.is_a?(Action::Par) && x.corporation == corporation } &&
@round.current_actions.none? { |x| x.is_a?(Action::BuyShares) }
else
false
end
end
def can_sell?(entity, bundle)
return unless bundle
corporation = bundle.corporation
timing = @game.check_sale_timing(entity, corporation)
timing &&
!(@game.class::MUST_SELL_IN_BLOCKS && @round.players_sold[entity][corporation] == :now) &&
can_sell_order? &&
(@game.share_pool.fit_in_bank?(bundle) || corporation == @game.mhe) &&
can_dump?(entity, bundle) &&
president_can_sell?(entity, corporation)
end
# president of corp can't dump unless someone else has 20% - even with a president cert of 10%
def can_dump?(entity, bundle)
@game.dumpable?(bundle, entity)
end
          # president of a RR can never drop below 20% if it hasn't finished its concession (operated)
          # or if nobody else holds at least 20%
def president_can_sell?(entity, corporation)
return true unless corporation.owner == entity
return true if [email protected]_pending?(corporation) || corporation == @game.mhe
corporation.share_holders[entity] > 20
end
def ipo_type(entity)
if @game.railway?(entity)
:par
else
:form
end
end
def get_par_prices(entity, corp)
@game
.stock_market
.par_prices
.select { |p| p.price <= entity.cash || @game.public_mine?(corp) }
end
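          # Shares offered from the market pool: a 10-share corporation in receivership
          # (other than the MHE) is restarted by taking a single 20% bundle, so only that
          # bundle is offered; otherwise one bundle per distinct share size is offered,
          # largest first.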
def pool_shares(corporation)
if corporation.receivership? && corporation != @game.mhe && corporation.total_shares == 10
shares = @game.share_pool.shares_by_corporation[corporation].reject(&:president).reverse
# offer 20% bundle
[ShareBundle.new(shares.take(2))]
else
@game.share_pool.shares_by_corporation[corporation].group_by(&:percent).values
.map(&:first).sort_by(&:percent).reverse
end
end
def process_buy_shares(action)
corporation = action.bundle.corporation
was_receivership = corporation.receivership? && corporation != @game.mhe
buy_shares(action.entity, action.bundle, swap: action.swap,
allow_president_change: @game.pres_change_ok?(corporation))
if was_receivership
@reopened = corporation
remove_company(action.entity, corporation)
end
track_action(action, corporation)
end
def process_par(action)
corporation = action.corporation
entity = action.entity
if @game.railway?(corporation)
super
remove_company(entity, corporation)
@game.replace_company!(corporation)
return
end
form_public_mine(entity, corporation)
track_action(action, corporation)
end
def form_public_mine(entity, corporation)
corporation.owner = entity
@round.pending_forms << { corporation: corporation, owner: entity, targets: [] }
@log << "#{entity.name} forms Public Mining Company #{corporation.name}"
end
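          # Removes the company that fronts this corporation from the player's list of
          # companies once the corporation opens or restarts (assumption: the company
          # shares its id with the corporation, per the lookup below).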
def remove_company(entity, corporation)
co = @game.companies.find { |c| c.id == corporation.id }
entity.companies.delete(co)
co.owner = nil
end
end
end
end
end
end
| 33.834437 | 113 | 0.559796 |
08a7a075fed1a9cd379d721f6da961ecc30eca07 | 10,855 | # Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# Properties for creating a network source object.
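  #
  # @example Building a request payload (illustrative sketch only; the OCID,
  #   name, and CIDR values below are placeholders, not real identifiers)
  #   details = OCI::Identity::Models::CreateNetworkSourceDetails.new(
  #     compartment_id: 'ocid1.tenancy.oc1..exampleuniqueID',
  #     name: 'corp-vpn-addresses',
  #     description: 'Addresses allowed to authenticate to the tenancy',
  #     public_source_list: ['203.0.113.0/24']
  #   )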
class Identity::Models::CreateNetworkSourceDetails
# **[Required]** The OCID of the tenancy (root compartment) containing the network source object.
# @return [String]
attr_accessor :compartment_id
# **[Required]** The name you assign to the network source during creation. The name must be unique across all groups
# in the tenancy and cannot be changed.
#
# @return [String]
attr_accessor :name
# A list of allowed public IP addresses and CIDR ranges.
#
# @return [Array<String>]
attr_accessor :public_source_list
# A list of allowed VCN OCID and IP range pairs.
# Example:`\"vcnId\": \"ocid1.vcn.oc1.iad.aaaaaaaaexampleuniqueID\", \"ipRanges\": [ \"129.213.39.0/24\" ]`
#
# @return [Array<OCI::Identity::Models::NetworkSourcesVirtualSourceList>]
attr_accessor :virtual_source_list
# A list of services allowed to make on-behalf-of requests. These requests can have different source IP addresses
# than those listed in the network source.
# Currently, only `all` and `none` are supported. The default is `all`.
#
# @return [Array<String>]
attr_accessor :services
# **[Required]** The description you assign to the network source during creation. Does not have to be unique, and it's changeable.
# @return [String]
attr_accessor :description
# Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
# For more information, see [Resource Tags](https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
# Example: `{\"Department\": \"Finance\"}`
#
# @return [Hash<String, String>]
attr_accessor :freeform_tags
# Defined tags for this resource. Each key is predefined and scoped to a namespace.
# For more information, see [Resource Tags](https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
# Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
#
# @return [Hash<String, Hash<String, Object>>]
attr_accessor :defined_tags
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'compartment_id': :'compartmentId',
'name': :'name',
'public_source_list': :'publicSourceList',
'virtual_source_list': :'virtualSourceList',
'services': :'services',
'description': :'description',
'freeform_tags': :'freeformTags',
'defined_tags': :'definedTags'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'compartment_id': :'String',
'name': :'String',
'public_source_list': :'Array<String>',
'virtual_source_list': :'Array<OCI::Identity::Models::NetworkSourcesVirtualSourceList>',
'services': :'Array<String>',
'description': :'String',
'freeform_tags': :'Hash<String, String>',
'defined_tags': :'Hash<String, Hash<String, Object>>'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :compartment_id The value to assign to the {#compartment_id} property
# @option attributes [String] :name The value to assign to the {#name} property
# @option attributes [Array<String>] :public_source_list The value to assign to the {#public_source_list} property
# @option attributes [Array<OCI::Identity::Models::NetworkSourcesVirtualSourceList>] :virtual_source_list The value to assign to the {#virtual_source_list} property
# @option attributes [Array<String>] :services The value to assign to the {#services} property
# @option attributes [String] :description The value to assign to the {#description} property
# @option attributes [Hash<String, String>] :freeform_tags The value to assign to the {#freeform_tags} property
# @option attributes [Hash<String, Hash<String, Object>>] :defined_tags The value to assign to the {#defined_tags} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.compartment_id = attributes[:'compartmentId'] if attributes[:'compartmentId']
raise 'You cannot provide both :compartmentId and :compartment_id' if attributes.key?(:'compartmentId') && attributes.key?(:'compartment_id')
self.compartment_id = attributes[:'compartment_id'] if attributes[:'compartment_id']
self.name = attributes[:'name'] if attributes[:'name']
self.public_source_list = attributes[:'publicSourceList'] if attributes[:'publicSourceList']
raise 'You cannot provide both :publicSourceList and :public_source_list' if attributes.key?(:'publicSourceList') && attributes.key?(:'public_source_list')
self.public_source_list = attributes[:'public_source_list'] if attributes[:'public_source_list']
self.virtual_source_list = attributes[:'virtualSourceList'] if attributes[:'virtualSourceList']
raise 'You cannot provide both :virtualSourceList and :virtual_source_list' if attributes.key?(:'virtualSourceList') && attributes.key?(:'virtual_source_list')
self.virtual_source_list = attributes[:'virtual_source_list'] if attributes[:'virtual_source_list']
self.services = attributes[:'services'] if attributes[:'services']
self.description = attributes[:'description'] if attributes[:'description']
self.freeform_tags = attributes[:'freeformTags'] if attributes[:'freeformTags']
raise 'You cannot provide both :freeformTags and :freeform_tags' if attributes.key?(:'freeformTags') && attributes.key?(:'freeform_tags')
self.freeform_tags = attributes[:'freeform_tags'] if attributes[:'freeform_tags']
self.defined_tags = attributes[:'definedTags'] if attributes[:'definedTags']
raise 'You cannot provide both :definedTags and :defined_tags' if attributes.key?(:'definedTags') && attributes.key?(:'defined_tags')
self.defined_tags = attributes[:'defined_tags'] if attributes[:'defined_tags']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
compartment_id == other.compartment_id &&
name == other.name &&
public_source_list == other.public_source_list &&
virtual_source_list == other.virtual_source_list &&
services == other.services &&
description == other.description &&
freeform_tags == other.freeform_tags &&
defined_tags == other.defined_tags
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[compartment_id, name, public_source_list, virtual_source_list, services, description, freeform_tags, defined_tags].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
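    # @example Rehydrating from API-style (camelCase) keys — an illustrative
    #   sketch; the value is a placeholder:
    #   details = OCI::Identity::Models::CreateNetworkSourceDetails.new
    #   details.build_from_hash('publicSourceList' => ['203.0.113.0/24'])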
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
        # otherwise the data was not found in the attributes hash, which is fine because the attribute can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 42.568627 | 245 | 0.689083 |
1d166b836b9b4e9926c090e1a766a3035a0c0f44 | 17,567 | # encoding: utf-8
#--
# Copyright DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#++
require File.dirname(__FILE__) + '/integration_test_case.rb'
require File.dirname(__FILE__) + '/datatype_utils.rb'
class MetadataTest < IntegrationTestCase
def setup
@@ccm_cluster.setup_schema("CREATE KEYSPACE simplex WITH replication = {'class': 'SimpleStrategy', 'replication_factor': 1}")
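    # Keep the schema refresh delay/timeout low so the driver's metadata picks up
    # the CREATE TABLE statements below almost immediately during these tests.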
@cluster = Cassandra.cluster(
schema_refresh_delay: 0.1,
schema_refresh_timeout: 0.1
)
@listener = SchemaChangeListener.new(@cluster)
@session = @cluster.connect('simplex')
@session.execute("CREATE TABLE simplex.users (user_id bigint, first text, last text, age int, PRIMARY KEY (user_id, last))")
@listener.wait_for_table('simplex', 'users')
@session.execute("CREATE TABLE simplex.custom (f1 int PRIMARY KEY," \
" f2 'org.apache.cassandra.db.marshal.CompositeType(org.apache.cassandra.db.marshal.UUIDType,org.apache.cassandra.db.marshal.UTF8Type)')")
@listener.wait_for_table('simplex', 'custom')
@session.execute("CREATE TABLE simplex.test1 (key text, value text, PRIMARY KEY(key))")
@listener.wait_for_table('simplex', 'test1')
@session.execute("CREATE TABLE simplex.test2 (key text, value text, PRIMARY KEY(key))")
@listener.wait_for_table('simplex', 'test2')
@session.execute("CREATE TABLE simplex.audit (key timeuuid, keyspace_name text, table_name text, primary_key text, PRIMARY KEY(key))")
@listener.wait_for_table('simplex', 'audit')
@session.execute(<<EOF)
CREATE TABLE rb264(inclusion_r_t text, inclusion_r_id text, inclusion_uaid timeuuid, own_t text, own_id text,
PRIMARY KEY (inclusion_r_t, inclusion_r_id, inclusion_uaid, own_t, own_id)
) WITH CLUSTERING ORDER BY (inclusion_r_id ASC, inclusion_uaid ASC, own_t ASC, own_id ASC)
EOF
@listener.wait_for_table('simplex', 'rb264')
end
def teardown
@cluster && @cluster.close
end
# Test for retrieving keyspace metadata
#
# test_can_retrieve_keyspace_metadata tests that all pieces of keyspace metadata can be retrieved. It goes through
# each piece of keyspace metadata and verifies that each piece is as expected.
#
# @since 3.0.0
# @jira_ticket RUBY-181
# @expected_result keyspace metadata should be retrieved.
#
# @test_category metadata
#
def test_can_retrieve_keyspace_metadata
ks_meta = @cluster.keyspace('simplex')
assert_equal 'simplex', ks_meta.name
assert_equal 'SimpleStrategy', ks_meta.replication.klass
assert_equal 1, ks_meta.replication.options['replication_factor'].to_i
assert ks_meta.durable_writes?
assert ks_meta.has_table?('users')
results =
if CCM.cassandra_version < '3.0'
@session.execute("SELECT columnfamily_name FROM system.schema_columnfamilies WHERE keyspace_name = 'simplex'")
else
@session.execute("SELECT table_name FROM system_schema.tables WHERE keyspace_name = 'simplex'")
end
assert_equal results.size, ks_meta.tables.size
ks_cql = Regexp.new(/CREATE KEYSPACE simplex WITH replication = {'class': 'SimpleStrategy', \
'replication_factor': '1'} AND durable_writes = true;/)
assert_match ks_cql, ks_meta.to_cql
end
# Test for retrieving table metadata
#
# test_can_retrieve_table_metadata tests that all pieces of table metadata can be retrieved. It goes through each piece
# of table metadata and verifies that each piece is as expected.
#
# @since 3.0.0
# @jira_ticket RUBY-181
# @expected_result table metadata should be retrieved.
#
# @test_category metadata
#
def test_can_retrieve_table_metadata
assert @cluster.keyspace('simplex').has_table?('users')
table_meta = @cluster.keyspace('simplex').table('users')
assert_equal 'users', table_meta.name
assert_equal 'simplex', table_meta.keyspace.name
assert_empty table_meta.indexes
refute_nil table_meta.id unless CCM.cassandra_version < '3.0.0'
refute_nil table_meta.options
assert_columns([['user_id', :bigint], ['last', :text]], table_meta.primary_key)
assert_columns([['user_id', :bigint]], table_meta.partition_key)
assert_columns([ ['last', :text]], table_meta.clustering_columns)
assert_equal :asc, table_meta.clustering_order.first
assert_equal 4, table_meta.columns.size
table_meta.each_column do |column|
assert ['user_id', 'first', 'last', 'age'].any? { |name| name == column.name }
assert [:bigint, :text, :int].any? { |type| type == column.type.kind }
end
end
# Regression test for retrieving table metadata, RUBY-264
#
# test_ruby_264 tests that a table with a relatively large number of clustering columns is not erroneously
# considered to use compact storage.
#
# @since 3.0.0
# @jira_ticket RUBY-264
# @expected_result table metadata should be retrieved.
#
# @test_category metadata
#
def test_ruby_264
assert @cluster.keyspace('simplex').has_table?('rb264')
table_meta = @cluster.keyspace('simplex').table('rb264')
assert(!table_meta.options.compact_storage?)
end
# Test for column ordering in table metadata
#
# test_column_ordering_is_deterministic tests that the metadata relating to the columns are retrieved in the proper
# order. This proper order is: partition key, clustering columns, and then all other columns alphanumerically.
#
# @since 3.0.0
# @jira_ticket RUBY-180
# @expected_result column ordering should be correct in table metadata.
#
# @test_category metadata
#
def test_column_ordering_is_deterministic
assert @cluster.keyspace('simplex').has_table?('users')
table_meta = @cluster.keyspace('simplex').table('users')
table_cql = Regexp.new(/CREATE TABLE simplex\."users" \(
user_id bigint,
last text,
age int,
first text,
PRIMARY KEY \(user_id, last\)
\)/)
assert_equal 0, table_meta.to_cql =~ table_cql
col_names = ['user_id', 'last', 'age', 'first']
table_meta.each_column do |column|
assert_equal col_names[0], column.name
col_names.delete_at(0)
end
end
# Test for retrieving table metadata with quoted identifiers
#
# test_can_retrieve_quoted_table_metadata tests that all pieces of table metadata can be retrieved, when the
# table has quoted identifiers. It goes through each piece of table view metadata and verifies that each piece
# is as expected.
#
# @since 3.0.0
# @jira_ticket RUBY-175
# @expected_result table metadata with quoted identifiers should be retrieved.
#
# @test_category metadata
#
def test_can_retrieve_quoted_table_metadata
# Check upper-case chars, unescaped upper-case chars, quoted numbers, quoted identifier with single quote,
# quoted identifier with double quotes
@session.execute("CREATE TABLE roles (\"FOO\" text, BAR ascii, \"10\" int, \"'20'\" int, \"\"\"30\"\"\" int,
\"f00\"\"b4r\" text, PRIMARY KEY (\"FOO\", BAR, \"10\", \"'20'\", \"\"\"30\"\"\", \"f00\"\"b4r\"))")
@listener.wait_for_table('simplex', 'roles')
assert @cluster.keyspace('simplex').has_table?('roles')
table_meta = @cluster.keyspace('simplex').table('roles')
assert_equal 'roles', table_meta.name
assert_equal 'simplex', table_meta.keyspace.name
assert_empty table_meta.indexes
refute_nil table_meta.id unless CCM.cassandra_version < '3.0.0'
refute_nil table_meta.options
assert_columns([['FOO', :text], ['bar', :ascii], ['10', :int], ["'20'", :int], ["\"30\"", :int], ["f00\"b4r", :text]],
table_meta.primary_key)
assert_columns([['FOO', :text]], table_meta.partition_key)
assert_columns([['bar', :ascii], ['10', :int], ["'20'", :int], ["\"30\"", :int], ["f00\"b4r", :text]],
table_meta.clustering_columns)
assert_equal :asc, table_meta.clustering_order.first
assert_equal 6, table_meta.columns.size
table_meta.each_column do |column|
assert ['FOO', 'bar', '10', "'20'", "\"30\"", "f00\"b4r"].any? { |name| name == column.name }
assert [:text, :ascii, :int].any? { |type| type == column.type.kind }
end
table_cql = Regexp.new(/CREATE TABLE simplex\."roles" \(
"FOO" text,
bar ascii,
"10" int,
"'20'" int,
"""30""" int,
"f00""b4r" text,
PRIMARY KEY \("FOO", bar, "10", "'20'", """30""", "f00""b4r"\)
\)/)
assert_equal 0, table_meta.to_cql =~ table_cql
@session.execute("DROP TABLE roles")
# Check all the reserved words
reserved_word_int_list = ["zz int PRIMARY KEY"]
DatatypeUtils.reserved_words.each do |word|
reserved_word_int_list.push("\"#{word}\" int")
end
@session.execute("CREATE TABLE reserved_words (#{reserved_word_int_list.join(',')})")
@listener.wait_for_table('simplex', 'reserved_words')
assert @cluster.keyspace('simplex').has_table?('reserved_words')
table_meta = @cluster.keyspace('simplex').table('reserved_words')
refute_nil table_meta.to_cql
@session.execute("DROP TABLE reserved_words")
end
# Test for skipping internal columns in static-compact tables
#
# test_skip_internal_columns_for_static_compact_table tests that the "column1 text" clustering
# column and "value blob" regular columns are excluded from table metadata for static-compact tables.
# It also coerces columns marked static to be regular instead.
#
# @since 3.0.0
# @jira_ticket RUBY-185
# @expected_result the metadata should only report columns we've consciously added to the table.
#
# @test_category metadata
#
def test_skip_internal_columns_for_static_compact_table
skip("WITH COMPACT STORAGE has been removed in C* 4.0") if CCM.cassandra_version > '4.0'
@session.execute("CREATE TABLE simplex.blobby (key blob PRIMARY KEY, f1 blob, f2 blob) WITH COMPACT STORAGE")
@listener.wait_for_table('simplex', 'blobby')
assert @cluster.keyspace('simplex').has_table?('blobby')
table_meta = @cluster.keyspace('simplex').table('blobby')
table_cql = Regexp.new(/CREATE TABLE simplex\.blobby \(
"key" blob PRIMARY KEY,
"f1" blob,
"f2" blob
\)/)
m = table_meta.to_cql =~ table_cql
refute_nil m, "actual cql: #{table_meta.to_cql}"
assert_equal 0, m, "actual cql: #{table_meta.to_cql}"
assert_equal 3, table_meta.columns.size
table_meta.each_column do |column|
assert ['key', 'f1', 'f2'].any? { |name| name == column.name }
assert_equal :blob, column.type.kind
refute column.static?
end
@session.execute("DROP TABLE simplex.blobby")
end
# Test for skipping internal columns in dense tables
#
# test_skip_internal_columns_for_dense_table tests that "value <empty-type>" column is excluded from table metadata
# for dense tables.
#
# @since 3.0.0
# @jira_ticket RUBY-185
# @expected_result the metadata should only report columns we've consciously added to the table.
#
# @test_category metadata
#
def test_skip_internal_columns_for_dense_table
# NOTE: It seems that the dense table does not have an empty-type column. The Java driver has logic to
# handle that, but maybe it's outdated and unnecessary. This test serves the purpose of keeping us on guard,
# in case some version of C* does create the internal column; if we encounter such a C*, the test will fail and
# we'll go to the effort of fixing the issue.
skip("WITH COMPACT STORAGE has been removed in C* 4.0") if CCM.cassandra_version > '4.0'
@session.execute("CREATE TABLE simplex.dense (f1 int, f2 int, f3 int, PRIMARY KEY (f1, f2)) WITH COMPACT STORAGE")
@listener.wait_for_table('simplex', 'dense')
assert @cluster.keyspace('simplex').has_table?('dense')
table_meta = @cluster.keyspace('simplex').table('dense')
table_cql = Regexp.new(/CREATE TABLE simplex\.dense \(
"f1" int,
"f2" int,
"f3" int,
PRIMARY KEY \("f1", "f2"\)
\)
WITH CLUSTERING ORDER BY \("f2" ASC\)
AND COMPACT STORAGE/)
m = table_meta.to_cql =~ table_cql
refute_nil m, "actual cql: #{table_meta.to_cql}"
assert_equal 0, m, "actual cql: #{table_meta.to_cql}"
assert_equal 3, table_meta.columns.size
table_meta.each_column do |column|
assert ['f1', 'f2', 'f3'].any? { |name| name == column.name }
assert_equal :int, column.type.kind
refute column.static?
end
@session.execute("DROP TABLE simplex.dense")
end
# Test for handling custom type columns in table metadata
#
# test_custom_type_column_in_table tests that a custom type column in a table is processed properly
# when collecting table metadata.
#
# @since 3.0.0
# @jira_ticket RUBY-186
# @expected_result the metadata should correctly report the custom type column.
#
# @test_category metadata
#
def test_custom_type_column_in_table
skip("Custom type representation was changed in Cassandra 3.0 to be a single-quoted string") if CCM.cassandra_version < '3.0.0'
assert @cluster.keyspace('simplex').has_table?('custom')
table_meta = @cluster.keyspace('simplex').table('custom')
table_cql = Regexp.new(/CREATE TABLE simplex\."custom" \(
"f1" int PRIMARY KEY,
"f2" 'org.apache.cassandra.db.marshal.CompositeType\(org.apache.cassandra.db.marshal.UUIDType,org.apache.cassandra.db.marshal.UTF8Type\)'
\)/)
assert_equal 0, table_meta.to_cql =~ table_cql, "actual cql: #{table_meta.to_cql}"
assert_equal 2, table_meta.columns.size
column = table_meta.columns[0]
assert_equal 'f1', column.name
assert_equal :int, column.type.kind
column = table_meta.columns[1]
assert_equal 'f2', column.name
assert_equal :custom, column.type.kind
assert_equal 'org.apache.cassandra.db.marshal.CompositeType(org.apache.cassandra.db.marshal.UUIDType,org.apache.cassandra.db.marshal.UTF8Type)',
column.type.name
end
  # Test for retrieving the crc_check_chance property
  #
  # test_table_metadata_contains_crc_check_chance tests that the 'crc_check_chance' property of table metadata is able
  # to be retrieved.
  #
  # @since 3.0.0
  # @jira_ticket RUBY-179
  # @expected_result crc_check_chance property should be retrieved from the table metadata
  #
  # @test_category metadata
  #
  def test_table_metadata_contains_crc_check_chance
    skip("The crc_check_chance property on a table was introduced in Cassandra 3.0") if CCM.cassandra_version < '3.0.0'
assert @cluster.keyspace('simplex').has_table?('users')
table_meta = @cluster.keyspace('simplex').table('users')
assert_equal 1.0, table_meta.options.crc_check_chance
end
# Test for retrieving extensions property
#
# test_table_metadata_contains_extensions tests that the 'extensions' property of table metadata is able to be
# retrieved.
#
# @since 3.0.0
# @jira_ticket RUBY-170
# @expected_result extensions property should be retrieved from the table metadata
#
# @test_category metadata
#
def test_table_metadata_contains_extensions
skip("The extensions property on a table was introduced in Cassandra 3.0") if CCM.cassandra_version < '3.0.0'
assert @cluster.keyspace('simplex').has_table?('users')
table_meta = @cluster.keyspace('simplex').table('users')
assert_empty table_meta.options.extensions
end
# Test for retrieving trigger metadata
#
# test_can_retrieve_trigger_metadata tests that all pieces of trigger metadata can be retrieved. It first creates a
# simple trigger. It then goes through each piece of the trigger metadata and verifies that each piece is as expected.
# It finally creates another trigger with the same name, on a different table and verifies that it is retrieved and
# complete.
#
# @since 3.1.0
# @jira_ticket RUBY-187
# @expected_result trigger metadata should be retrieved.
#
# @test_category metadata
#
def test_can_retrieve_trigger_metadata
skip("Triggers were introduced in Cassandra 2.0") if CCM.cassandra_version < '2.0.0'
# trigger1, on test1 table
@session.execute("CREATE TRIGGER trigger1 ON simplex.test1 USING 'org.apache.cassandra.triggers.AuditTrigger'")
@listener.wait_for_trigger('simplex', 'test1', 'trigger1')
assert @cluster.keyspace('simplex').table('test1').has_trigger?('trigger1')
trigger_meta = @cluster.keyspace('simplex').table('test1').trigger('trigger1')
assert_equal 'trigger1', trigger_meta.name
assert_equal 'test1', trigger_meta.table.name
assert_equal 'org.apache.cassandra.triggers.AuditTrigger', trigger_meta.options['class']
# trigger1, on test2 table
@session.execute("CREATE TRIGGER trigger1 ON simplex.test2 USING 'org.apache.cassandra.triggers.AuditTrigger'")
@listener.wait_for_trigger('simplex', 'test2', 'trigger1')
assert @cluster.keyspace('simplex').table('test2').has_trigger?('trigger1')
trigger_meta2 = @cluster.keyspace('simplex').table('test2').trigger('trigger1')
assert_equal 'trigger1', trigger_meta2.name
assert_equal 'test2', trigger_meta2.table.name
assert_equal 'org.apache.cassandra.triggers.AuditTrigger', trigger_meta2.options['class']
refute_equal trigger_meta, trigger_meta2
end
end
| 40.291284 | 148 | 0.71418 |
1c48ca173e9e178cf4fd02daca64debc42d8aeed | 2,390 | # frozen_string_literal: true
class ActiveJobs
ALL_LOCK = Mutex.new
class << self
def clusters
Rails.cache.fetch('clusters', expires_in: 12.hours) do
OodCore::Clusters.new(
OodCore::Clusters.load_file(Configuration.clusters_config_dir).reject do |c|
!c.errors.empty? || !c.allow? || c.kubernetes? || c.linux_host?
end
)
end
end
def all
Rails.logger.debug("ActiveJobs.all being called at at #{Time.now}")
ALL_LOCK.synchronize do
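        # Only one thread at a time populates this cache entry, so a cache miss does not
        # trigger duplicate per-cluster queries from concurrent callers.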
Rails.cache.fetch('all_jobs', expires_in: 50.seconds) do
Rails.logger.debug("ActiveJobs fetching new jobs at #{Time.now}")
clusters.map do |c|
jobs = info_all(c)
{
'cluster_name' => c.id,
'info' => info_from_jobs(jobs)
}
end
end
end
end
def cluster_info(cluster_id)
ActiveJobs.all.select { |info| info['cluster_name'] == cluster_id }.first
end
private
def info_from_jobs(jobs)
gpu_jobs_running = jobs.count { |job| job.status == 'running' && job.native[:gres].include?("gpu") }
gpu_jobs_queued = jobs.count { |job| job.status == 'queued' && job.native[:gres].include?("gpu") }
jobs_running = jobs.count { |job| job.status == 'running' }
jobs_queued = jobs.count { |job| job.status == 'queued' }
{
'gpu_jobs_running': gpu_jobs_running,
'gpu_jobs_queued': gpu_jobs_queued,
'gpu_jobs_running_pct': (gpu_jobs_running + gpu_jobs_queued != 0) ? (gpu_jobs_running.to_f / (gpu_jobs_running + gpu_jobs_queued) * 100).to_i : 0,
'gpu_jobs_queued_pct': (gpu_jobs_running + gpu_jobs_queued != 0) ? (gpu_jobs_queued.to_f / (gpu_jobs_running + gpu_jobs_queued) * 100).to_i : 0,
'jobs_running': jobs_running,
'jobs_queued': jobs_queued,
'jobs_running_pct': (jobs_running + jobs_queued != 0) ? (jobs_running.to_f / (jobs_running + jobs_queued) * 100).to_i : 0,
'jobs_queued_pct': (jobs_running + jobs_queued != 0) ? (jobs_queued.to_f / (jobs_running + jobs_queued) * 100).to_i : 0
}
end
def info_all(cluster)
cluster.job_adapter.info_all
rescue StandardError => e
Rails.logger.warn("could not get jobs from #{cluster.id} due to error: #{e.message}")
[]
end
end
end
| 36.212121 | 154 | 0.608787 |
01a34625578c5b0b235f3a92fa925c3fe94ea164 | 1,200 | class Api::V1::TagsController < Api::V1::BaseController
# before_action :set_tag, only: [:index, :create, :update, :destroy]
before_action :authorize_tag, only: [:show, :update, :destroy]
before_action :set_tag, only: [:show, :update, :destroy]
def index
@tags = Tag.all
end
def show
end
def create
@tag = Tag.new(tag_params)
if @tag.save
render :show, status: :created
else
render json: { message: @tag.errors.full_messages }, status: :unprocessable_entity
end
end
def update
if @tag.update(tag_params)
render :show, status: :ok
else
render json: { message: @tag.errors.full_messages }, status: :unprocessable_entity
end
end
def destroy
@tag.destroy
render :show, status: :ok
end
private
def set_tag
# @tag = Tag.find_by(activity_id: params[:activity_id])
@tag = Tag.find(params[:id])
end
def authorize_tag
@activity = Activity.find_by(id: Tag.find_by(id: params[:id])[:activity_id])
render json: { message: "No autorizado" }, status: :unauthorized unless @user == @activity.user
end
def tag_params
params.require(:tag).permit(:tag, :datetime, :activity_id)
end
end
| 23.529412 | 99 | 0.6625 |
013a2b5f0e3e2dac551df2a36c8ea982e2e6274a | 128 | class UserSerializer < ActiveModel::Serializer
attributes :id, :name
has_many :expenses, serializer: ExpenseSerializer
end
| 25.6 | 52 | 0.796875 |
abef9b70399d83e7a104f43c51139d83396a49b4 | 487 | require 'bundler/setup'
Bundler.require
ActiveRecord::Base.establish_connection(
:adapter => "sqlite3",
:database => "db/project.sqlite"
#might have to do something else to make this my database
)
require_relative "../app/controllers/application_controller.rb"
require_all 'app'
Dir[File.join(File.dirname(__FILE__), "../app/models", "*.rb")].each {|f| require f}
Dir[File.join(File.dirname(__FILE__), "../app/controllers", "*.rb")].sort.each {|f| require f}
#why sort??
| 28.647059 | 94 | 0.706366 |
edd94ead422ffe151435790d6b8559e992b757e1 | 1,140 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-pi/types'
require_relative 'aws-sdk-pi/client_api'
require_relative 'aws-sdk-pi/client'
require_relative 'aws-sdk-pi/errors'
require_relative 'aws-sdk-pi/resource'
require_relative 'aws-sdk-pi/customizations'
# This module provides support for AWS Performance Insights. This module is available in the
# `aws-sdk-pi` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# See {Client} for more information.
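#
#     # An illustrative sketch (the region is only an example value;
#     # credentials are assumed to come from the usual SDK sources):
#     client = Aws::PI::Client.new(region: 'us-west-2')
#     # Operation methods on `client` accept a hash of request parameters.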
#
# # Errors
#
# Errors returned from AWS Performance Insights all
# extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::PI::Errors::ServiceError
# # rescues all service API errors
# end
#
# See {Errors} for more information.
#
# @service
module Aws::PI
GEM_VERSION = '1.3.0'
end
| 23.75 | 92 | 0.735088 |
7a84615bd2b8b8bcd7f2b8a31aa53a61ba26f3ff | 2,835 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_08_01
module Models
#
# RouteTable route.
#
class HubRoute
include MsRestAzure
# @return [String] The name of the Route that is unique within a
# RouteTable. This name can be used to access this route.
attr_accessor :name
# @return [String] The type of destinations (eg: CIDR, ResourceId,
# Service).
attr_accessor :destination_type
# @return [Array<String>] List of all destinations.
attr_accessor :destinations
# @return [String] The type of next hop (eg: ResourceId).
attr_accessor :next_hop_type
# @return [String] NextHop resource ID.
attr_accessor :next_hop
#
# Mapper for HubRoute class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'HubRoute',
type: {
name: 'Composite',
class_name: 'HubRoute',
model_properties: {
name: {
client_side_validation: true,
required: true,
serialized_name: 'name',
type: {
name: 'String'
}
},
destination_type: {
client_side_validation: true,
required: true,
serialized_name: 'destinationType',
type: {
name: 'String'
}
},
destinations: {
client_side_validation: true,
required: true,
serialized_name: 'destinations',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
next_hop_type: {
client_side_validation: true,
required: true,
serialized_name: 'nextHopType',
type: {
name: 'String'
}
},
next_hop: {
client_side_validation: true,
required: true,
serialized_name: 'nextHop',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 28.069307 | 72 | 0.470899 |
1c967fe0a3468f038f28bff978fc5e183e80bff6 | 1,723 | require "rails_helper"
module Fieri
RSpec.describe JobsController, type: :controller do
routes { Fieri::Engine.routes }
describe "#create" do
let(:params) do
{
"fieri_key" => ENV["FIERI_KEY"],
"cookbook" =>
{
"name" => "apache2",
"version" => "1.2.0",
"artifact_url" => "http://example.com/apache.tar.gz",
},
}
end
let(:supermarket_api_runner) { SupermarketApiRunner.new }
let(:cookbook_json_response) { File.read("spec/support/cookbook_metrics_fixture.json") }
let(:version_json_response) { File.read("spec/support/cookbook_version_fixture.json") }
before do
allow(SupermarketApiRunner).to receive(:new).and_return(supermarket_api_runner)
allow(supermarket_api_runner).to receive(:cookbook_api_response).and_return(cookbook_json_response)
allow(supermarket_api_runner).to receive(:cookbook_version_api_response).and_return(version_json_response)
end
it "calls the MetricsRunner" do
expect(MetricsRunner).to receive(:perform_async).with(hash_including(params["cookbook"]))
post :create, params: params
end
context "authenticating submissions for cookbook evaluation" do
it "fails when the fieri_key is not present" do
post :create, params: params.except("fieri_key")
expect(response.status).to eq(400)
end
it "fails when the fieri_key does not match" do
expect(subject).to receive(:fieri_key).and_return("totally_not_the_fieri_key")
post :create, params: params
expect(response.status).to eq(401)
end
end
end
end
end
| 32.509434 | 114 | 0.648868 |
1a8e404a6d1e3ee0b1fa17e065e1739a3031eda1 | 329 | require 'spec/spec_helper'
describe "freezable xml reference", :shared => true do
describe "with :frozen option" do
it "should be frozen" do
@frozen.frozen?.should be_true
end
end
describe "without :frozen option" do
it "should not be frozen" do
@unfrozen.frozen?.should be_false
end
end
end | 21.933333 | 54 | 0.683891 |
79d30e8414ed1fb8e596c52d4c76b0afb96d2915 | 542 | # This is a traditional example of a callback in Ruby but it is not idiomatic.
def finished_homework(subject)
p "Finished my #{subject} homework."
end
def do_homework(subject, callback)
p "Starting my #{subject} homework."
callback.call(subject)
end
do_homework("Math", method(:finished_homework))
# To perform an idiomatic ruby callback, the block syntax is used
def do_homework(subject, &block)
p "Starting my #{subject} homework."
yield subject
end
do_homework("Math") do |subject|
p "Finished my #{subject} homework."
end
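# A lambda (or proc) can also serve as the callback: prefixing it with `&`
# converts it into a block for the block-taking definition above. This is an
# illustrative variant; `finish` is an arbitrary name.
finish = ->(subject) { p "Finished my #{subject} homework." }
do_homework("Math", &finish)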
| 24.636364 | 78 | 0.745387 |
3371dd4a49b2fcde2a54c40428a4b8c4f6bea976 | 2,580 | require_relative '../../../lib/bookwatch/html_document_manipulator'
require_relative '../../helpers/use_fixture_repo'
module Bookwatch
describe HtmlDocumentManipulator do
def html_document_manipulator
HtmlDocumentManipulator.new
end
describe 'setting an attribute' do
it 'returns a copy of the document with attribute set to the specified value' do
document_body = '<div class="wrapper"><div class="selector"></div></div>'
selector = 'div.selector'
attribute = 'data-name'
new_value = 'this is a name'
expect(html_document_manipulator.set_attribute(document: document_body,
selector: selector,
attribute: attribute,
value: new_value)
).to eq '<div class="wrapper"><div class="selector" data-name="this is a name"></div></div>'
end
end
describe 'reading from a particular part of a file' do
it 'returns the desired content as a string' do
Dir.mktmpdir do |tmpdir|
filepath = File.join tmpdir, 'filename.txt'
File.write(filepath, '<body><header><p>this is some text</p></header></body>')
doc = File.read filepath
expect(html_document_manipulator.read_html_in_tag(document: doc,
tag: 'body')).
to eq '<header><p>this is some text</p></header>'
end
end
context 'when the files are multiline' do
use_fixture_repo('my-dita-output-repo')
it 'returns the correct selection' do
filepath = File.expand_path './output.html'
doc = File.read filepath
expect(html_document_manipulator.read_html_in_tag(document: doc,
tag: 'title')).
to eq 'GemFire XD\'s Features and Benefits ("Features")'
end
end
context 'when the file does not contain the marker' do
it 'returns an empty string' do
Dir.mktmpdir do |tmpdir|
filepath = File.join tmpdir, 'filename.txt'
File.write(filepath, '<head><body>this is some text</body></head>')
doc = File.read filepath
expect(html_document_manipulator.read_html_in_tag(document: doc,
tag: 'nonexistent')).
to eq ''
end
end
end
end
end
end
| 37.391304 | 100 | 0.550388 |
ffb40547cb970fb286ead756ab5143b3b8974a36 | 3,422 | # encoding: utf-8
module Rubocop
module Cop
module Style
# Common functionality for modifier cops.
module FavorModifier
# TODO extremely ugly solution that needs lots of polish
def check(sexp, comments)
case sexp.loc.keyword.source
when 'if' then cond, body, _else = *sexp
when 'unless' then cond, _else, body = *sexp
else cond, body = *sexp
end
if length(sexp) > 3
false
else
body_length = body_length(body)
if body_length == 0
false
else
indentation = sexp.loc.keyword.column
kw_length = sexp.loc.keyword.size
cond_length = cond.loc.expression.size
space = 1
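              # Estimated length of the one-line modifier form: indentation + body + " " + keyword + " " + condition.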
total = indentation + body_length + space + kw_length + space +
cond_length
total <= LineLength.max && !body_has_comment?(body, comments)
end
end
end
def length(sexp)
sexp.loc.expression.source.lines.to_a.size
end
def body_length(body)
if body && body.loc.expression
body.loc.expression.size
else
0
end
end
def body_has_comment?(body, comments)
comment_lines = comments.map(&:location).map(&:line)
body_line = body.loc.expression.line
comment_lines.include?(body_line)
end
end
# Checks for if and unless statements that would fit on one line
# if written as a modifier if/unless.
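      #
      # An illustrative example (hypothetical code, not from this file):
      #
      #   # flagged
      #   if enabled?
      #     log(message)
      #   end
      #
      #   # preferred, when it fits within the configured line length
      #   log(message) if enabled?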
class IfUnlessModifier < Cop
include FavorModifier
def error_message
'Favor modifier if/unless usage when you have a single-line body. ' +
'Another good alternative is the usage of control flow &&/||.'
end
def investigate(source_buffer, source, tokens, ast, comments)
return unless ast
on_node(:if, ast) do |node|
# discard ternary ops, if/else and modifier if/unless nodes
return if ternary_op?(node)
return if modifier_if?(node)
return if elsif?(node)
return if if_else?(node)
if check(node, comments)
add_offence(:convention, node.loc.expression, error_message)
end
end
end
def ternary_op?(node)
node.loc.respond_to?(:question)
end
def modifier_if?(node)
node.loc.end.nil?
end
def elsif?(node)
node.loc.keyword.is?('elsif')
end
def if_else?(node)
node.loc.respond_to?(:else) && node.loc.else
end
end
# Checks for while and until statements that would fit on one line
# if written as a modifier while/until.
class WhileUntilModifier < Cop
include FavorModifier
MSG =
'Favor modifier while/until usage when you have a single-line body.'
def investigate(source_buffer, source, tokens, ast, comments)
return unless ast
on_node([:while, :until], ast) do |node|
# discard modifier while/until
next unless node.loc.end
if check(node, comments)
add_offence(:convention, node.loc.expression, MSG)
end
end
end
end
end
end
end
| 28.756303 | 79 | 0.556108 |
088131bdfd3332e0a34f74f75af12096654039f6 | 268 | require 'bundler/setup'
require File.join(File.dirname(__FILE__), "..", "lib", "rebay")
Dir["./spec/support/**/*.rb"].sort.each {|f| require f}
RSpec.configure do |config|
config.mock_with :rspec
end
Rebay::Api.configure do |rebay|
rebay.app_id = 'default'
end
| 20.615385 | 63 | 0.686567 |
7953a04edb55e78dfaaf77a53f9f42b136a4b6ac | 413 | module Pageflow
module RailsVersion
extend self
RAILS_VERSION_FILE = File.expand_path('../../../../.rails_version')
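    # Resolution order: the PAGEFLOW_RAILS_VERSION environment variable, then the
    # .rails_version file, then the '5.2.0' default.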
def detect
from_env || from_file || '5.2.0'
end
private
def from_env
ENV['PAGEFLOW_RAILS_VERSION']
end
def from_file
if File.exists?(RAILS_VERSION_FILE)
File.read(RAILS_VERSION_FILE).chomp.strip.presence
end
end
end
end
| 17.208333 | 71 | 0.639225 |
21cba877e9b2f9add57900202aea97001a00dcc7 | 1,455 | # frozen_string_literal: true
# Load default formatter gem
require "pathname"
require_relative "default_formatter"
require_relative "profiles/root_filter"
require_relative "profiles/test_frameworks"
require_relative "profiles/bundler_filter"
require_relative "profiles/hidden_filter"
require_relative "profiles/rails"
# Default configuration
SimpleCov.configure do
formatter SimpleCov::Formatter::MultiFormatter.new(
SimpleCov::Formatter.from_env(ENV)
)
load_profile "bundler_filter"
load_profile "hidden_filter"
# Exclude files outside of SimpleCov.root
load_profile "root_filter"
end
# Gotta stash this a-s-a-p, see the CommandGuesser class and i.e. #110 for further info
SimpleCov::CommandGuesser.original_run_command = "#{$PROGRAM_NAME} #{ARGV.join(' ')}"
at_exit do
next if SimpleCov.external_at_exit?
SimpleCov.at_exit_behavior
end
# Autoload config from ~/.simplecov if present
require_relative "load_global_config"
# Autoload config from .simplecov if present
# Recurse upwards until we find .simplecov or reach the root directory
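# A `.simplecov` file is plain Ruby that gets `load`ed below; a minimal sketch
# (illustrative only, the filter path is just an example):
#
#   SimpleCov.start do
#     add_filter "/vendor/"
#   end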
config_path = Pathname.new(SimpleCov.root)
loop do
filename = config_path.join(".simplecov")
if filename.exist?
begin
load filename
rescue LoadError, StandardError
warn "Warning: Error occurred while trying to load #{filename}. " \
"Error message: #{$!.message}"
end
break
end
config_path, = config_path.split
break if config_path.root?
end
| 26.944444 | 87 | 0.770447 |
1d6ee5b3e16774e25980ed6c8847977c44588d74 | 355 | # This provider is in development and not ready for production
Puppet::Type.type(:lmax_network_route).provide(:interfaces) do
defaultfor :operatingsystem => [:ubuntu, :debian]
  # There are several ways to implement this:
  # we can add post-up/pre-down rules that call `ip route`,
  # or we can add scripts in /etc/network/if-up.d and /etc/network/if-down.d.
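  # For example (an illustrative sketch, not something this provider emits yet),
  # the post-up/pre-down approach would add interfaces(5) stanzas such as:
  #   post-up ip route add 10.1.2.0/24 via 192.168.0.1 dev eth0
  #   pre-down ip route del 10.1.2.0/24 via 192.168.0.1 dev eth0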
end
| 35.5 | 68 | 0.749296 |
26954d6261c26d324e0c5740c8ac6b479b2b1664 | 13,982 | # frozen_string_literal: true
require 'rails_helper'
describe CampaignsController do
render_views
describe '#index' do
it 'renders a 200' do
get :index
expect(response.status).to eq(200)
end
end
describe '#create' do
let(:user) { create(:user) }
let(:admin) { create(:admin) }
let(:title) { 'My New? Campaign 5!' }
let(:expected_slug) { 'my_new_campaign_5' }
let(:campaign_params) do
{ campaign: { title: title,
default_passcode: 'custom',
custom_default_passcode: 'ohai' } }
end
context 'when user is an admin' do
before do
allow(controller).to receive(:current_user).and_return(admin)
end
it 'creates new campaigns with custom passcodes' do
post :create, params: campaign_params
new_campaign = Campaign.last
expect(new_campaign.slug).to eq(expected_slug)
expect(new_campaign.default_passcode).to eq('ohai')
end
it 'creates a campaign user for the current user' do
post :create, params: campaign_params
expect(CampaignsUsers.last.user_id).to eq(admin.id)
end
it 'does not create duplicate titles' do
Campaign.create(title: title, slug: 'foo')
post :create, params: campaign_params
expect(Campaign.last.slug).to eq('foo')
end
it 'does not create duplicate slugs' do
Campaign.create(title: 'foo', slug: expected_slug)
post :create, params: campaign_params
expect(Campaign.last.title).to eq('foo')
end
end
context 'when user is not an admin and feature flag is off' do
before do
allow(controller).to receive(:current_user).and_return(user)
allow(Features).to receive(:open_course_creation?).and_return(false)
end
it 'returns a 401 and does not create a campaign' do
post :create, params: campaign_params
expect(response.status).to eq(401)
expect(Campaign.count).to eq(1)
end
end
end
describe '#update' do
let(:user) { create(:user) }
let(:admin) { create(:admin) }
let(:campaign) { create(:campaign) }
let(:description) { 'My new campaign is the best campaign ever!' }
let(:campaign_params) { { slug: campaign.slug, description: description } }
it 'returns a 401 if the user is not an admin and not an organizer of the campaign' do
allow(controller).to receive(:current_user).and_return(user)
delete :update, params: { campaign: campaign_params, slug: campaign.slug }
expect(response.status).to eq(401)
end
it 'updates the campaign if the user is an organizer of the campaign' do
create(:campaigns_user, user_id: user.id, campaign_id: campaign.id,
role: CampaignsUsers::Roles::ORGANIZER_ROLE)
allow(controller).to receive(:current_user).and_return(user)
post :update, params: { campaign: campaign_params, slug: campaign.slug }
expect(response.status).to eq(302) # redirect to /overview
expect(campaign.reload.description).to eq(description)
end
it 'updates the campaign if the user is an admin' do
allow(controller).to receive(:current_user).and_return(admin)
post :update, params: { campaign: campaign_params, slug: campaign.slug }
expect(response.status).to eq(302) # redirect to /overview
expect(campaign.reload.description).to eq(description)
end
end
describe '#destroy' do
let(:user) { create(:user) }
let(:admin) { create(:admin) }
let(:campaign) { create(:campaign) }
it 'returns a 401 if the user is not an admin and not an organizer of the campaign' do
allow(controller).to receive(:current_user).and_return(user)
delete :destroy, params: { slug: campaign.slug }
expect(response.status).to eq(401)
expect(Campaign.find_by(slug: campaign.slug)).not_to be_nil
end
it 'deletes the campaign if the user is a campaign organizer' do
create(:campaigns_user, user_id: user.id, campaign_id: campaign.id,
role: CampaignsUsers::Roles::ORGANIZER_ROLE)
allow(controller).to receive(:current_user).and_return(user)
delete :destroy, params: { slug: campaign.slug }
expect(response.status).to eq(302) # redirect to /campaigns
expect(Campaign.find_by(slug: campaign.slug)).to be_nil
end
end
describe '#add_organizer' do
let(:user) { create(:user) }
let(:admin) { create(:admin) }
let(:campaign) { create(:campaign) }
it 'returns a 401 if the user is not an admin and not an organizer of the campaign' do
allow(controller).to receive(:current_user).and_return(user)
put :add_organizer, params: { slug: campaign.slug, username: 'MusikAnimal' }
expect(response.status).to eq(401)
expect(Campaign.find_by(slug: campaign.slug)).not_to be_nil
end
it 'adds the given user as an organizer of the campaign '\
'if the current user is a campaign organizer' do
create(:campaigns_user, user_id: user.id, campaign_id: campaign.id,
role: CampaignsUsers::Roles::ORGANIZER_ROLE)
user2 = create(:user, username: 'MusikAnimal')
allow(controller).to receive(:current_user).and_return(user)
put :add_organizer, params: { slug: campaign.slug, username: user2.username }
expect(response.status).to eq(302) # redirect to /overview
expect(CampaignsUsers.last.user_id).to eq(user2.id)
end
end
describe '#remove_organizer' do
let(:user) { create(:user) }
let(:user2) { create(:user, username: 'user2') }
let(:campaign) { create(:campaign) }
let(:organizer) do
create(:campaigns_user, user_id: user2.id, campaign_id: campaign.id,
role: CampaignsUsers::Roles::ORGANIZER_ROLE)
end
it 'returns a 401 if the user is not an admin and not an organizer of the campaign' do
allow(controller).to receive(:current_user).and_return(user)
put :remove_organizer, params: { slug: campaign.slug, id: organizer.user_id }
expect(response.status).to eq(401)
expect(CampaignsUsers.find_by(id: organizer.id)).not_to be_nil
end
it 'removes the given organizer from the campaign '\
'if the current user is a campaign organizer' do
create(:campaigns_user, user_id: user.id, campaign_id: campaign.id,
role: CampaignsUsers::Roles::ORGANIZER_ROLE)
allow(controller).to receive(:current_user).and_return(user)
put :remove_organizer, params: { slug: campaign.slug, id: organizer.user_id }
expect(response.status).to eq(302) # redirect to /overview
expect(CampaignsUsers.find_by(id: organizer.id)).to be_nil
end
end
describe '#remove_course' do
let(:user) { create(:user) }
let(:campaign) { create(:campaign) }
let(:course) { create(:course) }
let!(:campaigns_course) do
create(:campaigns_course, campaign_id: campaign.id,
course_id: course.id)
end
it 'returns a 401 if the user is not an admin and not an organizer of the campaign' do
allow(controller).to receive(:current_user).and_return(user)
put :remove_course, params: { slug: campaign.slug, course_id: course.id }
expect(response.status).to eq(401)
expect(CampaignsCourses.find_by(id: campaigns_course.id)).not_to be_nil
end
it 'removes the course from the campaign if the current user is a campaign organizer' do
create(:campaigns_user, user_id: user.id, campaign_id: campaign.id,
role: CampaignsUsers::Roles::ORGANIZER_ROLE)
allow(controller).to receive(:current_user).and_return(user)
put :remove_course, params: { slug: campaign.slug, course_id: course.id }
expect(response.status).to eq(302) # redirect to /overview
expect(CampaignsCourses.find_by(id: campaigns_course.id)).to be_nil
end
end
describe '#students' do
let(:course) { create(:course) }
let(:campaign) { create(:campaign) }
let(:student) { create(:user) }
before do
campaign.courses << course
create(:courses_user, course_id: course.id, user_id: student.id,
role: CoursesUsers::Roles::STUDENT_ROLE)
end
context 'without "course" option' do
let(:request_params) { { slug: campaign.slug, format: :csv } }
it 'returns a csv of student usernames' do
get :students, params: request_params
expect(response.body).to have_content(student.username)
end
end
context 'with "course" option' do
let(:request_params) { { slug: campaign.slug, course: true, format: :csv } }
it 'returns a csv of student usernames with course slugs' do
get :students, params: request_params
expect(response.body).to have_content(student.username)
expect(response.body).to have_content(course.slug)
end
end
end
describe '#instructors' do
let(:course) { create(:course) }
let(:campaign) { create(:campaign) }
let(:instructor) { create(:user) }
before do
campaign.courses << course
create(:courses_user, course_id: course.id, user_id: instructor.id,
role: CoursesUsers::Roles::INSTRUCTOR_ROLE)
end
context 'without "course" option' do
let(:request_params) { { slug: campaign.slug, format: :csv } }
it 'returns a csv of instructor usernames' do
get :instructors, params: request_params
expect(response.body).to have_content(instructor.username)
end
end
context 'with "course" option' do
let(:request_params) { { slug: campaign.slug, course: true, format: :csv } }
it 'returns a csv of instructor usernames with course slugs' do
get :instructors, params: request_params
expect(response.body).to have_content(instructor.username)
expect(response.body).to have_content(course.slug)
end
end
end
describe '#courses' do
let(:course) { create(:course, user_count: 1) }
let(:campaign) { create(:campaign) }
let(:instructor) { create(:user) }
let(:request_params) { { slug: campaign.slug, format: :csv } }
before do
campaign.courses << course
create(:courses_user, course_id: course.id, user_id: instructor.id,
role: CoursesUsers::Roles::INSTRUCTOR_ROLE)
end
it 'returns a csv of course data' do
get :courses, params: request_params
expect(response.body).to have_content(course.slug)
expect(response.body).to have_content(course.title)
expect(response.body).to have_content(course.school)
end
end
describe '#articles_csv' do
let(:course) { create(:course) }
let(:campaign) { create(:campaign) }
let(:article) { create(:article) }
let(:user) { create(:user) }
let!(:revision) { create(:revision, article: article, user: user, date: course.start + 1.hour) }
let(:request_params) { { slug: campaign.slug, format: :csv } }
before do
campaign.courses << course
create(:courses_user, course: course, user: user)
end
it 'returns a csv of course data' do
get :articles_csv, params: request_params
expect(response.body).to have_content(course.slug)
expect(response.body).to have_content(article.title)
end
end
describe '#overview' do
render_views
let(:user) { create(:user) }
let(:campaign) { create(:campaign) }
before do
create(:campaigns_user, user_id: user.id, campaign_id: campaign.id,
role: CampaignsUsers::Roles::ORGANIZER_ROLE)
get :overview, params: { slug: campaign.slug }
end
it 'renders 200' do
expect(response.status).to eq(200)
end
it 'shows the right campaign' do
expect(response.body).to have_content(campaign.title)
end
it 'shows properties of the campaign' do
expect(response.body).to have_content(campaign.description)
end
end
describe '#programs' do
render_views
let(:course) { create(:course) }
let(:course2) { create(:course, title: 'course2', slug: 'foo/course2') }
let(:campaign) { create(:campaign) }
before do
campaign.courses << course << course2
get :programs, params: { slug: campaign.slug }
end
it 'renders 200' do
expect(response.status).to eq(200)
end
it 'shows the right campaign' do
expect(response.body).to have_content(campaign.title)
end
it 'lists the programs for the given campaign' do
expect(response.body).to have_content(course.title)
expect(response.body).to have_content(course2.title)
expect(response.body).to have_content(course.school)
expect(response.body).to have_content(course.term)
end
it 'shows a remove button for the programs if the user is an organizer or admin' do
# don't show it if they are not an organizer or admin
expect(response.body).not_to have_content(I18n.t('assignments.remove'))
# when they are an organizer...
user = create(:user)
create(:campaigns_user, user_id: user.id, campaign_id: campaign.id,
role: CampaignsUsers::Roles::ORGANIZER_ROLE)
allow(controller).to receive(:current_user).and_return(user)
get :programs, params: { slug: campaign.slug }
expect(response.body).to have_content(I18n.t('assignments.remove'))
# when they are an admin...
admin = create(:admin)
allow(controller).to receive(:current_user).and_return(admin)
get :programs, params: { slug: campaign.slug }
expect(response.body).to have_content(I18n.t('assignments.remove'))
end
it 'searches title, school, and term of campaign courses' do
get :programs, params: { slug: campaign.slug, courses_query: course.title }
expect(response.body).to have_content(course.title)
end
end
end
| 37.18617 | 100 | 0.658346 |
f8c1a0133f53d13a58c5e59890ca19d5949e0f06 | 504 | # frozen_string_literal: true
require 'vk/api/responses'
module Vk
module API
class Messages < Vk::Schema::Namespace
module Responses
# @see https://github.com/VKCOM/vk-api-schema/blob/master/objects.json
class SearchDialogsResponse < Vk::Schema::Response
# @return [Array] @see https://github.com/VKCOM/vk-api-schema/blob/master/objects.json
attribute :response, API::Types::Coercible::Array.optional.default(nil)
end
end
end
end
end
| 29.647059 | 96 | 0.676587 |
ffae3b528055b587af1b24386f1d5a4adc4dbef3 | 1,399 | require File.dirname(__FILE__) + '/helper'
describe Mercurial::Node do
before do
@repository = Mercurial::Repository.open(Fixtures.test_repo)
end
it "should be considered directory when ends with a slash" do
node = @repository.nodes.find('new-directory/subdirectory/', 'a8b39838302f')
node.file?.must_equal false
node.directory?.must_equal true
end
it "should be considered file when doesn't end with a slash" do
node = @repository.nodes.find('new-directory/something.csv', 'a8b39838302f')
node.file?.must_equal true
node.directory?.must_equal false
end
it "should show node contents" do
node = @repository.nodes.find('new-directory/something.csv', 'a8b39838302f')
node.contents.strip.must_equal 'Here will be CSV.'
node = @repository.nodes.find('new-directory/something.csv', '291a498f04e9')
node.contents.strip.must_equal 'Here will be some new kind of CSV.'
end
it "should fetch contents of a node with whitespace in it's name" do
node = @repository.nodes.find('File With Whitespace.pdf', '8ddac5f6380e')
node.contents.must_equal ''
end
if RUBY_VERSION >= '1.9.1'
it "should return name in UTF-8 encoding on Ruby 1.9.1 and higher" do
node = @repository.nodes.find('кодировки/виндоуз-cp1251-lf', 'fe021a290ba1')
node.name.encoding.to_s.downcase.must_equal 'utf-8'
end
end
end
| 33.309524 | 82 | 0.708363 |
f77a889cde253bd92b63f6ea46906060c06ef085 | 69 | # frozen_string_literal: true
module Whatup
VERSION = '0.3.5'
end
| 11.5 | 29 | 0.724638 |
1c15f9e779e6ea329b357e436f804ae5db39395a | 1,021 | ##
# Parse a non-source file. We basically take the whole thing as one big
# comment. If the first character in the file is '#', we strip leading pound
# signs.
class RDoc::Parser::Simple < RDoc::Parser
parse_files_matching(//)
attr_reader :content # :nodoc:
##
# Prepare to parse a plain file
def initialize(top_level, file_name, content, options, stats)
super
preprocess = RDoc::Markup::PreProcess.new @file_name, @options.rdoc_include
preprocess.handle @content, @top_level
end
##
# Extract the file contents and attach them to the TopLevel as a comment
def scan
comment = remove_coding_comment @content
comment = remove_private_comments comment
@top_level.comment = comment
@top_level.parser = self.class
@top_level
end
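  ##
  # Strips RDoc "private" sections: everything from a line consisting of
  # "--" up to the next line starting with "++", plus a trailing "--" section
  # with no closing "++". An illustrative sketch (not from the original file):
  #
  #   visible text
  #   --
  #   hidden from the generated documentation
  #   ++
  #   visible again
  #
  # Only the text outside the markers is kept.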
def remove_private_comments(comment)
comment.gsub(/^--\n.*?^\+\+/m, '').sub(/^--\n.*/m, '')
end
def remove_coding_comment text
text.sub(/\A# .*coding[=:].*$/, '')
end
end
| 22.688889 | 80 | 0.649363 |