hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
f8ce1358e4b82ad1245dc778e38839ac41274f5f | 1,488 | # This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= 'test'

require 'rails'
require 'action_mailer'
require 'rails-mailgun'

RSpec.configure do |config|
  # Run specs in random order to surface order dependencies. To debug an
  # order dependency, re-run with the seed printed after each run
  # (e.g. --seed 1234).
  config.order = "random"
end

# Wire ActionMailer up to the Mailgun delivery method under test, using the
# public Mailgun sample credentials.
ActionMailer::Base.delivery_method = :mailgun
ActionMailer::Base.mailgun_settings = {
  api_host: "samples.mailgun.org",
  api_key: "key-3ax6xnjp29jd6fds4gc373sgvjxteol0"
}
| 33.818182 | 79 | 0.74664 |
ac97a49119a2239daeb794fba65453e89eb69b0e | 272 | AssetSync.configure do |config|
# Push precompiled assets to S3. Credentials come from dedicated IAM
# environment variables so the deploy key can be scoped to this bucket only.
config.fog_provider = 'AWS'
config.aws_access_key_id = ENV['AWS_ACCESS_KEY_ID_IAM']
config.aws_secret_access_key = ENV['AWS_SECRET_ACCESS_KEY_IAM']
# Target bucket and its region (Frankfurt).
config.fog_directory = 'portfoliofabrizio'
config.fog_region = 'eu-central-1'
end
| 34 | 65 | 0.786765 |
1a3866cf360f318cdcc548cb32a57de9561faf78 | 2,692 | class Customer::OrdersController < Customer::CustomerApplicationController
include ActionView::Helpers::NumberHelper
# Customers must be signed in to list or place orders. The customer's
# running `total` is maintained by callbacks: inc_total runs after
# create/update and dec_total runs before destroy (see the private
# methods below).
before_action :authenticate_customer!, only: [:index, :new, :create]
after_action :inc_total, only: [:create, :update]
before_action :set_order, only: [:edit, :update, :destroy]
before_action :dec_total, only: [:destroy]
add_breadcrumb I18n.t('dock.dashboard'), :customer_orders_path
def index
  # Ransack-backed listing of the signed-in customer's orders, newest
  # first, paginated.
  recent_orders = current_customer.orders.order(id: :desc)
  @search = recent_orders.search(params[:q])
  @orders = @search.result(distinct: true).paginate(page: params[:page])
  respond_with(@orders)
end
def new
# Blank order for the new-order form; create associates it with
# current_customer.
@order = Order.new
end
def create
@customer = current_customer
@order = @customer.orders.new
# NOTE(review): update persists the raw params first; the derived fields
# below are then computed and written in a second save, so each create
# performs two writes.
if @order.update(order_params)
# Area in m^2 — assumes width/height are in millimetres (TODO confirm units).
@order.quantity = ((@order.width * @order.height * @order.count ) / 1000000.0).round(2)
@order.date = Date.today
@order.state = 'Siparişiniz ulaştı'
# Amount = area * unit price of the first product, plus 18% VAT.
@order.amount = ( @order.quantity * @order.products.first.price * 1.18 ).round(2)
# First-ever order: seed the customer's running total here; inc_total
# (after_action) skips the case where this is the only order.
if @order.customer.total == nil
@order.customer.total = @order.amount.round
end
@order.save
flash[:success] = 'Sipariş başarılı bir şekilde kaydedilmiştir'
redirect_to new_customer_order_path
else
render 'new'
end
end
def edit
# Renders the edit form; @order is loaded by the set_order before_action.
end
def update
# Roll the old amount out of the customer's running total up front; the
# inc_total after_action adds the recomputed amount back in afterwards.
@order.customer.total = @order.customer.total - @order.amount
@order.customer.save
if @order.update(order_params)
# Recompute derived fields exactly as in create (second save per request).
@order.quantity = ((@order.width * @order.height * @order.count ) / 1000000.0).round(2)
@order.date = Date.today
@order.amount = ( @order.quantity * @order.products.first.price * 1.18 ).round(2)
@order.save
flash[:success] = 'Sipariş başarılı bir şekilde güncellendi'
# Non-customer (admin) sessions are sent to the owning customer's list.
if customer_signed_in?
redirect_to customer_orders_path
else
redirect_to user_orders_path(@order.customer)
end
else
# NOTE(review): on validation failure the old amount was already
# subtracted and inc_total still runs after the action — verify the
# total stays consistent on this branch.
render 'edit'
end
end
def destroy
  # Capture the owner before the record disappears; dec_total (before_action)
  # has already adjusted the running total.
  order_owner = @order.customer
  @order.destroy
  flash[:success] = 'Sipariş başarılı bir şekilde silindi'
  destination = customer_signed_in? ? customer_orders_path : user_orders_path(order_owner)
  redirect_to destination
end
private
def set_order
# Shared loader for member actions (edit/update/destroy).
@order = Order.find(params[:id])
end
def order_params
# Strong parameters; derived fields (quantity, amount, date) are computed
# server-side in create/update and never accepted from the form.
params.require(:order).permit(:width, :height, :count, :state, :deadline, :info, :product_ids)
end
def inc_total
# Add the (re)computed amount to the customer's running total after
# create/update. The very first order is skipped because create seeds the
# total itself while it is still nil.
@order.customer.total = @order.customer.total + @order.amount if @order.customer.orders.count != 1
@order.customer.save
end
def dec_total
# Remove a soon-to-be-destroyed order's amount from the running total.
# NOTE(review): this truncates with to_i while inc_total adds the full
# decimal amount, so totals can drift by the fractional part over the life
# of a customer — confirm whether truncation is intended.
@order.customer.total = @order.customer.total - @order.amount.to_i
@order.customer.save
end
end | 29.582418 | 104 | 0.670877 |
f80c3a914751daaeaddd525c3c4238423d59981d | 3,550 | module Commands
module Brewx
extend CommandHelpers
class << self
def init
# Lazy-load dependencies and mix in the Cri command-definition DSL.
require "yaml/store"
require_relative "../lib/cri_command_support"
extend CriCommandSupport
end
def run(args)
  # Build the command tree and dispatch. hard_exit: false keeps Cri from
  # calling exit, leaving the caller in control of the process.
  build_root_cmd.run(args, {}, hard_exit: false)
end
private
def config
# Memoized persistent settings, stored as YAML in the user's home directory.
@config ||= YAML::Store.new(File.expand_path("~/.brewx.yml"))
end
def brew_leaves
# Shells out to Homebrew: formulae that nothing else depends on.
`brew leaves`.split("\n").map(&:strip)
end
def requested_cmd
  # "requested" command group: maintain the list of explicitly-wanted
  # packages in the YAML config. Bare "requested" delegates to "list".
  cmd = define_cmd("requested") { |_opts, _args, cmd|
    cmd.run(["list"])
  }
  # All three subcommands report the list identically; keep the output
  # format in one place instead of three copies.
  print_requested = lambda do
    puts "Requested packages:"
    config.fetch(:requested, []).each do |pkg|
      puts "- #{pkg}"
    end
  end
  list_cmd = define_cmd("list") {
    # Read-only transaction: just report what is on file.
    config.transaction(true) do
      print_requested.call
    end
  }
  add_cmd = define_cmd("add") { |_opts, args, _cmd|
    config.transaction do
      config[:requested] ||= []
      config[:requested] |= args
      config[:requested].sort!
      print_requested.call
    end
  }
  rm_cmd = define_cmd("rm") { |_opts, args, _cmd|
    config.transaction do
      config[:requested] ||= []
      config[:requested] -= args
      config[:requested].sort!
      print_requested.call
    end
  }
  cmd.add_command(list_cmd)
  cmd.add_command(add_cmd)
  cmd.add_command(rm_cmd)
  cmd
end
def orphans_cmd
  # Leaves that were never explicitly requested — removal candidates.
  define_cmd("orphans") do
    wanted = config.transaction(true) { config.fetch(:requested, []) }
    p(brew_leaves - wanted)
  end
end
def uninstall_cmd
  define_cmd("uninstall", summary: "Uninstall a package, listing any new leaves created") do |_opts, args, _cmd|
    # Snapshot leaves, uninstall, then report which formulae became leaves
    # as a result (i.e. were only kept around as dependencies).
    before = brew_leaves
    p before
    p args
    system ["brew uninstall", args].join(" ")
    newly_created = brew_leaves - before
    p newly_created
  end
end
def update_interactive_cmd
# Interactive upgrade: show outdated formulae in a multi-select and
# upgrade the chosen ones. Pinned formulae are listed but disabled.
# NOTE(review): relies on TTY::Prompt and JSON being loaded elsewhere —
# init only requires yaml/store; confirm these are available at runtime.
define_cmd("update_interactive") do
prompt = TTY::Prompt.new
list = JSON.parse(`brew outdated --json`)
result = prompt.multi_select("Update packages?", per_page: 99) { |menu|
list.each do |item|
label = [
"#{item["name"]} (#{item["installed_versions"].join(", ")} -> #{item["current_version"]})",
item["pinned"] ? "[pinned: #{item["pinned_version"]}]" : nil,
].compact.join(" ")
menu.choice label, item["name"], disabled: item["pinned"]
end
}
if result.length > 0
system "brew upgrade #{result.join(" ")}"
else
puts "nothing selected!"
end
rescue TTY::Reader::InputInterrupt
# Ctrl-C during the prompt aborts cleanly instead of crashing.
puts "Aborted!"
end
end
def build_root_cmd
  # Assemble the CLI: root command plus all subcommands.
  root_cmd = define_cmd("brewx", summary: "Brew Extensions", help: true)
  root_cmd.add_command(uninstall_cmd)
  root_cmd.add_command(requested_cmd)
  root_cmd.add_command(orphans_cmd)
  root_cmd.add_command(update_interactive_cmd)
  # Return the root explicitly: relying on add_command's return value is
  # fragile (requested_cmd already returns its cmd explicitly for the same
  # reason), and run() calls .run on whatever this method returns.
  root_cmd
end
end
end
end
| 26.691729 | 118 | 0.527887 |
e202d8df62b9f93c6b25a0acc0734a0d80fded01 | 669 | Pod::Spec.new do |s|
# CocoaPods spec for CoreTextLabel — a label that draws NSAttributedString
# or HTML via CoreText.
s.name = 'CoreTextLabel'
s.version = '1.3.2'
s.summary = "Label to draw NSAttributedString or HTML with custom font and color. textIsTruncated, numberOfLines and lineSpacing is supported."
s.homepage = "https://github.com/appfarms/CoreTextLabel"
s.author = { 'Daniel Kuhnke' => '[email protected]' }
# NOTE(review): the :tag below duplicates s.version and must be bumped in
# lockstep on each release.
s.source = { :git => 'https://github.com/appfarms/CoreTextLabel.git', :tag => '1.3.2' }
s.platform = :ios
s.requires_arc = true
s.source_files = '*.{h,m}'
s.license = 'MIT'
s.ios.frameworks = 'QuartzCore', 'CoreText'
s.dependency 'RegexKitLite', '~>4.0'
end | 47.785714 | 152 | 0.612855 |
6a879e280c3edb14e0a48cf129c123ed87ab93df | 23,074 | # -*- encoding: utf-8 -*-
# This file generated automatically using vocab-fetch from etc/xsd.ttl
require 'rdf'
module RDF
class XSD < RDF::Vocabulary("http://www.w3.org/2001/XMLSchema#")
# Datatype definitions
term :ENTITIES,
comment: %(
ENTITIES represents the ENTITIES attribute type from [XML]. The ·value
space· of ENTITIES is the set of finite, non-zero-length sequences of
·ENTITY· values that have been declared as unparsed entities in a document
type definition. The ·lexical space· of ENTITIES is the set of
space-separated lists of tokens, of which each token is in the ·lexical
space· of ENTITY. The ·item type· of ENTITIES is ENTITY. ENTITIES is
derived from ·anySimpleType· in two steps: an anonymous list type is
defined, whose ·item type· is ENTITY; this is the ·base type· of ENTITIES,
which restricts its value space to lists with at least one item.
).freeze,
label: "ENTITIES".freeze,
subClassOf: "xsd:anySimpleType".freeze,
type: "rdfs:Datatype".freeze
term :ENTITY,
comment: %(
ENTITY represents the ENTITY attribute type from [XML]. The ·value space·
of ENTITY is the set of all strings that ·match· the NCName production in
[Namespaces in XML] and have been declared as an unparsed entity in a
document type definition. The ·lexical space· of ENTITY is the set of all
strings that ·match· the NCName production in [Namespaces in XML]. The
·base type· of ENTITY is NCName.
).freeze,
label: "ENTITY".freeze,
subClassOf: "xsd:NCName".freeze,
type: "rdfs:Datatype".freeze
term :ID,
comment: %(
ID represents the ID attribute type from [XML]. The ·value space· of ID is
the set of all strings that ·match· the NCName production in [Namespaces
in XML]. The ·lexical space· of ID is the set of all strings that ·match·
the NCName production in [Namespaces in XML]. The ·base type· of ID is
NCName.
).freeze,
label: "ID".freeze,
subClassOf: "xsd:NCName".freeze,
type: "rdfs:Datatype".freeze
term :IDREF,
comment: %(
IDREF represents the IDREF attribute type from [XML]. The ·value space· of
IDREF is the set of all strings that ·match· the NCName production in
[Namespaces in XML]. The ·lexical space· of IDREF is the set of strings
that ·match· the NCName production in [Namespaces in XML]. The ·base type·
of IDREF is NCName.
).freeze,
label: "IDREF".freeze,
subClassOf: "xsd:NCName".freeze,
type: "rdfs:Datatype".freeze
term :IDREFS,
comment: %(
IDREFS represents the IDREFS attribute type from [XML]. The ·value space·
of IDREFS is the set of finite, non-zero-length sequences of IDREFs. The
·lexical space· of IDREFS is the set of space-separated lists of tokens, of
which each token is in the ·lexical space· of IDREF. The ·item type· of
IDREFS is IDREF. IDREFS is derived from ·anySimpleType· in two steps: an
anonymous list type is defined, whose ·item type· is IDREF; this is the
·base type· of IDREFS, which restricts its value space to lists with at
least one item.
).freeze,
label: "IDREFS".freeze,
subClassOf: "xsd:anySimpleType".freeze,
type: "rdfs:Datatype".freeze
term :NCName,
comment: %(
NCName represents XML "non-colonized" Names. The ·value space· of NCName
is the set of all strings which ·match· the NCName production of
[Namespaces in XML]. The ·lexical space· of NCName is the set of all
strings which ·match· the NCName production of [Namespaces in XML]. The
·base type· of NCName is Name.
).freeze,
label: "NCName".freeze,
subClassOf: "xsd:Name".freeze,
type: "rdfs:Datatype".freeze
term :NMTOKEN,
comment: %(
NMTOKEN represents the NMTOKEN attribute type from [XML]. The ·value
space· of NMTOKEN is the set of tokens that ·match· the Nmtoken production
in [XML]. The ·lexical space· of NMTOKEN is the set of strings that
·match· the Nmtoken production in [XML]. The ·base type· of NMTOKEN is
token.
).freeze,
label: "NMTOKEN".freeze,
subClassOf: "xsd:token".freeze,
type: "rdfs:Datatype".freeze
term :NMTOKENS,
comment: %(
NMTOKENS represents the NMTOKENS attribute type from [XML]. The ·value
space· of NMTOKENS is the set of finite, non-zero-length sequences of
·NMTOKEN·s. The ·lexical space· of NMTOKENS is the set of space-separated
lists of tokens, of which each token is in the ·lexical space· of NMTOKEN.
The ·item type· of NMTOKENS is NMTOKEN. NMTOKENS is derived from
·anySimpleType· in two steps: an anonymous list type is defined, whose
·item type· is NMTOKEN; this is the ·base type· of NMTOKENS, which
restricts its value space to lists with at least one item.
).freeze,
label: "NMTOKENS".freeze,
subClassOf: "xsd:anySimpleType".freeze,
type: "rdfs:Datatype".freeze
term :NOTATION,
comment: %(
NOTATION represents the NOTATION attribute type from [XML]. The ·value
space· of NOTATION is the set of QNames of notations declared in the
current schema. The ·lexical space· of NOTATION is the set of all names of
notations declared in the current schema \(in the form of QNames\).
).freeze,
label: "NOTATION".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :Name,
comment: %(
Name represents XML Names. The ·value space· of Name is the set of all
strings which ·match· the Name production of [XML]. The ·lexical space· of
Name is the set of all strings which ·match· the Name production of [XML].
The ·base type· of Name is token.
).freeze,
label: "Name".freeze,
subClassOf: "xsd:token".freeze,
type: "rdfs:Datatype".freeze
term :QName,
comment: %(
QName represents XML qualified names. The ·value space· of QName is the set
of tuples {namespace name, local part}, where namespace name is an anyURI
and local part is an NCName. The ·lexical space· of QName is the set of
strings that ·match· the QName production of [Namespaces in XML].
).freeze,
label: "QName".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :anyAtomicType,
comment: %(
anyAtomicType is a special ·restriction· of anySimpleType. The ·value· and
·lexical spaces· of anyAtomicType are the unions of the ·value· and
·lexical spaces· of all the ·primitive· datatypes, and anyAtomicType is
their ·base type·.
).freeze,
# Fixed label: was "anySimpleType" (copied from the following term); the
# label must name this term.
label: "anyAtomicType".freeze,
subClassOf: "xsd:anyType".freeze,
type: "rdfs:Datatype".freeze
term :anySimpleType,
comment: %(
The definition of anySimpleType is a special ·restriction· of anyType. The
·lexical space· of anySimpleType is the set of all sequences of Unicode
characters, and its ·value space· includes all ·atomic values· and all
finite-length lists of zero or more ·atomic values·.
).freeze,
label: "anySimpleType".freeze,
subClassOf: "xsd:anyType".freeze,
type: "rdfs:Datatype".freeze
term :anyType,
# Fixed "heirarchy" -> "hierarchy" typo in the comment text below.
comment: %(
The root of the [XML Schema 1.1] datatype hierarchy.
).freeze,
label: "anyType".freeze,
type: "rdfs:Datatype".freeze
term :anyURI,
comment: %(
anyURI represents an Internationalized Resource Identifier Reference
\(IRI\). An anyURI value can be absolute or relative, and may have an
optional fragment identifier \(i.e., it may be an IRI Reference\). This
type should be used when the value fulfills the role of an IRI, as
defined in [RFC 3987] or its successor\(s\) in the IETF Standards Track.
).freeze,
label: "anyURI".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :base64Binary,
comment: %(
base64Binary represents arbitrary Base64-encoded binary data. For
base64Binary data the entire binary stream is encoded using the Base64
Encoding defined in [RFC 3548], which is derived from the encoding
described in [RFC 2045].
).freeze,
label: "base64Binary".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :boolean,
comment: %(
boolean represents the values of two-valued logic.
).freeze,
# Fixed label: was "base64Binary", a copy-paste from the preceding term.
label: "boolean".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :byte,
comment: %(
byte is ·derived· from short by setting the value of ·maxInclusive· to be
127 and ·minInclusive· to be -128. The ·base type· of byte is short.
).freeze,
label: "byte".freeze,
subClassOf: "xsd:short".freeze,
type: "rdfs:Datatype".freeze
term :date,
comment: %(
date represents top-open intervals of exactly one day in length on the
timelines of dateTime, beginning on the beginning moment of each day, up to
but not including the beginning moment of the next day\). For non-timezoned
values, the top-open intervals disjointly cover the non-timezoned timeline,
one per day. For timezoned values, the intervals begin at every minute and
therefore overlap.
).freeze,
label: "date".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :dateTime,
comment: %(
dateTime represents instants of time, optionally marked with a particular
time zone offset. Values representing the same instant but having different
time zone offsets are equal but not identical.
).freeze,
label: "dateTime".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :dateTimeStamp,
comment: %(
The dateTimeStamp datatype is ·derived· from dateTime by giving the value
required to its explicitTimezone facet. The result is that all values of
dateTimeStamp are required to have explicit time zone offsets and the
datatype is totally ordered.
).freeze,
label: "dateTimeStamp".freeze,
subClassOf: "xsd:dateTime".freeze,
type: "rdfs:Datatype".freeze
term :dayTimeDuration,
comment: %(
dayTimeDuration is a datatype ·derived· from duration by restricting its
·lexical representations· to instances of dayTimeDurationLexicalRep. The
·value space· of dayTimeDuration is therefore that of duration restricted
to those whose ·months· property is 0. This results in a duration datatype
which is totally ordered.
).freeze,
label: "dayTimeDuration".freeze,
subClassOf: "xsd:duration".freeze,
type: "rdfs:Datatype".freeze
term :decimal,
comment: %(
decimal represents a subset of the real numbers, which can be represented
by decimal numerals. The ·value space· of decimal is the set of numbers
that can be obtained by dividing an integer by a non-negative power of ten,
i.e., expressible as i / 10n where i and n are integers and n ≥ 0.
Precision is not reflected in this value space; the number 2.0 is not
distinct from the number 2.00. The order relation on decimal is the order
relation on real numbers, restricted to this subset.
).freeze,
label: "decimal".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :double,
comment: %(
The double datatype is patterned after the IEEE double-precision 64-bit
floating point datatype [IEEE 754-2008]. Each floating point datatype has a
value space that is a subset of the rational numbers. Floating point
numbers are often used to approximate arbitrary real numbers.
).freeze,
label: "double".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :duration,
comment: %(
duration is a datatype that represents durations of time. The concept of
duration being captured is drawn from those of [ISO 8601], specifically
durations without fixed endpoints. For example, "15 days" \(whose most
common lexical representation in duration is "'P15D'"\) is a duration value;
"15 days beginning 12 July 1995" and "15 days ending 12 July 1995" are not
duration values. duration can provide addition and subtraction operations
between duration values and between duration/dateTime value pairs, and can
be the result of subtracting dateTime values. However, only addition to
dateTime is required for XML Schema processing and is defined in the
function ·dateTimePlusDuration·.
).freeze,
label: "duration".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :float,
comment: %(
The float datatype is patterned after the IEEE single-precision 32-bit
floating point datatype [IEEE 754-2008]. Its value space is a subset of the
rational numbers. Floating point numbers are often used to approximate
arbitrary real numbers.
).freeze,
label: "float".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :gDay,
comment: %(
gDay represents whole days within an arbitrary month—days that recur at the
same point in each \(Gregorian\) month. This datatype is used to represent a
specific day of the month. To indicate, for example, that an employee gets
a paycheck on the 15th of each month. \(Obviously, days beyond 28 cannot
occur in all months; they are nonetheless permitted, up to 31.\)
).freeze,
label: "gDay".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :gMonth,
comment: %(
gMonth represents whole \(Gregorian\) months within an arbitrary year—months
that recur at the same point in each year. It might be used, for example,
to say what month annual Thanksgiving celebrations fall in different
countries \(--11 in the United States, --10 in Canada, and possibly other
months in other countries\).
).freeze,
label: "gMonth".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :gMonthDay,
comment: %(
gMonthDay represents whole calendar days that recur at the same point in
each calendar year, or that occur in some arbitrary calendar year.
\(Obviously, days beyond 28 cannot occur in all Februaries; 29 is
nonetheless permitted.\)
).freeze,
label: "gMonthDay".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :gYear,
comment: %(
gYear represents Gregorian calendar years.
).freeze,
label: "gYear".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :gYearMonth,
comment: %(
gYearMonth represents specific whole Gregorian months in specific Gregorian years.
).freeze,
label: "gYearMonth".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :hexBinary,
comment: %(
hexBinary represents arbitrary hex-encoded binary data.
).freeze,
label: "hexBinary".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :int,
comment: %(
int is ·derived· from long by setting the value of ·maxInclusive· to be
2147483647 and ·minInclusive· to be -2147483648. The ·base type· of int
is long.
).freeze,
label: "int".freeze,
subClassOf: "xsd:long".freeze,
type: "rdfs:Datatype".freeze
term :integer,
comment: %(
integer is ·derived· from decimal by fixing the value of ·fractionDigits·
to be 0 and disallowing the trailing decimal point. This results in the
standard mathematical concept of the integer numbers. The ·value space· of
integer is the infinite set {...,-2,-1,0,1,2,...}. The ·base type· of
integer is decimal.
).freeze,
label: "integer".freeze,
subClassOf: "xsd:decimal".freeze,
type: "rdfs:Datatype".freeze
term :language,
comment: %(
language represents formal natural language identifiers, as defined by [BCP
47] \(currently represented by [RFC 4646] and [RFC 4647]\) or its
successor\(s\). The ·value space· and ·lexical space· of language are the set
of all strings that conform to the pattern [a-zA-Z]{1,8}\(-[a-zA-Z0-9]{1,8}\)*
).freeze,
label: "language".freeze,
subClassOf: "xsd:token".freeze,
type: "rdfs:Datatype".freeze
term :long,
comment: %(
long is ·derived· from integer by setting the value of ·maxInclusive· to
be 9223372036854775807 and ·minInclusive· to be -9223372036854775808. The
·base type· of long is integer.
).freeze,
label: "long".freeze,
subClassOf: "xsd:integer".freeze,
type: "rdfs:Datatype".freeze
term :negativeInteger,
comment: %(
negativeInteger is ·derived· from nonPositiveInteger by setting the value
of ·maxInclusive· to be -1. This results in the standard mathematical
concept of the negative integers. The ·value space· of negativeInteger is
the infinite set {...,-2,-1}. The ·base type· of negativeInteger is
nonPositiveInteger.
).freeze,
label: "negativeInteger".freeze,
subClassOf: "xsd:nonPositiveInteger".freeze,
type: "rdfs:Datatype".freeze
term :nonNegativeInteger,
comment: %(
nonNegativeInteger is ·derived· from integer by setting the value of
·minInclusive· to be 0. This results in the standard mathematical concept
of the non-negative integers. The ·value space· of nonNegativeInteger is
the infinite set {0,1,2,...}. The ·base type· of nonNegativeInteger is
integer.
).freeze,
label: "nonNegativeInteger".freeze,
subClassOf: "xsd:integer".freeze,
type: "rdfs:Datatype".freeze
term :nonPositiveInteger,
comment: %(
nonPositiveInteger is ·derived· from integer by setting the value of
·maxInclusive· to be 0. This results in the standard mathematical concept
of the non-positive integers. The ·value space· of nonPositiveInteger is
the infinite set {...,-2,-1,0}. The ·base type· of nonPositiveInteger is
integer.
).freeze,
label: "nonPositiveInteger".freeze,
subClassOf: "xsd:integer".freeze,
type: "rdfs:Datatype".freeze
term :normalizedString,
comment: %(
normalizedString represents white space normalized strings. The ·value
space· of normalizedString is the set of strings that do not contain the
carriage return \(#xD\), line feed \(#xA\) nor tab \(#x9\) characters. The
·lexical space· of normalizedString is the set of strings that do not
contain the carriage return \(#xD\), line feed \(#xA\) nor tab \(#x9\)
characters. The ·base type· of normalizedString is string.
).freeze,
label: "normalizedString".freeze,
subClassOf: "xsd:string".freeze,
type: "rdfs:Datatype".freeze
term :positiveInteger,
comment: %(
positiveInteger is ·derived· from nonNegativeInteger by setting the value
of ·minInclusive· to be 1. This results in the standard mathematical
concept of the positive integer numbers. The ·value space· of
positiveInteger is the infinite set {1,2,...}. The ·base type· of
positiveInteger is nonNegativeInteger.
).freeze,
label: "positiveInteger".freeze,
subClassOf: "xsd:nonNegativeInteger".freeze,
type: "rdfs:Datatype".freeze
term :short,
comment: %(
short is ·derived· from int by setting the value of ·maxInclusive· to be
32767 and ·minInclusive· to be -32768. The ·base type· of short is int.
).freeze,
label: "short".freeze,
subClassOf: "xsd:int".freeze,
type: "rdfs:Datatype".freeze
term :string,
comment: %(
The string datatype represents character strings in XML.
).freeze,
label: "string".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :time,
comment: %(
time represents instants of time that recur at the same point in each
calendar day, or that occur in some arbitrary calendar day.
).freeze,
label: "time".freeze,
subClassOf: "xsd:anyAtomicType".freeze,
type: "rdfs:Datatype".freeze
term :token,
comment: %(
token represents tokenized strings. The ·value space· of token is the set
of strings that do not contain the carriage return \(#xD\), line feed \(#xA\)
nor tab \(#x9\) characters, that have no leading or trailing spaces \(#x20\)
and that have no internal sequences of two or more spaces. The ·lexical
space· of token is the set of strings that do not contain the carriage
return \(#xD\), line feed \(#xA\) nor tab \(#x9\) characters, that have no
leading or trailing spaces \(#x20\) and that have no internal sequences of
two or more spaces. The ·base type· of token is normalizedString.
).freeze,
label: "token".freeze,
subClassOf: "xsd:normalizedString".freeze,
type: "rdfs:Datatype".freeze
term :unsignedByte,
# Fixed "nsignedByte" -> "unsignedByte" typo in the comment text below.
comment: %(
unsignedByte is ·derived· from unsignedShort by setting the value of
·maxInclusive· to be 255. The ·base type· of unsignedByte is
unsignedShort.
).freeze,
label: "unsignedByte".freeze,
subClassOf: "xsd:unsignedShort".freeze,
type: "rdfs:Datatype".freeze
term :unsignedInt,
comment: %(
unsignedInt is ·derived· from unsignedLong by setting the value of
·maxInclusive· to be 4294967295. The ·base type· of unsignedInt is
unsignedLong.
).freeze,
label: "unsignedInt".freeze,
subClassOf: "xsd:unsignedLong".freeze,
type: "rdfs:Datatype".freeze
term :unsignedLong,
comment: %(
unsignedLong is ·derived· from nonNegativeInteger by setting the value of
·maxInclusive· to be 18446744073709551615. The ·base type· of unsignedLong
is nonNegativeInteger.
).freeze,
label: "unsignedLong".freeze,
subClassOf: "xsd:nonNegativeInteger".freeze,
type: "rdfs:Datatype".freeze
term :unsignedShort,
comment: %(
unsignedShort is ·derived· from unsignedInt by setting the value of
·maxInclusive· to be 65535. The ·base type· of unsignedShort is
unsignedInt.
).freeze,
label: "unsignedShort".freeze,
subClassOf: "xsd:unsignedInt".freeze,
type: "rdfs:Datatype".freeze
term :yearMonthDuration,
comment: %(
yearMonthDuration is a datatype ·derived· from duration by restricting its
·lexical representations· to instances of yearMonthDurationLexicalRep. The
·value space· of yearMonthDuration is therefore that of duration
restricted to those whose ·seconds· property is 0. This results in a
duration datatype which is totally ordered.
).freeze,
label: "yearMonthDuration".freeze,
subClassOf: "xsd:duration".freeze,
type: "rdfs:Datatype".freeze
end
end
| 44.034351 | 86 | 0.68653 |
d597e0817c0d5ed652ba005c3470bd4023580996 | 2,787 | require './test/test_helper'
require 'backports/2.3.0/queue/close'
require 'backports/ractor/filtered_queue'
class FilteredQueueTest < Test::Unit::TestCase
def setup
# Fresh queue per test case.
@q = ::Backports::FilteredQueue.new
end
def assert_remains(*values)
  # Drain the queue completely and check the leftovers arrive in order.
  drained = []
  drained.push(@q.pop) until @q.empty?
  assert_equal values, drained
end
def test_basic
# FIFO push/pop, including a pop that blocks until a producer thread
# supplies further values.
@q << 1 << 2
x = []
x << @q.pop
@q << 3
x << @q.pop
x << @q.pop
t = Thread.new { sleep(0.1); @q << 4 << 5}
x << @q.pop
t.join
assert_equal([1,2,3,4], x)
assert_remains(5)
end
def test_close
# Closing from another thread wakes a blocked pop with ClosedQueueError,
# and every subsequent pop (with or without timeout) and push raises too.
Thread.new { sleep(0.2); @q.close }
assert_raise(::Backports::FilteredQueue::ClosedQueueError) { @q.pop }
assert_raise(::Backports::FilteredQueue::ClosedQueueError) { @q.pop }
assert_raise(::Backports::FilteredQueue::ClosedQueueError) { @q.pop(timeout: 0) }
assert_raise(::Backports::FilteredQueue::ClosedQueueError) { @q << 42 }
end
def test_close_after
# Values enqueued before close can still be drained; only once the queue
# is empty does pop raise.
@q << 1 << 2
@q.close
assert_equal(1, @q.pop)
assert_equal(2, @q.pop)
assert_raise(::Backports::FilteredQueue::ClosedQueueError) { @q.pop }
end
def test_timeout
# pop(timeout:) raises TimeoutError when nothing arrives in time, and
# succeeds when a value shows up within the window.
# NOTE(review): sleep-based timing — potentially flaky on loaded machines.
assert_raise(::Backports::FilteredQueue::TimeoutError) { @q.pop(timeout: 0) }
Thread.new { sleep(0.2); @q << :done }
assert_raise(::Backports::FilteredQueue::TimeoutError) { @q.pop(timeout: 0.1) }
assert_equal(:done, @q.pop(timeout: 0.2))
end
def assert_eventually_equal(value)
  # Poll (up to ~1s) for the block to reach +value+, tolerating scheduler
  # lag between threads.
  100.times do
    return true if value == yield
    sleep(0.01)
  end
  # Final check via assert_equal so a timeout failure reports
  # expected vs actual instead of a bare "false is not true".
  assert_equal value, yield
end
def test_filter
# send 0 to 7 to queue, with filters for 0 to 2.
# start queue with `before` elements already present
(0..4).each do |before|
other = [4,5,6,7]
calls = 0
before.times { @q << other.shift }
# Three consumers, each filtering for a value (0..2) that is not yet queued.
t = 3.times.map do |i|
Thread.new do
@q.pop { |n| calls += 1; Thread.pass; n == i }
end
end
Thread.pass
other.each { |i| @q << i }
# All three consumers should end up waiting, having each inspected all
# four non-matching values.
assert_eventually_equal(3) {@q.num_waiting}
assert_eventually_equal(3 * 4) {calls}
# Every new element is offered to every waiting filter.
@q << :extra
assert_eventually_equal(3 * 5) {calls}
@q << 0 << 1 << 2 << 3
t.each(&:join)
assert_equal 0, @q.num_waiting
# Rejected values stay queued, in arrival order.
assert_remains 4, 5, 6, 7, :extra, 3
end
end
def test_non_standard_filters
# A filter that breaks or raises still consumes the element it was
# offered; the queue remains usable afterwards (only 3 is left).
@q << 1 << 2 << 3
@q.pop { break }
@q.pop { raise 'err' } rescue nil
assert_remains 3
end
def test_recursive_filter
# A filter block may itself pop from the queue; the inner pop matches 0
# regardless of whether 0 or :other arrives first.
[
[0, :other],
[:other, 0],
].each do |a, b|
@q << :first << a << b << :last
inner = nil
a = @q.pop do
inner = @q.pop { |x| x == 0}
false # => ignored
end
assert_equal :first, a
assert_equal 0, inner
assert_remains :other, :last
end
end
end
| 25.336364 | 85 | 0.584141 |
f7a0fafee69c15f9a7f2d0c62920fc1dfe9d3361 | 2,469 |
require 'Bacon_Colored'
require 'uga_uga'
require 'pry'
require 'awesome_print'
module Bacon
class Context
def clean(obj)
  # Normalise parser output for spec comparisons: strings are stripped,
  # Uga_Uga instances are unwrapped via their stack, and arrays of
  # {type:, output:} hashes collapse to [type, cleaned_output, ...]
  # (String-typed entries contribute only their cleaned output).
  return obj.strip if obj.is_a?(String)
  return obj unless obj.is_a?(Array) || obj.is_a?(Uga_Uga)
  return clean(obj.stack) if obj.respond_to?(:stack)
  obj.each_with_object([]) do |entry, acc|
    acc << entry[:type] unless entry[:type] == String
    acc << clean(entry[:output])
  end
end
def uga *args
SAMPLE.run *args
end
end # === class Context
end # === module Bacon
# Sample Uga_Uga grammar used by the specs. Recognizes: multi-line and
# one-liner CSS blocks, explicit multi-line Strings, two simple css-ish
# properties (color, id), and HTML-style tags; a dashed divider starts an
# implicit multi-line String.
SAMPLE = Uga_Uga.new do

  skip if white?

  case

  when rex?("(white*)(...) { ") # === multi-line css
    close = captures.first
    shift
    final = {
      :type => :css,
      :selectors => captures.last,
      :raw => grab_until(/\A#{close}\}\ *\Z/)
    }

  when rex?(" (!{) {(...)} ") # === css one-liner
    selectors , content = captures
    shift
    final = {
      :type => :css,
      :selectors => selectors,
      :raw => [content],
      :one_liner => true
    }

  when rex?(" String (word) ") # === Multi-line String
    close = captures.first
    shift
    final = {
      :type => String,
      :output => grab_until(/\A\ *#{Regexp.escape close}\ *\Z/).join("\n")
    }

  when rex?(" color (word) ")
    # l![/\A\ *color\ *([a-zA-Z0-9\-\_\#]+)\ *\Z/]
    val = captures.first
    final = {
      :type => :css_property,
      :name => :color,
      :output=> val
    }

  when rex?(" id (word) ")
    # l![/\A\ *id\ *([a-zA-Z0-9\-\_\#]+)\ *\Z/]
    val = captures.first
    final = {
      :type => :id!,
      :output=> val
    }

  else
    tag = first('.')
    if tail?("/#{tag}")
      {:type=>:html, :tag=>tag.to_sym, :one_liner=>true, :output => shift}
    else
      tag = tag.to_sym
      case tag
      when :p, :a
        line = shift
        bracket = bracket(line, "/#{tag}")
        final = {:type=>:html, :tag=>tag, :selector => line, :raw=>grab_until(bracket)}
        final
      else
        if stack.empty?
          {:type=>String, :raw=>grab_all}
        elsif rex?(" (_) ", /\-{4,}/) # Start of multi-line string --------------------
          eos = l!.rstrip
          shift
          # BUGFIX: was /\A#{eos}\ *|Z/ — the stray alternation "|Z" made the
          # terminator match any line containing a literal "Z". The sibling
          # patterns above anchor with \Z; do the same here.
          # NOTE(review): unlike the "String" branch, eos is not passed
          # through Regexp.escape — confirm dividers never contain
          # regex metacharacters.
          {:type=>String, :raw=>grab_all_or_until(/\A#{eos}\ *\Z/)}
        else
          fail ArgumentError, "Unknown command: #{line_num}: #{l!.inspect}"
        end
      end
    end

  end # === case

end # === .new
| 22.651376 | 91 | 0.480356 |
f818d368204f38fd00a05f634080a348a3f0060e | 294 | class RemoveDuplicatesDeadlineTypes < ActiveRecord::Migration
def up
execute <<-SQL
update due_dates set deadline_type_id=7 where deadline_type_id=9
SQL
execute <<-SQL
delete from deadline_types where id=9
SQL
end
def down
end
end
| 15.473684 | 75 | 0.64966 |
1c6ee0bad34dff687b4981bd5f7824ca68f36e51 | 3,051 | require 'spec_helper'
describe Locomotive::Concerns::Site::AccessPoints do
let(:domains) { [] }
let(:site) { build(:site, domains: domains) }
describe '#domains=' do
it 'downcases the domains' do
site.domains = ['FIRST.com', 'second.com', 'THIRD.com']
expect(site.domains).to eq(['first.com', 'second.com', 'third.com'])
end
end
describe '#valid?' do
subject { site.valid? }
it { is_expected.to eq true }
describe 'forbidden domains defined' do
before { allow(Locomotive.config).to receive(:reserved_domains).and_return(['www.locomotiveapp.com', /.+\.acme\.org/]) }
let(:domains) { ['example.fr', 'acme.org'] }
it { is_expected.to eq true }
context 'setting a forbidden domain name' do
let(:domains) { ['example.fr', 'www.locomotiveapp.com', 'staging.acme.org'] }
it 'adds errors for each invalid domain' do
is_expected.to eq false
expect(site.errors['domains']).to eq(["www.locomotiveapp.com is already taken", "staging.acme.org is already taken"])
end
end
end
end
describe 'domain sync' do
let!(:listener) { DomainEventListener.new }
after { listener.shutdown }
subject { listener }
describe 'on saving' do
before { site.save }
it 'does not emit an event' do
expect(subject.size).to eq 0
end
context 'new site' do
let(:domains) { ['www.example.com', 'example.com'] }
it 'only tracks new domains' do
expect(subject.added).to eq(['www.example.com', 'example.com'])
expect(subject.removed).to eq([])
end
end
context 'existing site' do
let(:domains) { ['www.boring.org', 'www.awesome.io'] }
before { listener.clear; site.domains = ['www.acme.com', 'www.awesome.io']; site.save; site.reload }
it 'tracks new domains and removed ones' do
expect(subject.added).to eq(['www.acme.com'])
expect(subject.removed).to eq(['www.boring.org'])
end
end
end
describe 'on destroying' do
let(:domains) { ['www.example.com', 'example.com'] }
before { site.save; listener.clear; site.destroy }
it 'tracks removed domains' do
expect(subject.added).to eq([])
expect(subject.removed).to eq(['www.example.com', 'example.com'])
end
end
end
class DomainEventListener
def initialize
@stack = []
@subscriber = ActiveSupport::Notifications.subscribe('locomotive.site.domain_sync') do |name, start, finish, id, payload|
emit(name, payload)
end
end
def emit(name, info = {})
@stack << [name, info]
end
def size
@stack.size
end
def added
@stack.map { |(_, info)| info[:added] }.flatten.compact
end
def removed
@stack.map { |(_, info)| info[:removed] }.flatten.compact
end
def clear
@stack.clear
end
def shutdown
ActiveSupport::Notifications.unsubscribe(@subscriber)
end
end
end
| 23.469231 | 127 | 0.601114 |
# CRUD for the signed-in user's own tweets. Lookups are always scoped to
# current_user.tweets, so users can never touch other users' records.
class TweetsController < ApplicationController
  before_action :require_signed_in!
  # The same scoped find was duplicated in show/edit/update; centralize it.
  before_action :set_tweet, only: %i[show edit update]

  def index
    @tweets = current_user.tweets
  end

  def new
    @tweet = Tweet.new
  end

  def create
    @tweet = current_user.tweets.new(tweet_params)
    if @tweet.save
      redirect_to tweet_url(@tweet)
    else
      flash.now[:errors] = @tweet.errors.full_messages
      render :new
    end
  end

  # @tweet is loaded by set_tweet.
  def show
  end

  def edit
  end

  def update
    if @tweet.update(tweet_params)
      redirect_to tweet_url(@tweet)
    else
      render :edit
    end
  end

  private

  # Scoped lookup; raises ActiveRecord::RecordNotFound for foreign ids,
  # exactly as the inline finds did.
  def set_tweet
    @tweet = current_user.tweets.find(params[:id])
  end

  def tweet_params
    params.require(:tweet).permit(:title, :body)
  end
end
| 17.369565 | 54 | 0.668335 |
# JSON API over Submission documents: paginated listing with type filter
# and sort, a show endpoint embedding comments/uploads, and creation stats.
class Api::V1::SubmissionsController < ApplicationController
  include ActionController::ImplicitRender
  include SubmissionsHelper

  # Defaults for embedded association counts on #show, and page size.
  DEFAULT_COMMENTS = 10
  DEFAULT_UPLOADS = 5
  PER_PAGE = 20

  def index
    @type = type_for(params[:type])
    @sort = sort_for(params[:sort])

    # approved=0 explicitly requests unapproved records as well.
    @submissions = params[:approved] == "0" ? Submission.all : Submission.valid
    @submissions = @submissions.where(:type => @type) unless @type == 'All'
    @submissions = @submissions.desc(@sort).page(params[:page]).per(PER_PAGE)

    @page = params[:page]
    @max = @submissions.total_pages

    if @submissions.empty?
      render json: { error: 'No resources found' }, status: :no_content
    end
  end

  def show
    unless @submission = Submission.find(params[:id])
      return render json: { error: 'Resource not found' }, status: :not_found
    end

    # BUGFIX: `params[:x].to_i ||= n` never applied the default, because
    # to_i is always truthy (nil.to_i == 0). Apply the default before
    # coercion so a missing param yields the intended count, not 0.
    comments = (params[:comments] || DEFAULT_COMMENTS).to_i
    @comments = @submission.comments.limit(comments)

    uploads = (params[:uploads] || DEFAULT_UPLOADS).to_i
    @uploads = @submission.uploads.where(:approved => true).desc(:created_at).limit(uploads)
  end

  # Creation counts grouped by date (helper from SubmissionsHelper).
  def stats
    stats = group_by Submission, 'created_at'
    render json: stats.to_json
  end

  private

  # Maps a public type filter to a model class name. to_s makes a missing
  # param fall through to "All" instead of raising NoMethodError.
  def type_for(type)
    case type.to_s.downcase
    when "models", "assets"
      "Asset"
    when "levels", "maps"
      "Level"
    else
      "All"
    end
  end

  # Maps a public sort key to a document field; nil-safe like type_for.
  # NOTE(review): "newest" and "popular" both map to total_downloads —
  # "newest" probably wants a timestamp field; confirm before changing.
  def sort_for(sort)
    case sort.to_s.downcase
    when "newest"
      "total_downloads"
    when "updated"
      "last_update"
    when "popular"
      "total_downloads"
    else
      "approved_at"
    end
  end
end
| 23.328358 | 90 | 0.636596 |
1ddf5ebee5413f89b76d78a9a0fb66d10ac10989 | 235 | # == Schema Information
#
# Table name: FORMAT
#
# id :integer not null, primary key
# format_name :string(20)
# av_category :string(3)
#
# Read-only model over the legacy Oracle FORMAT table; see the schema
# annotation above for its columns. Table name is explicit because the
# legacy schema uses upper-case names.
class Legacy::Format < Legacy::OracleBase
  self.table_name = 'FORMAT'
end
| 18.076923 | 54 | 0.642553 |
# Common base for v1 API controllers: JSON-only responses through the
# custom ApiResponder.
class Api::V1::BaseController < ApplicationController
  # Centralized API error rescues are wired up only in production, so
  # development keeps the full exception pages.
  include ApiErrorConcern if Rails.env.production?

  self.responder = ApiResponder
  respond_to :json
end
| 17.2 | 53 | 0.767442 |
f87a26f7107a2b2aceee1fcb411c8ad568ba7493 | 137 | # frozen_string_literal: true
# Bare factory: FinancialAssistance::HealthBenefit currently requires no
# attributes to be built in specs.
FactoryBot.define do
  factory :health_benefit, class: 'FinancialAssistance::HealthBenefit' do
  end
end
| 17.125 | 73 | 0.79562 |
# Stream names are how we identify which updates should go to which users. All streams run over the same
# <tt>Turbo::StreamsChannel</tt>, but each with their own subscription. Since stream names are exposed directly to the user
# via the HTML stream subscription tags, we need to ensure that the name isn't tampered with, so the names are signed
# upon generation and verified upon receipt. All verification happens through the <tt>Turbo.signed_stream_verifier</tt>.
module Turbo::Streams::StreamName
  # Used by <tt>Turbo::StreamsChannel</tt>: turns a signed stream name back
  # into the clear-text name via <tt>Turbo.signed_stream_verifier</tt>.
  def verified_stream_name(signed_stream_name)
    Turbo.signed_stream_verifier.verified signed_stream_name
  end

  # Used by <tt>Turbo::StreamsHelper#turbo_stream_from(*streamables)</tt>:
  # signs the stream name derived from one or many streamables.
  def signed_stream_name(streamables)
    Turbo.signed_stream_verifier.generate stream_name_from(streamables)
  end

  private
    # A single streamable maps to its GlobalID param when available,
    # falling back to #to_param; an array maps each element recursively
    # and joins the parts with ":".
    def stream_name_from(streamables)
      case streamables
      when Array
        streamables.map { |streamable| stream_name_from(streamable) }.join(":")
      else
        streamables.try(:to_gid_param) || streamables.to_param
      end
    end
end
| 48.32 | 123 | 0.764901 |
211b5d81a6946df82e13229bf6446d2c8922a581 | 1,009 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'prmd/version'
# Gem packaging for prmd: JSON Schema scaffolding/verification/doc tooling.
Gem::Specification.new do |spec|
  spec.name          = "prmd"
  spec.version       = Prmd::VERSION
  spec.authors       = ["geemus"]
  spec.email         = ["[email protected]"]
  spec.description   = %q{scaffold, verify and generate docs from JSON Schema}
  spec.summary       = %q{JSON Schema tooling}
  spec.homepage      = "https://github.com/heroku/prmd"
  spec.license       = "MIT"

  # Ship everything tracked by git; executables come from bin/.
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_dependency "erubis", "~> 2.7"
  spec.add_dependency "json_schema", "~> 0.1"
  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake", "~> 10.2"
  spec.add_development_dependency "minitest", "~> 5.3"
end
| 36.035714 | 78 | 0.64222 |
0336cd203424d5beba6f2e3e9ea27843823ff1a7 | 1,466 | require 'test_helper'
# Functional tests for ProblemsController covering the full CRUD cycle.
class ProblemsControllerTest < ActionController::TestCase
  setup do
    @problem = problems(:one)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:problems)
  end

  test "should get new" do
    get :new
    assert_response :success
  end

  test "should create problem" do
    assert_difference('Problem.count') do
      post :create, problem: problem_attributes
    end

    assert_redirected_to problem_path(assigns(:problem))
  end

  test "should show problem" do
    get :show, id: @problem
    assert_response :success
  end

  test "should get edit" do
    get :edit, id: @problem
    assert_response :success
  end

  test "should update problem" do
    put :update, id: @problem, problem: problem_attributes
    assert_redirected_to problem_path(assigns(:problem))
  end

  test "should destroy problem" do
    assert_difference('Problem.count', -1) do
      delete :destroy, id: @problem
    end

    assert_redirected_to problems_path
  end

  private

  # The create/update payloads were duplicated inline; both simply mirror
  # the fixture's attributes.
  def problem_attributes
    { from_user_id: @problem.from_user_id, problem_desc: @problem.problem_desc,
      problem_type_id: @problem.problem_type_id, resolved: @problem.resolved,
      sent: @problem.sent, thing_id: @problem.thing_id, to_user_id: @problem.to_user_id }
  end
end
| 29.32 | 271 | 0.731241 |
5dff47eb2201c8f3e66ea6920919d6b6019206ce | 9,944 | # frozen_string_literal: true
require 'rails_helper'
# Unit specs for CARMA::Models::Submission: plain accessors, metadata
# coercion into typed Metadata/Veteran/Caregiver objects, construction from
# a caregivers-assistance claim, request-payload serialization, and the
# submit! flow (recorded with VCR).
RSpec.describe CARMA::Models::Submission, type: :model do
  describe '#carma_case_id' do
    it 'is accessible' do
      value = 'aB935000000A9GoCAK'
      subject.carma_case_id = value
      expect(subject.carma_case_id).to eq(value)
    end
  end

  describe '#submitted_at' do
    it 'is accessible' do
      value = DateTime.now.iso8601
      subject.submitted_at = value
      expect(subject.submitted_at).to eq(value)
    end
  end

  # submitted? is true when either marker of a completed submission is set.
  describe '#submitted?' do
    it 'returns true if :carma_case_id is set' do
      subject.carma_case_id = 'aB935000000A9GoCAK'
      expect(subject.submitted?).to eq(true)
    end

    it 'returns true if :submitted_at is set' do
      subject.submitted_at = DateTime.now.iso8601
      expect(subject.submitted?).to eq(true)
    end

    it 'returns false if :carma_case_id and :submitted_at are falsy' do
      expect(subject.submitted?).to eq(false)
    end
  end

  describe '#data' do
    it 'is accessible' do
      value = { 'my' => 'data' }
      subject.data = value
      expect(subject.data).to eq(value)
    end
  end

  # Assigning a plain hash must coerce each section into its typed model.
  describe '#metadata' do
    it 'is accessible' do
      subject.metadata = {
        claim_id: 123,
        veteran: {
          icn: 'VET1234',
          is_veteran: true
        },
        primary_caregiver: {
          icn: 'PC1234'
        },
        secondary_caregiver_one: {
          icn: 'SCO1234'
        },
        secondary_caregiver_two: {
          icn: 'SCT1234'
        }
      }

      # metadata
      expect(subject.metadata).to be_instance_of(described_class::Metadata)
      expect(subject.metadata.claim_id).to eq(123)
      # metadata.veteran
      expect(subject.metadata.veteran).to be_instance_of(described_class::Metadata::Veteran)
      expect(subject.metadata.veteran.icn).to eq('VET1234')
      expect(subject.metadata.veteran.is_veteran).to eq(true)
      # metadata.primary_caregiver
      expect(subject.metadata.primary_caregiver).to be_instance_of(described_class::Metadata::Caregiver)
      expect(subject.metadata.primary_caregiver.icn).to eq('PC1234')
      # metadata.secondary_caregiver_one
      expect(subject.metadata.secondary_caregiver_one).to be_instance_of(described_class::Metadata::Caregiver)
      expect(subject.metadata.secondary_caregiver_one.icn).to eq('SCO1234')
      # metadata.secondary_caregiver_two
      expect(subject.metadata.secondary_caregiver_two).to be_instance_of(described_class::Metadata::Caregiver)
      expect(subject.metadata.secondary_caregiver_two.icn).to eq('SCT1234')
    end
  end

  describe '::new' do
    # Secondary caregivers default to nil; the other sections are always
    # built (with nil fields).
    it 'initializes with defaults' do
      expect(subject.carma_case_id).to eq(nil)
      expect(subject.submitted_at).to eq(nil)
      expect(subject.data).to eq(nil)
      # metadata
      expect(subject.metadata).to be_instance_of(described_class::Metadata)
      expect(subject.metadata.claim_id).to eq(nil)
      # metadata.veteran
      expect(subject.metadata.veteran).to be_instance_of(described_class::Metadata::Veteran)
      expect(subject.metadata.veteran.icn).to eq(nil)
      expect(subject.metadata.veteran.is_veteran).to eq(nil)
      # metadata.primary_caregiver
      expect(subject.metadata.primary_caregiver).to be_instance_of(described_class::Metadata::Caregiver)
      expect(subject.metadata.primary_caregiver.icn).to eq(nil)
      # metadata.secondary_caregiver_one
      expect(subject.metadata.secondary_caregiver_one).to eq(nil)
      # metadata.secondary_caregiver_two
      expect(subject.metadata.secondary_caregiver_two).to eq(nil)
    end

    it 'accepts :carma_case_id, :submitted_at, :data, and :metadata' do
      expected = {
        carma_case_id: 'aB935000000A9GoCAK',
        submitted_at: DateTime.now.iso8601,
        data: {
          'my' => 'data'
        }
      }

      subject = described_class.new(
        carma_case_id: expected[:carma_case_id],
        submitted_at: expected[:submitted_at],
        data: expected[:data],
        metadata: {
          claim_id: 123,
          veteran: {
            icn: 'VET1234',
            is_veteran: true
          },
          primary_caregiver: {
            icn: 'PC1234'
          },
          secondary_caregiver_one: {
            icn: 'SCO1234'
          },
          secondary_caregiver_two: {
            icn: 'SCT1234'
          }
        }
      )

      expect(subject.carma_case_id).to eq(expected[:carma_case_id])
      expect(subject.submitted_at).to eq(expected[:submitted_at])
      expect(subject.data).to eq(expected[:data])
      # metadata
      expect(subject.metadata).to be_instance_of(described_class::Metadata)
      expect(subject.metadata.claim_id).to eq(123)
      # metadata.veteran
      expect(subject.metadata.veteran).to be_instance_of(described_class::Metadata::Veteran)
      expect(subject.metadata.veteran.icn).to eq('VET1234')
      expect(subject.metadata.veteran.is_veteran).to eq(true)
      # metadata.primary_caregiver
      expect(subject.metadata.primary_caregiver).to be_instance_of(described_class::Metadata::Caregiver)
      expect(subject.metadata.primary_caregiver.icn).to eq('PC1234')
      # metadata.secondary_caregiver_one
      expect(subject.metadata.secondary_caregiver_one).to be_instance_of(described_class::Metadata::Caregiver)
      expect(subject.metadata.secondary_caregiver_one.icn).to eq('SCO1234')
      # metadata.secondary_caregiver_two
      expect(subject.metadata.secondary_caregiver_two).to be_instance_of(described_class::Metadata::Caregiver)
      expect(subject.metadata.secondary_caregiver_two.icn).to eq('SCT1234')
    end
  end

  describe '::from_claim' do
    it 'transforms a CaregiversAssistanceClaim to a new CARMA::Model::Submission' do
      claim = build(:caregivers_assistance_claim)

      submission = described_class.from_claim(claim)

      expect(submission).to be_instance_of(described_class)
      expect(submission.data).to eq(claim.parsed_form)
      expect(submission.carma_case_id).to eq(nil)
      expect(submission.submitted_at).to eq(nil)
      expect(submission.metadata).to be_instance_of(described_class::Metadata)
      expect(submission.metadata.claim_id).to eq(claim.id)
    end

    # The claim's own id always wins over a caller-provided claim_id.
    it 'will override :claim_id when passed in metadata and use claim.id instead' do
      claim = build(:caregivers_assistance_claim)

      submission = described_class.from_claim(claim, claim_id: 99)

      expect(submission).to be_instance_of(described_class)
      expect(submission.data).to eq(claim.parsed_form)
      expect(submission.carma_case_id).to eq(nil)
      expect(submission.submitted_at).to eq(nil)
      expect(submission.metadata).to be_instance_of(described_class::Metadata)
      expect(submission.metadata.claim_id).to eq(claim.id)
    end
  end

  describe '::request_payload_keys' do
    it 'inherits fron Base' do
      expect(described_class.ancestors).to include(CARMA::Models::Base)
    end

    it 'sets request_payload_keys' do
      expect(described_class.request_payload_keys).to eq(%i[data metadata])
    end
  end

  # Serialization camelCases metadata keys for the CARMA API.
  describe '#to_request_payload' do
    it 'can receive :to_request_payload' do
      subject = described_class.new(
        data: {
          'my' => 'data'
        },
        metadata: {
          claim_id: 123,
          veteran: {
            icn: 'VET1234',
            is_veteran: true
          },
          primary_caregiver: {
            icn: 'PC1234'
          },
          secondary_caregiver_one: {
            icn: 'SCO1234'
          },
          secondary_caregiver_two: {
            icn: 'SCT1234'
          }
        }
      )

      expect(subject.to_request_payload).to eq(
        {
          'data' => {
            'my' => 'data'
          },
          'metadata' => {
            'claimId' => 123,
            'veteran' => {
              'icn' => 'VET1234',
              'isVeteran' => true
            },
            'primaryCaregiver' => {
              'icn' => 'PC1234'
            },
            'secondaryCaregiverOne' => {
              'icn' => 'SCO1234'
            },
            'secondaryCaregiverTwo' => {
              'icn' => 'SCT1234'
            }
          }
        }
      )
    end
  end

  describe '#submit!' do
    let(:submission) do
      CARMA::Models::Submission.from_claim(
        build(:caregivers_assistance_claim),
        {
          veteran: {
            icn: 'VET1234',
            is_veteran: true
          },
          primary_caregiver: {
            icn: 'PC1234'
          },
          secondary_caregiver_one: {
            icn: 'SCO1234'
          },
          secondary_caregiver_two: {
            icn: 'SCT1234'
          }
        }
      )
    end

    # Re-submission is a hard error and must not hit the CARMA client.
    context 'when already submitted' do
      it 'raises an exception' do
        submission.submitted_at = DateTime.now.iso8601
        submission.carma_case_id = 'aB935000000A9GoCAK'

        expect_any_instance_of(CARMA::Client::Client).not_to receive(:create_submission_stub)

        expect { submission.submit! }.to raise_error('This submission has already been submitted to CARMA')
      end
    end

    context 'when submission is valid' do
      # Uses a recorded 201 response; the expected ids/timestamps below
      # mirror the cassette payload.
      it 'submits to CARMA, and updates :carma_case_id and :submitted_at' do
        expect(submission.carma_case_id).to eq(nil)
        expect(submission.submitted_at).to eq(nil)
        expect(submission.submitted?).to eq(false)

        expected_res_body = {
          'data' => {
            'carmacase' => {
              'id' => 'aB935000000F3VnCAK',
              'createdAt' => '2020-03-09T10:48:59Z'
            }
          }
        }

        VCR.use_cassette 'carma/submissions/create/201' do
          submission.submit!
        end

        expect(submission.carma_case_id).to eq(expected_res_body['data']['carmacase']['id'])
        expect(submission.submitted_at).to eq(expected_res_body['data']['carmacase']['createdAt'])
        expect(submission.submitted?).to eq(true)
      end
    end
  end
end
| 32.077419 | 110 | 0.632542 |
f72d7234479b91f31b13c4abc08a39a20044c276 | 1,196 | require_relative "boot"
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_mailbox/engine"
require "action_text/engine"
require "action_view/railtie"
require "action_cable/engine"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module OurWarehouse
  # Application-wide configuration for the OurWarehouse Rails app.
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 6.1

    # Configuration for the application, engines, and railties goes here.
    #
    # These settings can be overridden in specific environments using the files
    # in config/environments, which are processed later.
    #
    # config.time_zone = "Central Time (US & Canada)"
    # config.eager_load_paths << Rails.root.join("extras")

    # Don't generate system test files.
    config.generators.system_tests = nil
  end
end
| 30.666667 | 79 | 0.766722 |
ac38a882b9e345603a000f116495481a0a990996 | 8,355 | # frozen_string_literal: true
require "active_record"
require "rails"
require "active_model/railtie"
# For now, action_controller must always be present with
# Rails, so let's make sure that it gets required before
# here. This is needed for correctly setting up the middleware.
# In the future, this might become an optional require.
require "action_controller/railtie"
module ActiveRecord
  # = Active Record Railtie
  #
  # Wires Active Record into a Rails application: default config options,
  # rake tasks, console/runner hooks, and a series of initializers that run
  # during boot.
  class Railtie < Rails::Railtie # :nodoc:
    config.active_record = ActiveSupport::OrderedOptions.new

    config.app_generators.orm :active_record, migration: true,
                                              timestamps: true

    # Map common persistence errors to HTTP statuses for public rescues.
    config.action_dispatch.rescue_responses.merge!(
      "ActiveRecord::RecordNotFound"   => :not_found,
      "ActiveRecord::StaleObjectError" => :conflict,
      "ActiveRecord::RecordInvalid"    => :unprocessable_entity,
      "ActiveRecord::RecordNotSaved"   => :unprocessable_entity
    )

    config.active_record.use_schema_cache_dump = true
    config.active_record.maintain_test_schema = true

    config.active_record.sqlite3 = ActiveSupport::OrderedOptions.new
    config.active_record.sqlite3.represent_boolean_as_integer = nil

    config.eager_load_namespaces << ActiveRecord

    rake_tasks do
      namespace :db do
        task :load_config do
          ActiveRecord::Tasks::DatabaseTasks.database_configuration = Rails.application.config.database_configuration

          # Engines contribute their own db/migrate paths when run from
          # within the engine (ENGINE_ROOT is set by the engine's bin/rails).
          if defined?(ENGINE_ROOT) && engine = Rails::Engine.find(ENGINE_ROOT)
            if engine.paths["db/migrate"].existent
              ActiveRecord::Tasks::DatabaseTasks.migrations_paths += engine.paths["db/migrate"].to_a
            end
          end
        end
      end

      load "active_record/railties/databases.rake"
    end

    # When loading console, force ActiveRecord::Base to be loaded
    # to avoid cross references when loading a constant for the
    # first time. Also, make it output to STDERR.
    console do |app|
      require "active_record/railties/console_sandbox" if app.sandbox?
      require "active_record/base"
      unless ActiveSupport::Logger.logger_outputs_to?(Rails.logger, STDERR, STDOUT)
        console = ActiveSupport::Logger.new(STDERR)
        Rails.logger.extend ActiveSupport::Logger.broadcast console
      end
      ActiveRecord::Base.verbose_query_logs = false
    end

    runner do
      require "active_record/base"
    end

    initializer "active_record.initialize_timezone" do
      ActiveSupport.on_load(:active_record) do
        self.time_zone_aware_attributes = true
        self.default_timezone = :utc
      end
    end

    initializer "active_record.logger" do
      ActiveSupport.on_load(:active_record) { self.logger ||= ::Rails.logger }
    end

    # Optionally raise PendingMigrationError as a middleware page.
    initializer "active_record.migration_error" do
      if config.active_record.delete(:migration_error) == :page_load
        config.app_middleware.insert_after ::ActionDispatch::Callbacks,
          ActiveRecord::Migration::CheckPending
      end
    end

    # Load db/schema_cache.yml if it matches the current migration version,
    # sparing a schema query per table at boot.
    initializer "active_record.check_schema_cache_dump" do
      if config.active_record.delete(:use_schema_cache_dump)
        config.after_initialize do |app|
          ActiveSupport.on_load(:active_record) do
            filename = File.join(app.config.paths["db"].first, "schema_cache.yml")

            if File.file?(filename)
              current_version = ActiveRecord::Migrator.current_version

              next if current_version.nil?

              cache = YAML.load(File.read(filename))
              if cache.version == current_version
                connection.schema_cache = cache
                connection_pool.schema_cache = cache.dup
              else
                warn "Ignoring db/schema_cache.yml because it has expired. The current schema version is #{current_version}, but the one in the cache is #{cache.version}."
              end
            end
          end
        end
      end
    end

    initializer "active_record.warn_on_records_fetched_greater_than" do
      if config.active_record.warn_on_records_fetched_greater_than
        ActiveSupport.on_load(:active_record) do
          require "active_record/relation/record_fetch_warning"
        end
      end
    end

    # Copy every remaining config.active_record option onto Base via setters.
    initializer "active_record.set_configs" do |app|
      ActiveSupport.on_load(:active_record) do
        configs = app.config.active_record.dup
        configs.delete(:sqlite3)
        configs.each do |k, v|
          send "#{k}=", v
        end
      end
    end

    # This sets the database configuration from Configuration#database_configuration
    # and then establishes the connection.
    initializer "active_record.initialize_database" do
      ActiveSupport.on_load(:active_record) do
        self.configurations = Rails.application.config.database_configuration

        begin
          establish_connection
        rescue ActiveRecord::NoDatabaseError
          warn <<-end_warning
Oops - You have a database configured, but it doesn't exist yet!

Here's how to get started:

  1. Configure your database in config/database.yml.
  2. Run `bin/rails db:create` to create the database.
  3. Run `bin/rails db:setup` to load your database schema.
end_warning
          raise
        end
      end
    end

    # Expose database runtime to controller for logging.
    initializer "active_record.log_runtime" do
      require "active_record/railties/controller_runtime"
      ActiveSupport.on_load(:action_controller) do
        include ActiveRecord::Railties::ControllerRuntime
      end
    end

    initializer "active_record.set_reloader_hooks" do
      ActiveSupport.on_load(:active_record) do
        ActiveSupport::Reloader.before_class_unload do
          if ActiveRecord::Base.connected?
            ActiveRecord::Base.clear_cache!
            ActiveRecord::Base.clear_reloadable_connections!
          end
        end
      end
    end

    initializer "active_record.set_executor_hooks" do
      ActiveRecord::QueryCache.install_executor_hooks
    end

    initializer "active_record.add_watchable_files" do |app|
      path = app.paths["db"].first
      config.watchable_files.concat ["#{path}/schema.rb", "#{path}/structure.sql"]
    end

    initializer "active_record.clear_active_connections" do
      config.after_initialize do
        ActiveSupport.on_load(:active_record) do
          # Ideally the application doesn't connect to the database during boot,
          # but sometimes it does. In case it did, we want to empty out the
          # connection pools so that a non-database-using process (e.g. a master
          # process in a forking server model) doesn't retain a needless
          # connection. If it was needed, the incremental cost of reestablishing
          # this connection is trivial: the rest of the pool would need to be
          # populated anyway.

          clear_active_connections!
          flush_idle_connections!
        end
      end
    end

    # Deprecation path for SQLite boolean serialization ('t'/'f' vs 1/0).
    initializer "active_record.check_represent_sqlite3_boolean_as_integer" do
      config.after_initialize do
        ActiveSupport.on_load(:active_record_sqlite3adapter) do
          represent_boolean_as_integer = Rails.application.config.active_record.sqlite3.delete(:represent_boolean_as_integer)
          unless represent_boolean_as_integer.nil?
            ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer = represent_boolean_as_integer
          end

          unless ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer
            ActiveSupport::Deprecation.warn <<-MSG
Leaving `ActiveRecord::ConnectionAdapters::SQLite3Adapter.represent_boolean_as_integer`
set to false is deprecated. SQLite databases have used 't' and 'f' to serialize
boolean values and must have old data converted to 1 and 0 (its native boolean
serialization) before setting this flag to true. Conversion can be accomplished
by setting up a rake task which runs

  ExampleModel.where("boolean_column = 't'").update_all(boolean_column: 1)
  ExampleModel.where("boolean_column = 'f'").update_all(boolean_column: 0)

for all models and all boolean columns, after which the flag must be set to
true by adding the following to your application.rb file:

  Rails.application.config.active_record.sqlite3.represent_boolean_as_integer = true
MSG
          end
        end
      end
    end
  end
end
| 36.806167 | 171 | 0.702214 |
8775826877b0dbb9d460e42b99c112c95936347c | 252 | require 'rails_helper'
module Lama
  # Engine controller spec: routes are drawn against the Lama engine, not
  # the host app.
  describe CartController, type: :controller do
    routes { Lama::Engine.routes }
    render_views

    # NOTE(review): asserts an empty JSON body — presumably an empty cart
    # serializes to an empty structure; confirm against the serializer.
    it 'can add to cart' do
      get 'add', format: :json
      expect(parsed_body).to be_empty
    end
  end
end
| 18 | 47 | 0.670635 |
6a0a9690929733d4dc8533bcae5b47869b9d8eb7 | 3,110 | require "support"
# Tests for Omise::List: pagination arithmetic (page/total_pages,
# first/last page detection, page navigation) and basic enumeration.
class TestList < Omise::Test
  setup do
    # A 20-item page out of 40 total, built through a JSON round-trip so
    # keys are strings exactly as the API would deliver them.
    attributes = JSON.parse(JSON.generate({
      object: "list",
      location: "/charges",
      offset: 0,
      limit: 20,
      total: 40,
      data: 20.times.map { |i| { object: "charge", id: "chrg_#{i}" } },
    }))

    @parent = Object.new
    @list = Omise::List.new(attributes, parent: @parent)
  end

  def test_that_we_can_initialize_a_list
    assert_instance_of Omise::List, @list
  end

  def test_that_we_can_reload_a_list
    assert @list.reload
  end

  def test_that_we_can_get_the_parent
    assert_equal @parent, @list.parent
  end

  def test_that_we_know_if_we_are_on_the_first_page
    assert make_paginated_list(00, 20, 60).first_page?
    refute make_paginated_list(20, 20, 60).first_page?
    refute make_paginated_list(40, 20, 60).first_page?
  end

  def test_that_we_know_if_we_are_on_the_last_page
    refute make_paginated_list(00, 20, 60).last_page?
    refute make_paginated_list(20, 20, 60).last_page?
    assert make_paginated_list(40, 20, 60).last_page?
  end

  def test_that_we_know_on_which_page_we_are
    # page is derived from offset/limit, 1-based.
    assert_equal 1, make_paginated_list(00, 20, 60).page
    assert_equal 2, make_paginated_list(20, 20, 60).page
    assert_equal 3, make_paginated_list(40, 20, 60).page
  end

  def test_that_we_know_how_many_page_there_is
    # total_pages must round up for partial final pages.
    assert_equal 0, make_paginated_list(00, 20, 00).total_pages
    assert_equal 1, make_paginated_list(00, 20, 10).total_pages
    assert_equal 1, make_paginated_list(00, 20, 20).total_pages
    assert_equal 2, make_paginated_list(00, 20, 30).total_pages
    assert_equal 2, make_paginated_list(00, 20, 40).total_pages
    assert_equal 3, make_paginated_list(00, 20, 50).total_pages
    assert_equal 3, make_paginated_list(00, 20, 60).total_pages
    assert_equal 4, make_paginated_list(00, 20, 70).total_pages
  end

  def test_that_we_can_go_to_the_next_page
    # nil when already on the last page.
    assert_nil make_paginated_list(0, 20, 10).next_page
    assert_instance_of Omise::List, make_paginated_list(0, 20, 30).next_page
  end

  def test_that_we_can_go_to_the_previous_page
    assert_nil make_paginated_list(0, 20, 30).previous_page
    assert_instance_of Omise::List, make_paginated_list(20, 20, 30).previous_page
  end

  def test_that_we_can_go_to_a_specific_page
    # nil for out-of-range page numbers.
    assert_nil make_paginated_list(0, 20, 100).jump_to_page(6)
    assert_instance_of Omise::List, make_paginated_list(0, 20, 100).jump_to_page(2)
  end

  def test_we_can_get_enumerate_a_list
    assert_instance_of Enumerator, @list.each
  end

  def test_we_can_get_the_array
    assert_instance_of Array, @list.to_a
    assert_instance_of Omise::Charge, @list.to_a.first
  end

  def test_that_we_can_get_the_last_element_of_the_array
    assert_equal "chrg_19", @list.last.id
  end

  private

  # Builds a list with the given offset/limit/total and `limit` dummy
  # charges, again via a JSON round-trip for string keys.
  def make_paginated_list(offset, limit, total)
    attributes = JSON.parse(JSON.generate({
      object: "list",
      location: "/charges",
      offset: offset,
      limit: limit,
      total: total,
      data: limit.times.map { { object: "charge" } },
    }))

    Omise::List.new(attributes)
  end
end
| 30.490196 | 83 | 0.727974 |
d537b4b488f1d02b2a4d8d6010d9c23724e92b30 | 2,648 | # frozen_string_literal: true
module Boring
  module Pundit
    # Installs Pundit into a Rails app: adds the gem, runs Pundit's own
    # generator, and edits ApplicationController to include the module,
    # enforce authorization, and rescue authorization failures. Each step
    # can be skipped via its class option.
    class InstallGenerator < Rails::Generators::Base
      desc "Adds pundit to the application"

      class_option :skip_ensuring_policies, type: :boolean, aliases: "-s",
                                            desc: "Skip before_action to ensure user is authorized"
      class_option :skip_rescue, type: :boolean, aliases: "-sr",
                                 desc: "Skip adding rescue for Pundit::NotAuthorizedError"
      class_option :skip_generator, type: :boolean, aliases: "-sg",
                                    desc: "Skip running Pundit install generator"

      def add_pundit_gem
        say "Adding Pundit gem", :green
        # Run outside the current bundle so `bundle add` edits the app's Gemfile.
        Bundler.with_unbundled_env do
          run "bundle add pundit"
        end
      end

      def run_pundit_generator
        return if options[:skip_generator]

        say "Running Pundit Generator", :green
        # DISABLE_SPRING avoids a stale preloaded environment.
        run "DISABLE_SPRING=1 rails generate pundit:install"
      end

      def inject_pundit_to_controller
        say "Adding Pundit module into ApplicationController", :green
        inject_into_file 'app/controllers/application_controller.rb', after: "class ApplicationController < ActionController::Base\n" do
          " include Pundit\n"
        end
      end

      def ensure_policies
        return if options[:skip_ensuring_policies]

        say "Force ensuring policies", :green
        inject_into_file 'app/controllers/application_controller.rb', after: "include Pundit\n" do
          " after_action :verify_authorized\n"
        end
      end

      def rescue_from_not_authorized
        return if options[:skip_rescue]

        say "Adding rescue from Pundit::NotAuthorizedError", :green
        # Anchor after whichever line the earlier steps actually produced.
        after = if File.read('app/controllers/application_controller.rb') =~ (/:verify_authorized/)
          "after_action :verify_authorized\n"
        else
          "include Pundit\n"
        end
        inject_into_file 'app/controllers/application_controller.rb', after: after do
          <<~RUBY
            \trescue_from Pundit::NotAuthorizedError, with: :user_not_authorized

            \tprivate

            \tdef user_not_authorized
            \t  flash[:alert] = "You are not authorized to perform this action."
            \t  redirect_to(request.referrer || root_path)
            \tend
          RUBY
        end
      end

      def after_run
        unless options[:skip_rescue]
          say "\nPlease check the `application_controller.rb` file and fix any potential issues"
        end

        say "\nDon't forget, that you can generate policies with \nrails g pundit:policy Model\n"
      end
    end
  end
end
| 33.1 | 136 | 0.637085 |
ed35aea228a1cd9acadcefd312a9909384d2def2 | 546 | cask 'instead' do
version '3.1.2'
sha256 '880d2e8f77a99cab8bac110cfb8473acf951c87b2bc26f8ac81870537e4174ae'

# sourceforge.net/instead was verified as official when first introduced to the cask
url "https://downloads.sourceforge.net/instead/instead/#{version}/Instead-#{version}.dmg"
# Release feed used to detect new versions of the app.
appcast 'https://sourceforge.net/projects/instead/rss?path=/instead',
        checkpoint: 'b0bb1b1c52f94f3661376bee867fea09f538a2ef0c5674b3f9acacc7364cdd0a'
name 'INSTEAD'
homepage 'https://instead.syscall.ru/index.html'

app 'Instead.app'
end
| 39 | 91 | 0.78022 |
e2192d683d17eeb25b3297524cde18c05053d25d | 268 | # frozen_string_literal: true
FactoryBot.define do
  # Facebook-page channel fixture (Instagram-via-Facebook specs).
  # Tokens and the page id are random UUIDs; only presence matters here.
  factory :channel_instagram_fb_page, class: 'Channel::FacebookPage' do
    page_access_token { SecureRandom.uuid }
    user_access_token { SecureRandom.uuid }
    page_id { SecureRandom.uuid }
    account
  end
end
| 24.363636 | 71 | 0.757463 |
087fe29f13e0b3c8ee05bb5a0b961dc3dcd39c80 | 945 | module FloodRiskEngine
# Queues the "enrollment submitted" confirmation email for the
# enrollment's correspondence contact and, when present, its secondary
# contact (duplicates are sent only once).
class SendEnrollmentSubmittedEmail
  def initialize(enrollment)
    @enrollment = enrollment
  end

  # Validates that a primary email exists, then enqueues one mailer job
  # per distinct recipient address.
  def call
    validate_enrollment

    distinct_recipients.each do |address|
      EnrollmentMailer.submitted(
        enrollment_id: enrollment.id,
        recipient_address: address
      ).deliver_later
    end
  end

  private

  attr_reader :enrollment

  # A missing primary contact email is a hard error.
  def validate_enrollment
    return if primary_contact_email.present?

    raise MissingEmailAddressError, "Missing contact email address"
  end

  # Normalized (stripped, lowercased) unique recipient addresses.
  def distinct_recipients
    candidates = [primary_contact_email, secondary_contact_email]
    candidates.select(&:present?).map { |email| email.strip.downcase }.uniq
  end

  def primary_contact_email
    enrollment.correspondence_contact.try(:email_address)
  end

  def secondary_contact_email
    enrollment.secondary_contact.try(:email_address)
  end
end
end
| 22.5 | 101 | 0.686772 |
5d7d679ae183e67e90c6ff742d3baafddd781cfd | 1,492 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/dialogflow/v2/conversation_event.proto
#
# NOTE: generated by protoc from the .proto file above — regenerate
# instead of editing by hand.

require 'google/api/annotations_pb'
require 'google/cloud/dialogflow/v2/participant_pb'
require 'google/rpc/status_pb'
require 'google/protobuf'

# Registers the ConversationEvent message and its Type enum with the
# process-wide protobuf descriptor pool.
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/dialogflow/v2/conversation_event.proto", :syntax => :proto3) do
    add_message "google.cloud.dialogflow.v2.ConversationEvent" do
      optional :conversation, :string, 1
      optional :type, :enum, 2, "google.cloud.dialogflow.v2.ConversationEvent.Type"
      optional :error_status, :message, 3, "google.rpc.Status"
      oneof :payload do
        optional :new_message_payload, :message, 4, "google.cloud.dialogflow.v2.Message"
      end
    end
    add_enum "google.cloud.dialogflow.v2.ConversationEvent.Type" do
      value :TYPE_UNSPECIFIED, 0
      value :CONVERSATION_STARTED, 1
      value :CONVERSATION_FINISHED, 2
      value :HUMAN_INTERVENTION_NEEDED, 3
      value :NEW_MESSAGE, 5
      value :UNRECOVERABLE_ERROR, 4
    end
  end
end

module Google
  module Cloud
    module Dialogflow
      module V2
        # Ruby constants resolved from the registered descriptors.
        ConversationEvent = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dialogflow.v2.ConversationEvent").msgclass
        ConversationEvent::Type = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dialogflow.v2.ConversationEvent.Type").enummodule
      end
    end
  end
end
| 37.3 | 154 | 0.743298 |
21e4bfc1c6bcb17cdeca02f916d00af528b1f008 | 872 | # encoding: UTF-8
require 'blogger_api'

# Minimal in-memory Blogger XML-RPC backend for the web-service example.
# Method names are camelCase because they must match the remote Blogger
# API method names declared by BloggerAPI.
class BloggerService < ActionWebService::Base
  web_service_api BloggerAPI

  def initialize
    # Monotonically increasing id handed out by newPost.
    @postid = 0
  end

  # Logs the request and returns a fresh post id as a string.
  def newPost(key, id, user, pw, content, publish)
    $stderr.puts "id=#{id} user=#{user} pw=#{pw}, content=#{content.inspect} [#{publish}]"
    (@postid += 1).to_s
  end

  # Pretends to edit a post; always reports success.
  def editPost(key, post_id, user, pw, content, publish)
    $stderr.puts "id=#{post_id} user=#{user} pw=#{pw} content=#{content.inspect} [#{publish}]"
    true
  end

  # Returns the single demo blog regardless of the user.
  def getUsersBlogs(key, user, pw)
    $stderr.puts "getting blogs for #{user}"
    blog = Blog::Blog.new(
      :url =>'http://blog',
      :blogid => 'myblog',
      :blogName => 'My Blog'
    )
    [blog]
  end

  # Returns canned profile data for any user.
  # NOTE(review): the email literal below looks like a redaction artifact
  # from scraping ("[email protected]") — confirm the intended address.
  def getUserInfo(key, user, pw)
    $stderr.puts "getting user info for #{user}"
    Blog::User.new(:nickname => 'user', :email => '[email protected]')
  end
end
| 24.222222 | 94 | 0.619266 |
e2e1aa75d11960c5548e6010cb1a95c2acf52105 | 347 | require "tasks/data_hygiene/schema_validator"
namespace :schema_validation do
  # Usage: rake "schema_validation:errors_for_format[schema_name]"
  desc "validates all content items for a given format producing a report of errors"
  task :errors_for_format, [:schema_name] => :environment do |_t, args|
    schema_name = args[:schema_name]
    DataHygiene::SchemaValidator.new(schema_name).report_errors
  end
end
| 34.7 | 84 | 0.78098 |
2830a47d71abfa2f6c45575cce69c76c9297d43a | 2,982 | require 'vagrant/systems/linux/error'
require 'vagrant/systems/linux/config'

module Vagrant
  module Systems
    # Guest-OS support for generic Linux VMs: distro detection, graceful
    # halt, and shared-folder (vboxsf / NFS) mounting over SSH.
    class Linux < Base
      # Probes the guest over SSH for well-known release files to decide
      # which distro-specific handler applies. Returns a Symbol such as
      # :ubuntu, or nil when the distro cannot be identified.
      def distro_dispatch
        vm.ssh.execute do |ssh|
          if ssh.test?("cat /etc/debian_version")
            return :debian if ssh.test?("cat /proc/version | grep 'Debian'")
            return :ubuntu if ssh.test?("cat /proc/version | grep 'Ubuntu'")
          end

          return :gentoo if ssh.test?("cat /etc/gentoo-release")
          return :redhat if ssh.test?("cat /etc/redhat-release")
          return :suse if ssh.test?("cat /etc/SuSE-release")
          return :arch if ssh.test?("cat /etc/arch-release")
        end

        # Can't detect the distro, assume vanilla linux
        nil
      end

      # Issues `halt` in the guest, then polls until the VM reports
      # powered off or the configured timeout elapses (in which case we
      # return and let Vagrant force-kill the machine).
      def halt
        vm.env.ui.info I18n.t("vagrant.systems.linux.attempting_halt")
        vm.ssh.execute do |ssh|
          ssh.exec!("sudo halt")
        end

        # Wait until the VM's state is actually powered off. If this doesn't
        # occur within a reasonable amount of time (15 seconds by default),
        # then simply return and allow Vagrant to kill the machine.
        count = 0
        while vm.vm.state != :powered_off
          count += 1

          return if count >= vm.env.config.linux.halt_timeout
          sleep vm.env.config.linux.halt_check_interval
        end
      end

      # Creates the guest path, mounts the vboxsf share, then fixes
      # ownership (the chown runs after mount so it affects the mount).
      def mount_shared_folder(ssh, name, guestpath, owner, group)
        ssh.exec!("sudo mkdir -p #{guestpath}")
        mount_folder(ssh, name, guestpath, owner, group)
        ssh.exec!("sudo chown `id -u #{owner}`:`id -g #{group}` #{guestpath}")
      end

      # Mounts each NFS-exported host folder inside the guest.
      def mount_nfs(ip, folders)
        # TODO: Maybe check for nfs support on the guest, since its often
        # not installed by default
        folders.each do |name, opts|
          vm.ssh.execute do |ssh|
            ssh.exec!("sudo mkdir -p #{opts[:guestpath]}")
            ssh.exec!("sudo mount #{ip}:'#{opts[:hostpath]}' #{opts[:guestpath]}", :_error_class => LinuxError, :_key => :mount_nfs_fail)
          end
        end
      end

      #-------------------------------------------------------------------
      # "Private" methods which assist above methods
      #-------------------------------------------------------------------

      # Retries the vboxsf mount because the guest-additions device can
      # appear a few seconds after boot; gives up after 10 attempts.
      def mount_folder(ssh, name, guestpath, owner, group, sleeptime=5)
        # Determine the permission string to attach to the mount command
        options = "-o uid=`id -u #{owner}`,gid=`id -g #{group}`"

        attempts = 0
        while true
          result = ssh.exec!("sudo mount -t vboxsf #{options} #{name} #{guestpath}") do |ch, type, data|
            # net/ssh returns the value in ch[:result] (based on looking at source)
            ch[:result] = !!(type == :stderr && data =~ /No such device/i)
          end

          break unless result

          attempts += 1
          raise LinuxError, :mount_fail if attempts >= 10
          sleep sleeptime
        end
      end
    end
  end
end
| 35.927711 | 137 | 0.558685 |
21308ca2f0b1280581bb6beafb27e2aa4e7f83b2 | 2,929 | # frozen_string_literal: false
module Hyperlapse
  # Turns downloaded street-view pictures and map tiles into a hyperlapse
  # video: scales both image sets, overlays each map on its picture, then
  # feeds the composites to ffmpeg.
  class Generator
    def initialize(config_manager)
      @config = config_manager.config
      @pics_dir = config_manager.pics_dir
      @maps_dir = config_manager.maps_dir
      @empty_dir = config_manager.empty_dir
      @output_dir = config_manager.output_dir
      @pics_scale_dir = config_manager.pics_scale_dir
      @maps_scale_dir = config_manager.maps_scale_dir
      @composite_dir = config_manager.composite_dir
    end

    # Runs the whole pipeline; raises unless all expected downloads exist.
    def generate
      fail 'Downloads aren\'t complete.' unless downloads_ok?

      scale_pics
      scale_maps
      place_maps_over_pics
      generate_video
    end

    private

    # A waypoint whose download failed leaves a marker file in @empty_dir,
    # so pics + empties and maps + empties must both equal the number of
    # waypoints (capped by the configured limit).
    def downloads_ok?
      empty_count = get_files(@empty_dir).length
      pic_count = get_files(@pics_dir).length + empty_count
      map_count = get_files(@maps_dir).length + empty_count
      waypoint_count = [@config[:limit], @config[:waypoints].length].min

      pic_count == waypoint_count && map_count == waypoint_count
    end

    def scale_pics
      scale_images(@pics_dir, @pics_scale_dir, Hyperlapse::WIDTH, Hyperlapse::HEIGHT, 'pic')
    end

    def scale_maps
      width = (Hyperlapse::WIDTH * Hyperlapse::MAP_SCALE).round
      height = (Hyperlapse::HEIGHT * Hyperlapse::MAP_SCALE).round
      scale_images(@maps_dir, @maps_scale_dir, width, height, 'map')
    end

    # Shared ImageMagick resize step for pics and maps (the two former
    # copies of this loop differed only in dirs, dimensions and the error
    # label). Skips images already scaled on a previous run (output
    # exists); raises on a failed `convert` invocation.
    def scale_images(src_dir, dest_dir, width, height, label)
      get_files(src_dir).each do |image|
        input = File.join(src_dir, image)
        output = File.join(dest_dir, image)
        command = "convert #{input} -resize #{width}x#{height}! #{output}"

        ok = File.file?(output) ? true : system(command)
        fail "Error scaling #{label}: #{image}." unless ok
      end
    end

    # Overlays each scaled map onto its matching scaled picture.
    def place_maps_over_pics
      images = get_files(@pics_dir)
      images.each do |image|
        map = File.join(@maps_scale_dir, image)
        pic = File.join(@pics_scale_dir, image)
        output = File.join(@composite_dir, image)
        map_pos = Hyperlapse::MAP_POS
        command = "composite -gravity #{map_pos} #{map} #{pic} #{output}"

        ok = File.file?(output) ? true : system(command)
        fail "Error placing #{map} over #{pic}." unless ok
      end
    end

    # Stitches the numbered composites (%d.jpg) into out.mp4 at the
    # configured frame rate.
    def generate_video
      input = File.join(@composite_dir, '%d.jpg')
      output = File.join(@output_dir, 'out.mp4')
      command = <<~END
        ffmpeg -f image2 -framerate #{@config[:fps]} -i #{input} #{output}
      END

      ok = system(command.chomp)
      fail 'Error generating video.' unless ok
    end

    # Plain files in dir ('.' and '..' are filtered out by File.file?).
    def get_files(dir)
      Dir.entries(dir).select do |entry|
        entry_path = File.join(dir, entry)
        File.file?(entry_path)
      end
    end
  end
end
| 30.831579 | 74 | 0.632981 |
261e91b65056b6bd5ec06e06a93c2395bc1b59ea | 1,043 | # frozen_string_literal: true
require 'spec_helper'

RSpec.describe Projects::Settings::RepositoryController do
  let(:project) { create(:project_empty_repo, :public) }
  let(:user) { create(:user) }

  before do
    project.add_maintainer(user)
    sign_in(user)
  end

  describe 'GET show' do
    context 'push rule' do
      subject(:push_rule) { assigns(:push_rule) }

      # Visiting the settings page should lazily create and persist a
      # push rule for the project.
      it 'is created' do
        get :show, params: { namespace_id: project.namespace, project_id: project }

        is_expected.to be_persisted
      end

      it 'is connected to project_settings' do
        get :show, params: { namespace_id: project.namespace, project_id: project }

        expect(project.project_setting.push_rule).to eq(subject)
      end

      # Without the licensed feature no push rule should be built at all.
      context 'unlicensed' do
        before do
          stub_licensed_features(push_rules: false)
        end

        it 'is not created' do
          get :show, params: { namespace_id: project.namespace, project_id: project }

          is_expected.to be_nil
        end
      end
    end
  end
end
| 23.704545 | 85 | 0.651965 |
e246e08372f6bedbc1e3955f911cc95b886f9a34 | 2,655 |
class SectionRowConverter < BaseRowConverter
@model_class = :Section
@@slugs = []
def setup_object
object = setup_object_by_slug(attrs)
if object.directive.present? && object.directive != @importer.options[:directive]
add_error(:slug, "Code is used in #{object.directive.meta_kind.to_s.titleize}: #{object.directive.slug}")
else
object.directive = @importer.options[:directive]
if @@slugs.include? object.directive.slug
add_error(:slug, "Code is duplicated")
else
@@slugs << object.directive.slug
end
end
end
def reify
handle(:slug, SlugColumnHandler)
handle_date(:created_at, :no_import => true)
handle_date(:updated_at, :no_import => true)
handle_text_or_html(:description)
handle_text_or_html(:notes)
handle(:controls, LinkControlsHandler)
handle_raw_attr(:title)
end
end
# Spreadsheet import/export for the sections of a single directive
# (contract, policy or regulation).
class SectionsConverter < BaseConverter
  # Heading -> attribute map for the directive metadata header block.
  # "Directive" in headings is replaced with the concrete kind at runtime.
  @metadata_map = Hash[*%w(
    Type type
    Directive\ Code slug
    Directive\ Title title
    Directive\ Description description
    Version version
    Start start_date
    Stop stop_date
    Kind kind
    Audit\ Start audit_start_date
    Audit\ Frequency audit_frequency
    Audit\ Duration audit_duration
    Created created_at
    Updated updated_at
  )]

  # Heading -> attribute map for each section row; "Section" is swapped
  # for e.g. "Clause" depending on the directive kind.
  @object_map = Hash[*%w(
    Section\ Code slug
    Section\ Title title
    Section\ Description description
    Abstract notes
    Controls controls
    Created created_at
    Updated updated_at
  )]

  @row_converter = SectionRowConverter

  # The directive this import/export run is scoped to.
  def directive
    options[:directive]
  end

  def metadata_map
    # Change 'Directive' to 'Contract', 'Policy', or 'Regulation'
    Hash[*self.class.metadata_map.map do |k,v|
      [k.sub("Directive", directive.meta_kind.to_s.titleize),v]
    end.flatten]
  end

  def object_map
    # Change 'Section' to 'Clause' in some cases
    Hash[*self.class.object_map.map do |k,v|
      [k.sub("Section", directive.section_meta_kind.to_s.titleize),v]
    end.flatten]
  end

  # Ensures the sheet's metadata header matches the target directive.
  def validate_metadata(attrs)
    validate_metadata_type(attrs, directive.meta_kind.to_s.titleize)
    if !attrs.has_key?(:slug)
      errors.push("Missing \"#{directive.meta_kind.to_s.titleize} Code\" heading")
    elsif attrs[:slug].upcase != directive.slug.upcase
      errors.push("#{directive.meta_kind.to_s.titleize} Code must be #{directive.slug}")
    end
  end

  # Streams the CSV metadata header rows for export.
  def do_export_metadata
    yield CSV.generate_line(metadata_map.keys)
    yield CSV.generate_line([directive.meta_kind.to_s.titleize, directive.slug])
    yield CSV.generate_line([])
    yield CSV.generate_line([])
    yield CSV.generate_line(object_map.keys)
  end
end
| 26.029412 | 111 | 0.700942 |
b91a6f2c2c792bcddd0e02ffdeaed080af875343 | 365 | class Topic < ActiveRecord::Base
validates_presence_of :author_name
# Numericality is only enforced when validating with context :context_test.
validates :title, numericality: { only_integer: true }, on: :context_test
has_many :books, inverse_of: :topic
# Self-referential association: a topic may have a parent topic.
belongs_to :parent, class_name: "Topic"
# Aggregates title/author_name into a TopicDescription value object.
composed_of :description, mapping: [%w(title title), %w(author_name author_name)], allow_nil: true, class_name: "TopicDescription"
end
| 36.5 | 132 | 0.761644 |
f8b12507966b4f41013261720162e0e68fa0db66 | 539 | class UsersController < ApplicationController
before_action :require_user!
# Renders the profile form. Builds [label, value] pairs for a timezone
# select; label and value are both the zone name.
def edit
  zone_names = ActiveSupport::TimeZone.all.map(&:name).sort
  @timezones = zone_names.map { |zone| [zone, zone] }
end
# Persists profile changes for the signed-in user and redirects back to
# the edit form with a success/error flash.
def update
  if @current_user.update(user_params)
    # ActiveRecord's #update has already saved the record at this point,
    # so the former extra `@current_user.save!` call was redundant and
    # has been removed.
    flash[:success] = I18n.t('users.update.success')
  else
    flash[:error] = I18n.t('users.update.error')
  end
  redirect_to action: :edit
end
private
# Strong-parameters whitelist for profile updates.
# NOTE(review): permitting :id and :updated_at lets the form mass-assign
# record identity/timestamp fields — confirm this is intentional.
def user_params
  params.require(:user).permit(:id, :name, :timezone, :updated_at)
end
end
| 22.458333 | 94 | 0.679035 |
08048314d006ff3ba198dea998404d74eecda134 | 378 | require 'rails/railtie'
require 'mandrill_action_mailer/delivery_handler'

module MandrillActionMailer
  # Hooks the gem into Rails boot: exposes its configuration on the app
  # config and registers :mandrill as an ActionMailer delivery method.
  class Railtie < Rails::Railtie
    config.mandrill_action_mailer = MandrillActionMailer.config

    initializer 'mandrill_action_mailer.add_delivery_method' do
      ActionMailer::Base.add_delivery_method(:mandrill, MandrillActionMailer::DeliveryHandler)
    end
  end
end
| 29.076923 | 94 | 0.81746 |
388feb71912faca778bdf7a1b1b982c65bfe0e72 | 7,301 | require 'spec_helper'
# Resource Specification version these CLI specs are pinned to.
WORKING_SPEC_VERSION = '2.19.0'.freeze

# End-to-end CLI specs for the `cfndsl` executable, driven through Aruba.
describe 'cfndsl', type: :aruba do
  # Expected --help text, verbatim (including the "ouptut" typo emitted
  # by the CLI itself).
  let(:usage) do
    <<-USAGE.gsub(/^ {6}/, '').chomp
      Usage: cfndsl [options] FILE
          -o, --output FILE                Write output to file
          -y, --yaml FILE                  Import yaml file as local variables
          -r, --ruby FILE                  Evaluate ruby file before template
          -j, --json FILE                  Import json file as local variables
          -p, --pretty                     Pretty-format output JSON
          -f, --format FORMAT              Specify the output format (JSON default)
          -D, --define "VARIABLE=VALUE"    Directly set local VARIABLE as VALUE
          -v, --verbose                    Turn on verbose ouptut
          -b, --disable-binding            Disable binding configuration
          -m, --disable-deep-merge         Disable deep merging of yaml
          -s, --specification-file FILE    Location of Cloudformation Resource Specification file
          -u [VERSION],                    Update the Resource Specification file to latest, or specific version
              --update-specification
          -g RESOURCE_TYPE,RESOURCE_LOGICAL_NAME,
              --generate                   Add resource type and logical name
          -a, --assetversion               Print out the specification version
          -l, --list                       List supported resources
          -h, --help                       Display this screen
    USAGE
  end

  let(:template_content) do
    <<-TEMPLATE.gsub(/^ {6}/, '')
      CloudFormation do
        Description(external_parameters[:DESC] || 'default')
      end
    TEMPLATE
  end

  # Every spec gets a minimal template on disk.
  before(:each) { write_file('template.rb', template_content) }

  context "cfndsl -u #{WORKING_SPEC_VERSION}" do
    it 'updates the specification file' do
      run_command "cfndsl -u #{WORKING_SPEC_VERSION}"
      expect(last_command_started).to have_output_on_stderr(<<-OUTPUT.gsub(/^ {8}/, '').chomp)
        Updating specification file
        Specification successfully written to #{ENV['HOME']}/.cfndsl/resource_specification.json
      OUTPUT
      expect(last_command_started).to have_exit_status(0)
    end
  end

  context 'cfndsl -a' do
    it 'prints out the specification file version' do
      run_command 'cfndsl -a'
      expect(last_command_started).to have_output_on_stderr(/([0-9]+\.){2}[0-9]+/)
      expect(last_command_started).to have_exit_status(0)
    end
  end

  context 'cfndsl' do
    # With no FILE argument the CLI prints usage and exits non-zero.
    it 'displays the usage' do
      run_command 'cfndsl'
      expect(last_command_started).to have_output(usage)
      expect(last_command_started).to have_exit_status(1)
    end
  end

  context 'cfndsl --help' do
    it 'displays the usage' do
      run_command_and_stop 'cfndsl --help'
      expect(last_command_started).to have_output(usage)
    end
  end

  context 'cfndsl FILE' do
    it 'gives a deprecation warning about bindings' do
      run_command_and_stop 'cfndsl template.rb'
      expect(last_command_started).to have_output_on_stderr(<<-WARN.gsub(/^ {8}/, '').chomp)
        The creation of constants as config is deprecated!
        Please switch to the #external_parameters method within your templates to access variables
        See https://github.com/cfndsl/cfndsl/issues/170
        Use the --disable-binding flag to suppress this message
      WARN
    end

    it 'generates a JSON CloudFormation template' do
      run_command_and_stop 'cfndsl template.rb'
      expect(last_command_started).to have_output_on_stdout('{"AWSTemplateFormatVersion":"2010-09-09","Description":"default"}')
    end
  end

  context 'cfndsl FILE --pretty' do
    it 'generates a pretty JSON CloudFormation template' do
      run_command_and_stop 'cfndsl template.rb --pretty'
      expect(last_command_started).to have_output_on_stdout(<<-OUTPUT.gsub(/^ {8}/, '').chomp)
        {
          "AWSTemplateFormatVersion": "2010-09-09",
          "Description": "default"
        }
      OUTPUT
    end
  end

  context 'cfndsl FILE --output FILE' do
    it 'writes the JSON CloudFormation template to a file' do
      run_command_and_stop 'cfndsl template.rb --output template.json'
      expect(read('template.json')).to eq(['{"AWSTemplateFormatVersion":"2010-09-09","Description":"default"}'])
    end
  end

  context 'cfndsl FILE --yaml FILE' do
    before { write_file('params.yaml', 'DESC: yaml') }

    it 'interpolates the YAML file in the CloudFormation template' do
      run_command_and_stop 'cfndsl template.rb --yaml params.yaml'
      expect(last_command_started).to have_output_on_stdout('{"AWSTemplateFormatVersion":"2010-09-09","Description":"yaml"}')
    end
  end

  context 'cfndsl FILE --json FILE' do
    before { write_file('params.json', '{"DESC":"json"}') }

    it 'interpolates the JSON file in the CloudFormation template' do
      run_command_and_stop 'cfndsl template.rb --json params.json'
      expect(last_command_started).to have_output_on_stdout('{"AWSTemplateFormatVersion":"2010-09-09","Description":"json"}')
    end
  end

  context 'cfndsl FILE --ruby FILE' do
    # This template only assigns DESC when a binding didn't already set it.
    let(:template_content) do
      <<-TEMPLATE.gsub(/^ {8}/, '')
        CloudFormation do
          DESC = 'default' unless defined? DESC
          Description DESC
        end
      TEMPLATE
    end

    before(:each) { write_file('params.rb', 'DESC = "ruby"') }

    it 'interpolates the Ruby file in the CloudFormation template' do
      run_command_and_stop 'cfndsl template.rb --ruby params.rb'
      expect(last_command_started).to have_output_on_stdout('{"AWSTemplateFormatVersion":"2010-09-09","Description":"ruby"}')
    end

    it 'gives a deprecation warning and does not interpolate if bindings are disabled' do
      run_command_and_stop 'cfndsl template.rb --ruby params.rb --disable-binding --verbose'
      deprecation_warning = /Interpreting Ruby files was disabled\. .*params.rb will not be read/
      expect(last_command_started).to have_output_on_stderr(deprecation_warning)
      expect(last_command_started).to have_output_on_stdout('{"AWSTemplateFormatVersion":"2010-09-09","Description":"default"}')
    end
  end

  context 'cfndsl FILE --define VARIABLE=VALUE' do
    it 'interpolates the command line variables in the CloudFormation template' do
      run_command "cfndsl template.rb --define \"DESC='cli'\""
      expect(last_command_started).to have_output_on_stdout("{\"AWSTemplateFormatVersion\":\"2010-09-09\",\"Description\":\"'cli'\"}")
    end
  end

  context 'cfndsl FILE --verbose' do
    before { write_file('params.yaml', 'DESC: yaml') }

    it 'displays the variables as they are interpolated in the CloudFormation template' do
      run_command_and_stop 'cfndsl template.rb --yaml params.yaml --verbose'
      # /x regex: literal whitespace is insignificant, \s matches spaces.
      verbose = /
        Using \s specification \s file .* \.json \n
        Loading \s YAML \s file \s .* params\.yaml \n
        Setting \s local \s variable \s DESC \s to \s yaml \n
        Loading \s template \s file \s .* template.rb \n
        Writing \s to \s STDOUT
      /x
      template = '{"AWSTemplateFormatVersion":"2010-09-09","Description":"yaml"}'
      expect(last_command_started).to have_output_on_stderr(verbose)
      expect(last_command_started).to have_output_on_stdout(template)
    end
  end
end
| 41.016854 | 134 | 0.658129 |
abace6ee84acc090262b4fd8c9aa6c6a2b7c2a60 | 74 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'tome'
| 24.666667 | 58 | 0.716216 |
217b10d1bfec87578ccdd7f2961f614f785800eb | 556 | class TweetParser
attr_reader :tweets
# user - an object exposing #user_tweet; when present, its #tweets array
#        (raw tweet hashes) is cached, otherwise @tweets is left nil.
def initialize(user)
  @tweets = (user.user_tweet ? user.user_tweet.tweets : nil)
end
# Returns an array of JSON strings, one per stored tweet, keeping only
# the attributes the front end renders. Returns [] when no tweets were
# loaded (previously this raised NoMethodError because initialize can
# leave @tweets nil).
def parse
  return [] if @tweets.nil?

  @tweets.map do |tweet|
    author = tweet["user"]
    {
      display_name: author["name"],
      user_name: author["screen_name"],
      avatar: author["profile_image_url"],
      body: tweet["text"],
      created_at: tweet["created_at"],
      retweets: tweet["retweet_count"],
      favorites: tweet["favorite_count"]
    }.to_json
  end
end
end | 24.173913 | 62 | 0.604317 |
e2d9234a8afa3578d3d9679ef73a4afb2ebb25d3 | 3,956 | module ActiveScaffold
module Helpers
# A bunch of helper methods to produce the common view ids
module IdHelpers
def id_from_controller(controller)
controller.to_s.gsub("/", "__").html_safe
end
def controller_id(controller = (params[:eid] || params[:parent_controller] || params[:controller]))
controller_id ||= 'as_' + id_from_controller(controller)
end
def active_scaffold_id
"#{controller_id}-active-scaffold"
end
def active_scaffold_content_id
"#{controller_id}-content"
end
def active_scaffold_tbody_id
"#{controller_id}-tbody"
end
def active_scaffold_messages_id(options = {})
"#{options[:controller_id] || controller_id}-messages"
end
def active_scaffold_calculations_id(column = nil)
"#{controller_id}-calculations#{'-' + column.name.to_s if column}"
end
def empty_message_id
"#{controller_id}-empty-message"
end
def before_header_id
"#{controller_id}-search-container"
end
def search_input_id
"#{controller_id}-search-input"
end
def action_link_id(link_action,link_id)
"#{controller_id}-#{link_action}-#{link_id}-link"
end
def active_scaffold_column_header_id(column)
name = column.respond_to?(:name) ? column.name : column.to_s
clean_id "#{controller_id}-#{name}-column"
end
def element_row_id(options = {})
options[:action] ||= params[:action]
options[:id] ||= params[:id]
options[:id] ||= params[:parent_id]
clean_id "#{options[:controller_id] || controller_id}-#{options[:action]}-#{options[:id]}-row"
end
def element_cell_id(options = {})
options[:action] ||= params[:action]
options[:id] ||= params[:id]
options[:id] ||= params[:parent_id]
options[:name] ||= params[:name]
clean_id "#{controller_id}-#{options[:action]}-#{options[:id]}-#{options[:name]}-cell"
end
def element_form_id(options = {})
options[:action] ||= params[:action]
options[:id] ||= params[:id]
options[:id] ||= params[:parent_id]
clean_id "#{controller_id}-#{options[:action]}-#{options[:id]}-form"
end
def association_subform_id(column)
klass = column.association.klass.to_s.underscore
clean_id "#{controller_id}-associated-#{klass}"
end
def loading_indicator_id(options = {})
options[:action] ||= params[:action]
clean_id "#{controller_id}-#{options[:action]}-#{options[:id]}-loading-indicator"
end
def sub_section_id(options = {})
options[:id] ||= params[:id]
options[:id] ||= params[:parent_id]
clean_id "#{controller_id}-#{options[:id]}-#{options[:sub_section]}-subsection"
end
def sub_form_id(options = {})
options[:id] ||= params[:id]
options[:id] ||= params[:parent_id]
clean_id "#{controller_id}-#{options[:id]}-#{options[:association]}-subform"
end
def sub_form_list_id(options = {})
options[:id] ||= params[:id]
options[:id] ||= params[:parent_id]
clean_id "#{controller_id}-#{options[:id]}-#{options[:association]}-subform-list"
end
def element_messages_id(options = {})
options[:action] ||= params[:action]
options[:id] ||= params[:id]
options[:id] ||= params[:parent_id]
clean_id "#{controller_id}-#{options[:action]}-#{options[:id]}-messages"
end
def action_iframe_id(options)
"#{controller_id}-#{options[:action]}-#{options[:id]}-iframe"
end
def scope_id(scope)
scope.gsub(/(\[|\])/, '_').gsub('__', '_').gsub(/_$/, '')
end
private
# whitelists id-safe characters
def clean_id(val)
val.gsub /[^-_0-9a-zA-Z]/, '-'
end
end
end
end
| 30.90625 | 105 | 0.593023 |
e9df1f6cebdd7420c52fb7b68c0487877429e975 | 7,622 | # Copyright 2011-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
module AWS
  class CloudFormation

    # @attr_reader [String] template Returns the stack's template as a JSON
    #   string.
    #
    # @attr_reader [Time] creation_time The time the stack was created.
    #
    # @attr_reader [Time,nil] last_updated_time The time the stack was
    #   last updated.
    #
    # @attr_reader [String] stack_id Unique stack identifier.
    #
    # @attr_reader [String] status The status of the stack.
    #
    # @attr_reader [String] status_reason Success/Failure message
    #   associated with the +status+.
    #
    # @attr_reader [Array<String>] capabilities The capabilities
    #   allowed in the stack.
    #
    # @attr_reader [String] description User defined description
    #   associated with the stack.
    #
    # @attr_reader [Boolean] disable_rollback Specifies if the stack
    #   is rolled back due to stack creation errors.
    #
    # @attr_reader [Array<String>] notification_arns
    #   SNS topic ARNs to which stack related events are published.
    #
    # @attr_reader [Hash] parameters Returns a hash of stack parameters.
    #
    # @attr_reader [Integer] timeout
    #   The number of minutes within the stack creation should complete.
    #
    class Stack < Core::Resource

      include StackOptions

      # @param [String] name The stack name.
      # @private
      def initialize name, options = {}
        @name = name
        super
      end

      # @return [String] Returns the stack name.
      attr_reader :name

      # Attribute groups: :template attributes come from GetTemplate,
      # :describe attributes from DescribeStacks.
      define_attribute_type :template

      define_attribute_type :describe

      ## returned by GetTemplate

      template_attribute :template, :from => :template_body

      alias_method :template_body, :template

      ## returned by DescribeStacks

      describe_attribute :creation_time, :static => true

      describe_attribute :last_updated_time

      describe_attribute :stack_id, :static => true

      describe_attribute :status, :from => :stack_status

      describe_attribute :status_reason, :from => :stack_status_reason

      describe_attribute :capabilities

      describe_attribute :description

      describe_attribute :disable_rollback, :from => :disable_rollback?

      alias_method :disable_rollback?, :disable_rollback

      describe_attribute :notification_arns

      describe_attribute :output_details, :from => :outputs

      protected :output_details

      # Folds the raw [{:parameter_key => ..., :parameter_value => ...}]
      # pairs from the API into a plain key => value Hash.
      describe_attribute :parameters do
        translates_output do |params|
          params.inject({}) do |hash,param|
            hash.merge(param[:parameter_key] => param[:parameter_value])
          end
        end
      end

      describe_attribute :timeout, :from => :timeout_in_minutes

      alias_method :timeout_in_minutes, :timeout

      ## attribute providers

      # Populates describe attributes from the entry matching this stack
      # name in a DescribeStacks response.
      provider(:describe_stacks) do |provider|
        provider.find do |resp|
          resp.data[:stacks].find{|stack| stack[:stack_name] == name }
        end
        provider.provides *describe_attributes.keys
      end

      # Populates template attributes from a GetTemplate response issued
      # for this stack.
      provider(:get_template) do |provider|
        provider.find do |resp|
          resp if resp.request_options[:stack_name] == name
        end
        provider.provides *template_attributes.keys
      end

      # Wraps each raw output hash in a StackOutput value object.
      # @return [Array<StackOutput>]
      def outputs
        output_details.collect do |o|
          key, value, desc = o.values_at(:output_key, :output_value, :description)
          StackOutput.new(self, key, value, desc)
        end
      end

      # @return [StackEventCollection] Returns a collection that represents
      #   all events for this stack.
      def events
        StackEventCollection.new(self)
      end

      # Returns a stack resource collection that enumerates all resources
      # for this stack.
      #
      #   stack.resources.each do |resource|
      #     puts "#{resource.resource_type}: #{resource.physical_resource_id}"
      #   end
      #
      # If you want a specific resource and you know its logical resource
      # id, you can use this collection to return a reference to it.
      #
      #   resource = stack.resources['logical-resource-id']
      #
      # @return [StackResourceCollection]
      #
      def resources
        StackResourceCollection.new(self)
      end

      # Returns a stack resource summary collection, that when enumerated
      # yields summary hashes. Each hash has the following keys:
      #
      # * +:last_updated_timestamp+
      # * +:logical_resource_id+
      # * +:physical_resource_id+
      # * +:resource_status+
      # * +:resource_status_reason+
      # * +:resource_type+
      #
      # @return [StackResourceSummaryCollection]
      #
      def resource_summaries
        StackResourceSummaryCollection.new(self)
      end

      # @param [Hash] options
      #
      # @option options [String,URI,S3::S3Object,Object] :template
      #   A new stack template. This may be provided in a number of formats
      #   including:
      #
      #   * a String, containing the template as a JSON document.
      #   * a URL String pointing to the document in S3.
      #   * a URI object pointing to the document in S3.
      #   * an {S3::S3Object} which contains the template.
      #   * an Object which responds to #to_json and returns the template.
      #
      # @option options [Hash] :parameters A hash that specifies the
      #   input parameters of the new stack.
      #
      # @option options[Array<String>] :capabilities The list of capabilities
      #   that you want to allow in the stack. If your stack contains IAM
      #   resources, you must specify the CAPABILITY_IAM value for this
      #   parameter; otherwise, this action returns an
      #   InsufficientCapabilities error. IAM resources are the following:
      #
      #   * AWS::IAM::AccessKey
      #   * AWS::IAM::Group
      #   * AWS::IAM::Policy
      #   * AWS::IAM::User
      #   * AWS::IAM::UserToGroupAddition
      #
      # @return [nil]
      #
      def update options = {}
        client_opts = options.dup
        apply_stack_name(name, client_opts)
        apply_template(client_opts)
        apply_parameters(client_opts)
        client.update_stack(client_opts)
        nil
      end

      # @return (see CloudFormation#estimate_template_cost)
      def estimate_template_cost
        cloud_formation = CloudFormation.new(:config => config)
        cloud_formation.estimate_template_cost(template, parameters)
      end

      # Deletes the current stack.
      # @return [nil]
      def delete
        client.delete_stack(:stack_name => name)
        nil
      end

      # @return [Boolean] true when DescribeStacks succeeds for this stack
      #   name; a ValidationError from the API means it does not exist.
      def exists?
        begin
          client.describe_stacks(resource_options)
          true
        rescue Errors::ValidationError
          false
        end
      end

      protected

      # Identifies this resource by stack name for Core::Resource plumbing.
      def resource_identifiers
        [[:stack_name, name]]
      end

      # Fetches the backing response for an attribute: GetTemplate for
      # :template, DescribeStacks for everything else.
      def get_resource attribute
        if attribute.name == :template
          client.get_template(resource_options)
        else
          client.describe_stacks(resource_options)
        end
      end

    end
  end
end
| 29.773438 | 82 | 0.643007 |
39eda9ecfdf3bc104f36c3044aa103128186605b | 425 | require 'spec_helper'
describe GeoNamesAPI::CountrySubdivision do
describe "::find" do
  # Live-API lookup for a single lat/lng point.
  it "should find one subdivision" do
    result = GeoNamesAPI::CountrySubdivision.find("50.01","10.2")
    result.should be_present
  end
end
describe "::all" do
  # ::all may return several subdivisions for the same point.
  it "should find multiple subdivisions" do
    result = GeoNamesAPI::CountrySubdivision.all("50.01","10.2")
    result.size.should > 0
  end
end
end | 25 | 67 | 0.684706 |
91e15ad0128b5a346e4ed0c6f0d996f50ae420a2 | 1,055 | require 'curb'
require 'github_api'
require 'json'
require 'nokogiri'
class Feed
attr_reader :version
attr_reader :url, :sha256
def initialize(name)
@name = name
end
def get_binding
binding()
end
def github(owner, repo, options = {})
options[:draft] = false if options[:draft].nil?
options[:prerelease] = false if options[:prerelease].nil?
hub = Github.new oauth_token: Brewski.config["github_token"]
releases = hub.repos.releases.list owner: owner, repo: repo
releases = releases.select do |release|
release.draft == options[:draft] and release.prerelease == options[:prerelease]
end
end
def html(url)
curl = Curl::Easy.new(url) do |c|
c.follow_location = true
c.headers['User-Agent'] = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/600.8.9 (KHTML, like Gecko) Version/8.0.8 Safari/600.8.9'
#c.verbose = true
end
curl.perform
Nokogiri::HTML(curl.body_str)
end
def json(url)
resp = Curl.get(url)
JSON.parse(resp.body_str)
end
end
| 24.534884 | 150 | 0.670142 |
1aeb4964eb88f5fec9b0bb0903a13320af436560 | 2,069 | # frozen_string_literal: true
module RuboCop
module Cop
module Style
# This cop checks for trailing comma in hash literals.
# The configuration options are:
#
# * `consistent_comma`: Requires a comma after the
# last item of all non-empty, multiline hash literals.
# * `comma`: Requires a comma after the last item in a hash,
# but only when each item is on its own line.
# * `no_comma`: Does not requires a comma after the
# last item in a hash
#
# @example EnforcedStyleForMultiline: consistent_comma
#
# # bad
# a = { foo: 1, bar: 2, }
#
# # good
# a = { foo: 1, bar: 2 }
#
# # good
# a = {
# foo: 1, bar: 2,
# qux: 3,
# }
#
# # good
# a = {
# foo: 1, bar: 2, qux: 3,
# }
#
# # good
# a = {
# foo: 1,
# bar: 2,
# }
#
# @example EnforcedStyleForMultiline: comma
#
# # bad
# a = { foo: 1, bar: 2, }
#
# # good
# a = { foo: 1, bar: 2 }
#
# # bad
# a = {
# foo: 1, bar: 2,
# qux: 3,
# }
#
# # good
# a = {
# foo: 1, bar: 2,
# qux: 3
# }
#
# # bad
# a = {
# foo: 1, bar: 2, qux: 3,
# }
#
# # good
# a = {
# foo: 1, bar: 2, qux: 3
# }
#
# # good
# a = {
# foo: 1,
# bar: 2,
# }
#
# @example EnforcedStyleForMultiline: no_comma (default)
#
# # bad
# a = { foo: 1, bar: 2, }
#
# # good
# a = {
# foo: 1,
# bar: 2
# }
class TrailingCommaInHashLiteral < Base
include TrailingComma
extend AutoCorrector
def on_hash(node)
check_literal(node, 'item of %<article>s hash')
end
end
end
end
end
| 21.112245 | 66 | 0.389077 |
919c749cb9d52dd179c88d407b7dc87b4d523125 | 3,420 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable Rails's static asset server (Apache or nginx will already do this).
config.serve_static_assets = false
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Generate digests for assets URLs.
config.assets.digest = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# config.assets.precompile += %w( search.js )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
config.action_mailer.default_url_options = { host: 'inschef.com' }
end
| 39.767442 | 104 | 0.758187 |
114b25348c8cbb7f059b5e5024628c696e5938db | 1,000 | require File.dirname(__FILE__) + '/../../spec_helper'
describe "Fixnum#quo" do
it "returns the result of self divided by the given Integer as a Float" do
2.quo(2.5).should == 0.8
5.quo(2).should == 2.5
45.quo(0xffffffff).should_be_close(1.04773789668636e-08, TOLERANCE)
end
it "does not raise a ZeroDivisionError when the given Integer is 0" do
0.quo(0).to_s.should == "NaN"
10.quo(0).to_s.should == "Infinity"
-10.quo(0).to_s.should == "-Infinity"
end
it "does not raise a FloatDomainError when the given Integer is 0 and a Float" do
0.quo(0.0).to_s.should == "NaN"
10.quo(0.0).to_s.should == "Infinity"
-10.quo(0.0).to_s.should == "-Infinity"
end
it "raises a TypeError when given a non-Integer" do
should_raise(TypeError) do
(obj = Object.new).should_not_receive(:to_int)
13.quo(obj)
end
should_raise(TypeError) do
13.quo("10")
end
should_raise(TypeError) do
13.quo(:symbol)
end
end
end
| 27.027027 | 83 | 0.648 |
79f5cab92f1666be07b3af8dbb2983ceb4b75455 | 1,571 | =begin
#RadioManager
#RadioManager
OpenAPI spec version: 2.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.3.0
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for RadioManagerClient::TagRelationsContacts
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'TagRelationsContacts' do
before do
# run before each test
@instance = RadioManagerClient::TagRelationsContacts.new
end
after do
# run after each test
end
describe 'test an instance of TagRelationsContacts' do
it 'should create an instance of TagRelationsContacts' do
expect(@instance).to be_instance_of(RadioManagerClient::TagRelationsContacts)
end
end
describe 'test attribute "href"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "model"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "operation"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "params"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 25.754098 | 103 | 0.733291 |
033fc33902fd741cf8d4fc20c1db29da73aef78e | 2,776 | describe "StoreExtension" do
before do
NanoStore.shared_store = NanoStore.store
end
after do
NanoStore.shared_store = nil
end
class Animal < NanoStore::Model
attribute :name
end
it "should open and close store" do
NanoStore.shared_store.open
NanoStore.shared_store.closed?.should.be.false
NanoStore.shared_store.close
NanoStore.shared_store.closed?.should.be.true
NanoStore.shared_store.open
NanoStore.shared_store.closed?.should.be.false
end
it "should add, delete objects and count them" do
store = NanoStore.shared_store
obj1 = Animal.new
obj1.name = "Cat"
obj2 = Animal.new
obj2.name = "Dog"
obj3 = Animal.new
obj3.name = "Cow"
obj4 = Animal.new
obj4.name = "Duck"
store << obj1
store << [obj2, obj3]
store += obj4
store.save
Animal.count.should == 4
store.delete(obj1)
Animal.count.should == 3
store.delete_keys([obj2.key])
Animal.count.should == 2
store.clear
Animal.count.should == 0
end
it "should discard unsave changes" do
store = NanoStore.shared_store = NanoStore.store
store.save_interval = 1000 # must use save_interval= to set auto save interval first
store.engine.synchronousMode = SynchronousModeFull
Animal.count.should == 0
obj1 = Animal.new
obj1.name = "Cat"
obj2 = Animal.new
obj2.name = "Dog"
store << [obj1, obj2]
store.changed?.should.be.true
store.discard
store.changed?.should.be.false
Animal.count.should == 0
end
it "should create a transaction and commit" do
store = NanoStore.shared_store = NanoStore.store
store.transaction do |the_store|
Animal.count.should == 0
obj1 = Animal.new
obj1.name = "Cat"
obj1.save
obj2 = Animal.new
obj2.name = "Dog"
obj2.save
Animal.count.should == 2
end
store.save
Animal.count.should == 2
end
it "should create a transaction and rollback when fail" do
store = NanoStore.shared_store = NanoStore.store
begin
store.transaction do |the_store|
Animal.count.should == 0
obj1 = Animal.new
obj1.name = "Cat"
obj1.save
obj2 = Animal.new
obj2.name = "Dog"
obj2.save
Animal.count.should == 2
raise "error"
end
rescue
end
store.save
Animal.count.should == 0
end
it "should save in batch" do
store = NanoStore.shared_store = NanoStore.store
store.save_interval = 1000
Animal.count.should == 0
obj1 = Animal.new
obj1.name = "Cat"
store << obj1
obj2 = Animal.new
obj2.name = "Dog"
store << obj2
store.save
Animal.count.should == 2
end
end | 21.6875 | 88 | 0.628963 |
79bb93cbbe0ad2079160c442f66070e4520db340 | 1,358 | #
# Be sure to run `pod lib lint UMQCWorking.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'UMQCWorking'
s.version = '1.3.3'
s.summary = 'just a demo'
s.description = <<-DESC
jusr a demo
DESC
s.homepage = 'https://github.com/liyongfei12138/UMQCWorking'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'liyongfei12138' => '[email protected]' }
s.source = { :git => 'https://github.com/liyongfei12138/UMQCWorking.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '10.0'
s.source_files = 'UMQCWorking/QCKJLib.framework/Headers/*.{h}'
s.resources = 'UMQCWorking/QCKJBundle.bundle'
s.vendored_frameworks = 'UMQCWorking/QCKJLib.framework'
s.public_header_files = 'UMQCWorking/QCKJLib.framework/Headers/QCKJLib.h'
s.dependency 'WebViewJavascriptBridge'
s.dependency 'AFNetworking'
s.dependency 'JPush'
s.dependency 'UMengAnalytics-NO-IDFA'
end
| 32.333333 | 110 | 0.662003 |
f8bd1750411fd4898ab5b4cfd3a759fb59b121ee | 434 | require 'rails_helper'
feature 'cac proofing verify info step' do
include CacProofingHelper
before do
sign_in_and_2fa_user
complete_cac_proofing_steps_before_verify_step
end
it 'is on the correct page' do
expect(page).to have_current_path(idv_cac_proofing_verify_step)
end
it 'proceeds to the next page' do
click_continue
expect(page).to have_current_path(idv_cac_proofing_success_step)
end
end
| 20.666667 | 68 | 0.781106 |
28a294e61f759bd68d954307d5ca43fba31784eb | 1,031 | Gem::Specification.new do |s|
s.name = 'rbthemis'
s.version = '0.11.0'
s.date = '2019-03-28'
s.summary = 'Data security library for network communication and data storage for Ruby'
s.description = 'Themis is a data security library, providing users with high-quality security services for secure messaging of any kinds and flexible data storage. Themis is aimed at modern developers, with high level OOP wrappers for Ruby, Python, PHP, Java / Android and iOS / OSX. It is designed with ease of use in mind, high security and cross-platform availability.'
s.authors = ['CossackLabs']
s.email = '[email protected]'
s.files = ['lib/rbthemis.rb', 'lib/rubythemis.rb']
s.homepage = 'http://cossacklabs.com/'
s.license = 'Apache-2.0'
s.add_runtime_dependency 'ffi', '~> 1.9', '>= 1.9.8'
s.requirements << 'libthemis, v0.11.0'
s.post_install_message = 'If you were using rubythemis before, please uninstall it from your system using `gem uninstall rubythemis`'
end
| 64.4375 | 375 | 0.695441 |
01b127397fec3058c99561532ae0c4d91e41a3a7 | 2,111 | # frozen_string_literal: true
class Fisk
module Instructions
# Instruction VFRCZPD: Extract Fraction Packed Double-Precision Floating-Point
VFRCZPD = Instruction.new("VFRCZPD", [
# vfrczpd: xmm, xmm
Form.new([
OPERAND_TYPES[26],
OPERAND_TYPES[24],
].freeze, [
Class.new(Fisk::Encoding) {
def encode buffer, operands
add_VEX(buffer, operands)
add_opcode(buffer, 0x81, 0) +
add_modrm(buffer,
3,
operands[0].op_value,
operands[1].op_value, operands) +
0
end
}.new.freeze,
].freeze).freeze,
# vfrczpd: xmm, m128
Form.new([
OPERAND_TYPES[26],
OPERAND_TYPES[25],
].freeze, [
Class.new(Fisk::Encoding) {
def encode buffer, operands
add_VEX(buffer, operands)
add_opcode(buffer, 0x81, 0) +
add_modrm(buffer,
0,
operands[0].op_value,
operands[1].op_value, operands) +
0
end
}.new.freeze,
].freeze).freeze,
# vfrczpd: ymm, ymm
Form.new([
OPERAND_TYPES[65],
OPERAND_TYPES[60],
].freeze, [
Class.new(Fisk::Encoding) {
def encode buffer, operands
add_VEX(buffer, operands)
add_opcode(buffer, 0x81, 0) +
add_modrm(buffer,
3,
operands[0].op_value,
operands[1].op_value, operands) +
0
end
}.new.freeze,
].freeze).freeze,
# vfrczpd: ymm, m256
Form.new([
OPERAND_TYPES[65],
OPERAND_TYPES[66],
].freeze, [
Class.new(Fisk::Encoding) {
def encode buffer, operands
add_VEX(buffer, operands)
add_opcode(buffer, 0x81, 0) +
add_modrm(buffer,
0,
operands[0].op_value,
operands[1].op_value, operands) +
0
end
}.new.freeze,
].freeze).freeze,
].freeze).freeze
end
end
| 27.064103 | 82 | 0.503553 |
91b4ebd8fa19f55aa81c9ecb0318983468d27a90 | 3,603 | class User < ApplicationRecord
has_many :microposts, dependent: :destroy
has_many :active_relationships, class_name: "Relationship",
foreign_key: "follower_id",
dependent: :destroy
has_many :passive_relationships, class_name: "Relationship",
foreign_key: "followed_id",
dependent: :destroy
has_many :following, through: :active_relationships, source: :followed
has_many :followers, through: :passive_relationships, source: :follower
attr_accessor :remember_token, :activation_token, :reset_token
before_save :downcase_email
before_create :create_activation_digest
validates :name, presence: true, length: { maximum: 29 }
VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-.]+\.[a-z]+\z/i
validates :email, presence: true, length: { maximum: 41 }, format: { with: VALID_EMAIL_REGEX }, uniqueness: { case_sensitive: false }
has_secure_password
validates :password, presence: true, length: { minimum: 6 }, allow_nil: true
# Returns the hash digest of the given string.
def User.digest(string)
cost = ActiveModel::SecurePassword.min_cost ? BCrypt::Engine::MIN_COST :
BCrypt::Engine.cost
BCrypt::Password.create(string, cost: cost)
end
# Returns a random token.
def User.new_token
SecureRandom.urlsafe_base64
end
# Remembers a user in the database for use in persistent sessions.
def remember
self.remember_token = User.new_token
update_attribute(:remember_digest, User.digest(remember_token))
end
# Returns true if the given token matches the digest.
def authenticated?(attribute, token)
digest = send("#{attribute}_digest")
return false if digest.nil?
BCrypt::Password.new(digest).is_password?(token)
end
# Forgets a user.
def forget
update_attribute(:remember_digest, nil)
end
# Activates an account.
def activate
update_attribute(:activated, true)
update_attribute(:activated_at, Time.zone.now)
end
# Sends activation email.
def send_activation_email
UserMailer.account_activation(self).deliver_now
end
# Sets the password reset attributes.
def create_reset_digest
self.reset_token = User.new_token
update_attribute(:reset_digest, User.digest(reset_token))
update_attribute(:reset_sent_at, Time.zone.now)
end
# Sends password reset email.
def send_password_reset_email
UserMailer.password_reset(self).deliver_now
end
# Returns true if a password reset has expired.
def password_reset_expired?
reset_sent_at < 2.hours.ago
end
# Returns a user's status feed.
def feed
following_ids = "SELECT followed_id FROM relationships
WHERE follower_id = :user_id"
Micropost.where("user_id IN (#{following_ids})
OR user_id = :user_id", user_id: id)
end
# Follows a user.
def follow(other_user)
following << other_user
end
# Unfollows a user.
def unfollow(other_user)
following.delete(other_user)
end
# Returns true if the current user is following the other user.
def following?(other_user)
following.include?(other_user)
end
private
# Converts email to all lower-case.
def downcase_email
self.email = email.downcase
end
# Creates and assigns the activation token and digest.
def create_activation_digest
self.activation_token = User.new_token
self.activation_digest = User.digest(activation_token)
end
end | 31.884956 | 135 | 0.68443 |
ff221bf27b50aa98330fe2f3c5cc7e3a32bf4c0d | 144 | require 'test_helper'
class MmsIntegrationTest < ActiveSupport::TestCase
test "truth" do
assert_kind_of Module, MmsIntegration
end
end
| 18 | 50 | 0.784722 |
21571de24179fc5f06423e56547790b922219cb9 | 8,732 | namespace :spec do
# This set of tasks will run ci and retrieve the proper version of specs.
# The specs below this section will run specs, but they will not retrieve
# the specs they run against. This is so we can run the similiar mspec
# runs against a stable and head version of the rubyspecs.
desc "Run rubyspecs expected to pass in interpreted mode (version-frozen)"
task :ruby => :ci_interpreted_18
desc "Run rubyspecs expected to pass in interpreted mode (version-frozen)"
task :'ruby:int' => :ci_interpreted_18
desc "Run rubyspecs expected to pass in interpreted 1.9 mode (version-frozen)"
task :ruby19 => :ci_interpreted_19
desc "Run rubyspecs expected to pass in interpreted 1.9 mode (version-frozen)"
task :'ruby19:int' => :ci_interpreted_19
desc "Run rubyspecs expected to pass in compiled mode (version-frozen)"
task :'ruby:jit' => :ci_compiled_18
desc "Run rubyspecs expected to pass in compiled mode (version-frozen)"
task :'ruby19:jit' => :ci_compiled_19
desc "Run rubyspecs expected to pass in precompiled mode (version-frozen)"
task :'ruby:aot' => :ci_precompiled_18
desc "Run rubyspecs expected to pass in precompiled mode (version-frozen)"
task :'ruby19:aot' => :ci_precompiled_19
desc "Run simple set of tests over both 1.8 and 1.9 modes"
task :short => 'spec:ci_interpreted_18_19'
desc "Run rubyspecs expected to pass (version-frozen)"
task :ci => ['spec:tagged_18']
task :ci_18 => :ci
desc "Run rubyspecs expected to pass (version-frozen)"
task :ci_19 => ['spec:tagged_19']
task :ci_interpreted_18 => ['spec:interpreted_18']
task :ci_interpreted_19 => ['spec:interpreted_19']
task :ci_compiled_18 => ['spec:compiled_18']
task :ci_compiled_19 => ['spec:compiled_19']
task :ci_precompiled_18 => ['spec:precompiled_18']
task :ci_precompiled_19 => ['spec:precompiled_19']
desc "Run rubyspecs expected to pass in interpreted mode (version-frozen, both 1.8 and 1.9)"
task :ci_interpreted_18_19 => ['spec:interpreted_18', 'spec:interpreted_19']
desc "Run all the specs including failures (version-frozen)"
task :ci_all => ['spec:all_18']
desc "Run all the specs including failures (version-frozen)"
task :ci_all_19 => ['spec:all_19']
desc "Run all the specs in precompiled mode (version-frozen)"
task :ci_all_precompiled_18 => ['spec:all_precompiled_18']
desc "Run rubyspecs expected to pass (against latest rubyspec version)"
task :ci_latest => ['spec:fast_forward_to_rubyspec_head', 'spec:tagged_18']
desc "Run rubyspecs expected to pass (against latest rubyspec version)"
task :ci_latest_19 => ['spec:fast_forward_to_rubyspec_head', 'spec:tagged_19']
desc "Run optional C API rubyspecs"
task :ci_cext => ['spec:fast_forward_to_rubyspec_head'] do
mspec :spec_config => CEXT_MSPEC_FILE
fail "One or more Ruby spec runs have failed" if spec_run_error
end
# Note: For this point below it is your reponsibility to make sure specs
# are checked out.
desc "Run 1.8 tagged specs in interpreted, JIT, and pre-compiled modes"
task :tagged_18 => [:interpreted_18, :compiled_18, :precompiled_18] do
fail "One or more Ruby spec runs have failed" if spec_run_error
end
desc "Run all 1.8 specs in interpreted, JIT, and pre-compiled modes"
task :all_18 => [:all_interpreted_18, :all_compiled_18, :all_precompiled_18] do
fail "One or more Ruby spec runs have failed" if spec_run_error
end
desc "Run 1.9 tagged specs in interpreted, JIT, and pre-compiled modes"
task :tagged_19 => [:interpreted_19, :compiled_19, :precompiled_19] do
fail "One or more Ruby spec runs have failed" if spec_run_error
end
desc "Run all 1.9 specs in interpreted, JIT, and pre-compiled modes"
task :all_19 => [:all_interpreted_19, :all_compiled_19, :all_precompiled_19] do
fail "One or more Ruby spec runs have failed" if spec_run_error
end
desc "Tagged 1.8 specs in interpreted mode only"
task :interpreted_18 do
mspec :compile_mode => "OFF", :spec_config => RUBY18_MSPEC_FILE,
:compat => "1.8", :format => 'd'
end
desc "Tagged 1.8 specs in interpreted (IR) mode only"
task :interpreted_ir_18 do
mspec :compile_mode => "OFFIR", :spec_config => RUBY18_MSPEC_FILE,
:compat => "1.8", :format => 'd'
end
desc "Tagged 1.8 specs in JIT mode only (threshold=0)"
task :compiled_18 do
mspec :compile_mode => "JIT", :spec_config => RUBY18_MSPEC_FILE,
:jit_threshold => 0,
:compat => "1.8", :format => 'd'
end
desc "Tagged 1.8 specs in AOT mode only"
task :precompiled_18 do
mspec :compile_mode => "FORCE", :spec_config => RUBY18_MSPEC_FILE,
:jit_threshold => 0,
:compat => "1.8", :format => 'd'
end
desc "All 1.8 specs in interpreted mode only"
task :all_interpreted_18 do
mspec :compile_mode => "OFF",
:compat => "1.8", :format => 'd'
end
desc "All 1.8 specs in interpreted IR mode only"
task :all_interpreted_ir_18 do
mspec :compile_mode => "OFFIR",
:compat => "1.8", :format => 'd'
end
desc "All 1.8 specs in JIT mode only (threshold=0)"
task :all_compiled_18 do
mspec :compile_mode => "JIT", :jit_threshold => 0,
:compat => "1.8", :format => 'd'
end
desc "All 1.8 specs in AOT mode only"
task :all_precompiled_18 do
mspec :compile_mode => "FORCE", :jit_threshold => 0,
:compat => "1.8", :format => 'd'
end
desc "Tagged 1.9 specs in interpreted mode only"
task :interpreted_19 do
mspec :compile_mode => "OFF", :spec_config => RUBY19_MSPEC_FILE,
:compat => "1.9", :format => 'd'
end
desc "Tagged 1.9 specs in JIT mode only (threshold=0)"
task :compiled_19 do
mspec :compile_mode => "JIT", :spec_config => RUBY19_MSPEC_FILE,
:jit_threshold => 0, :compat => "1.9", :format => 'd'
end
desc "Tagged 1.9 specs in AOT mode only"
task :precompiled_19 do
mspec :compile_mode => "FORCE", :spec_config => RUBY19_MSPEC_FILE,
:jit_threshold => 0, :compat => "1.9", :format => 'd'
end
desc "All 1.9 specs in interpreted mode only"
task :all_interpreted_19 do
mspec :compile_mode => "OFF", :compat => "1.9", :format => 'd'
end
desc "All 1.9 specs in interpreted IR mode only"
task :all_interpreted_ir_19 do
mspec :compile_mode => "OFFIR", :compat => "1.9", :format => 'd'
end
desc "All 1.9 specs in JIT mode only (threshold=0)"
task :all_compiled_19 do
mspec :compile_mode => "JIT", :jit_threshold => 0, :compat => "1.9", :format => 'd'
end
# Parameterized rubyspec runs for e.g. TravisCI
desc "Run RubySpec in interpreted mode under the language compat version ENV['RUBYSPEC_LANG_VER']"
task :ci_interpreted_via_env do
ENV['RUBYSPEC_LANG_VER'] ||= '1.9'
case
when ENV['RUBYSPEC_LANG_VER'] == '1.8'
spec_config_file = RUBY18_MSPEC_FILE
else
spec_config_file = RUBY19_MSPEC_FILE
end
mspec :compile_mode => 'OFF', :compat => ENV['RUBYSPEC_LANG_VER'],
:spec_config => spec_config_file, :format => 's'
end
# Complimentary tasks for running specs
task :fetch_latest_specs => [:install_build_gems, :fetch_latest_rubyspec_repo, :fetch_latest_mspec_repo]
task :fetch_stable_specs => :install_build_gems do
puts "Rolling rubyspec to stable version"
git_submodule_update('spec/ruby')
puts "Rolling mspec to stable version"
git_submodule_update('spec/mspec')
end
task :fast_forward_to_rubyspec_head => :fetch_latest_specs do
puts "Rolling to rubyspec to latest version"
git_checkout('rubyspec', 'origin/HEAD', RUBYSPEC_DIR)
git_move_to_head_detached('rubyspec', RUBYSPEC_GIT_REPO, RUBYSPEC_DIR)
end
desc "Retrieve latest tagged rubyspec git repository"
task :fetch_latest_rubyspec_repo do
unless git_repo_exists? RUBYSPEC_DIR
clean_spec_dirs
git_clone('rubyspec', RUBYSPEC_GIT_REPO, RUBYSPEC_DIR)
else
git_fetch('rubyspec', RUBYSPEC_DIR, ignore_error = true)
end
end
desc "Retrieve latest tagged mspec git repository"
task :fetch_latest_mspec_repo do
unless git_repo_exists? MSPEC_DIR
git_clone('mspec', MSPEC_GIT_REPO, MSPEC_DIR)
else
git_fetch('mspec', MSPEC_DIR, ignore_error = true)
end
end
desc "Clean up spec dirs"
task :clean_specs do
clean_spec_dirs(true)
end
def clean_spec_dirs(wipe_spec_dir = false)
rm_rf RUBYSPEC_DIR if wipe_spec_dir
rm_rf MSPEC_DIR
rm_f RUBYSPEC_TAR_FILE
rm_f MSPEC_TAR_FILE
rm_f File.join(SPEC_DIR, "rubyspecs.current.revision")
end
def spec_run_error
# Obtuseriffic - If any previous spec runs were non-zero return we failed
['OFF', 'JIT', 'FORCE'].any? {|n| ant.properties["spec.status.#{n}"] != "0"}
end
end
| 36.383333 | 106 | 0.698923 |
ab8d9317c18245074de7a2ce0ae610d8091b1aa1 | 3,061 | class GhcAT88 < Formula
desc "Glorious Glasgow Haskell Compilation System"
homepage "https://haskell.org/ghc/"
url "https://downloads.haskell.org/~ghc/8.8.4/ghc-8.8.4-src.tar.xz"
sha256 "f0505e38b2235ff9f1090b51f44d6c8efd371068e5a6bb42a2a6d8b67b5ffc2d"
license "BSD-3-Clause"
revision 1
bottle do
sha256 "78a806d8c18645588e55422e2d67e19f1caaf8e869e98c7327a716a1ead63926" => :big_sur
sha256 "de4d4235c849b5c8f07a3b4604b1e1e3c50b88f0deb4e97f9846ab8dde0d5d56" => :catalina
sha256 "96b82af24e29043cd4f4c66b6871d40913ac58e30e2c0fced9ca3cc043408778" => :mojave
sha256 "9d5a52d029125c10744cf20c500ff84d9602fd32f6d81e9ca0137aba508a7ec8" => :high_sierra
end
keg_only :versioned_formula
depends_on "[email protected]" => :build
depends_on "sphinx-doc" => :build
depends_on arch: :x86_64
resource "gmp" do
url "https://ftp.gnu.org/gnu/gmp/gmp-6.1.2.tar.xz"
mirror "https://gmplib.org/download/gmp/gmp-6.1.2.tar.xz"
mirror "https://ftpmirror.gnu.org/gmp/gmp-6.1.2.tar.xz"
sha256 "87b565e89a9a684fe4ebeeddb8399dce2599f9c9049854ca8c0dfbdea0e21912"
end
# https://www.haskell.org/ghc/download_ghc_8_8_3.html#macosx_x86_64
# "This is a distribution for Mac OS X, 10.7 or later."
# A binary of ghc is needed to bootstrap ghc
resource "binary" do
on_macos do
url "https://downloads.haskell.org/~ghc/8.8.3/ghc-8.8.3-x86_64-apple-darwin.tar.xz"
sha256 "7016de90dd226b06fc79d0759c5d4c83c2ab01d8c678905442c28bd948dbb782"
end
on_linux do
url "https://downloads.haskell.org/~ghc/8.8.3/ghc-8.8.3-x86_64-deb8-linux.tar.xz"
sha256 "92b9fadc442976968d2c190c14e000d737240a7d721581cda8d8741b7bd402f0"
end
end
def install
ENV["CC"] = ENV.cc
ENV["LD"] = "ld"
# Build a static gmp rather than in-tree gmp, otherwise all ghc-compiled
# executables link to Homebrew's GMP.
gmp = libexec/"integer-gmp"
# GMP *does not* use PIC by default without shared libs so --with-pic
# is mandatory or else you'll get "illegal text relocs" errors.
resource("gmp").stage do
system "./configure", "--prefix=#{gmp}", "--with-pic", "--disable-shared",
"--build=#{Hardware.oldest_cpu}-apple-darwin#{OS.kernel_version.major}"
system "make"
system "make", "install"
end
args = ["--with-gmp-includes=#{gmp}/include",
"--with-gmp-libraries=#{gmp}/lib"]
resource("binary").stage do
binary = buildpath/"binary"
system "./configure", "--prefix=#{binary}", *args
ENV.deparallelize { system "make", "install" }
ENV.prepend_path "PATH", binary/"bin"
end
system "./configure", "--prefix=#{prefix}", *args
system "make"
ENV.deparallelize { system "make", "install" }
Dir.glob(lib/"*/package.conf.d/package.cache") { |f| rm f }
end
def post_install
system "#{bin}/ghc-pkg", "recache"
end
test do
(testpath/"hello.hs").write('main = putStrLn "Hello Homebrew"')
assert_match "Hello Homebrew", shell_output("#{bin}/runghc hello.hs")
end
end
| 34.393258 | 99 | 0.690297 |
383b28f0e58ce1d276a0176953ba8f815b2fbac2 | 1,233 | control 'PHTN-67-000063' do
title "The Photon operating system RPM package management tool must
cryptographically verify the authenticity of all software packages during
installation."
desc "Installation of any non-trusted software, patches, service packs,
device drivers, or operating system components can significantly affect the
overall security of the operating system. This requirement ensures the software
has not been tampered with and has been provided by a trusted vendor."
desc 'rationale', ''
desc 'check', "
At the command line, execute the following command:
# grep \"^gpgcheck\" /etc/tdnf/tdnf.conf
If \"gpgcheck\" is not set to \"1\", this is a finding.
"
desc 'fix', "
Open /etc/tdnf/tdnf.conf with a text editor.
Remove any existing gpgcheck setting and add the following line:
gpgcheck=1
"
impact 0.5
tag severity: 'medium'
tag gtitle: 'SRG-OS-000366-GPOS-00153'
tag gid: 'V-239134'
tag rid: 'SV-239134r675210_rule'
tag stig_id: 'PHTN-67-000063'
tag fix_id: 'F-42304r675209_fix'
tag cci: ['CCI-001749']
tag nist: ['CM-5 (3)']
describe command('grep "^gpgcheck" /etc/tdnf/tdnf.conf') do
its('stdout.strip') { should cmp 'gpgcheck=1' }
end
end
| 32.447368 | 79 | 0.714517 |
abf57c9d93b4110d320072f5d59c8030ed870271 | 1,024 | require 'test_helper'
module MoCo
describe CompilerRegister do
before { Singleton.__init__(CompilerRegister) }
after { reset_register }
it 'registers the compiler class for the source extension' do
HtmlCompiler.register('haml')
assert_equal HtmlCompiler, MoCo.compiler_for('haml')
end
describe 'compiler lookup' do
before do
MoCo.register(CssCompiler, 'sass')
MoCo.register(CssCompiler, 'scss')
MoCo.register(HtmlCompiler, 'md')
end
it 'works when more than one compiler is registered' do
assert_equal CssCompiler, MoCo.compiler_for('scss')
assert_equal HtmlCompiler, MoCo.compiler_for('md')
assert_equal CssCompiler, MoCo.compiler_for('sass')
end
it 'accepts a filename' do
assert_equal CssCompiler, MoCo.compiler_for('/dir/a style.css.sass')
end
it 'returns nil when the extension is unregistered' do
assert_nil MoCo.compiler_for('dummy')
end
end
end
end
| 24.380952 | 76 | 0.671875 |
ac95bac4adcee84af924cd7f2591d1a37d71e178 | 8,951 | require 'cases/helper'
require 'models/topic' # For booleans
require 'models/pirate' # For timestamps
require 'models/parrot'
require 'models/person' # For optimistic locking
class Pirate # Just reopening it, not defining it
attr_accessor :detected_changes_in_after_update # Boolean for if changes are detected
attr_accessor :changes_detected_in_after_update # Actual changes
after_update :check_changes
private
# after_save/update in sweepers, observers, and the model itself
# can end up checking dirty status and acting on the results
def check_changes
if self.changed?
self.detected_changes_in_after_update = true
self.changes_detected_in_after_update = self.changes
end
end
end
class NumericData < ActiveRecord::Base
self.table_name = 'numeric_data'
end
class DirtyTest < ActiveRecord::TestCase
def test_attribute_changes
# New record - no changes.
pirate = Pirate.new
assert !pirate.catchphrase_changed?
assert_nil pirate.catchphrase_change
# Change catchphrase.
pirate.catchphrase = 'arrr'
assert pirate.catchphrase_changed?
assert_nil pirate.catchphrase_was
assert_equal [nil, 'arrr'], pirate.catchphrase_change
# Saved - no changes.
pirate.save!
assert !pirate.catchphrase_changed?
assert_nil pirate.catchphrase_change
# Same value - no changes.
pirate.catchphrase = 'arrr'
assert !pirate.catchphrase_changed?
assert_nil pirate.catchphrase_change
end
def test_aliased_attribute_changes
# the actual attribute here is name, title is an
# alias setup via alias_attribute
parrot = Parrot.new
assert !parrot.title_changed?
assert_nil parrot.title_change
parrot.name = 'Sam'
assert parrot.title_changed?
assert_nil parrot.title_was
assert_equal parrot.name_change, parrot.title_change
end
def test_nullable_number_not_marked_as_changed_if_new_value_is_blank
pirate = Pirate.new
["", nil].each do |value|
pirate.parrot_id = value
assert !pirate.parrot_id_changed?
assert_nil pirate.parrot_id_change
end
end
def test_nullable_decimal_not_marked_as_changed_if_new_value_is_blank
numeric_data = NumericData.new
["", nil].each do |value|
numeric_data.bank_balance = value
assert !numeric_data.bank_balance_changed?
assert_nil numeric_data.bank_balance_change
end
end
def test_nullable_float_not_marked_as_changed_if_new_value_is_blank
numeric_data = NumericData.new
["", nil].each do |value|
numeric_data.temperature = value
assert !numeric_data.temperature_changed?
assert_nil numeric_data.temperature_change
end
end
def test_nullable_integer_zero_to_string_zero_not_marked_as_changed
pirate = Pirate.new
pirate.parrot_id = 0
pirate.catchphrase = 'arrr'
assert pirate.save!
assert !pirate.changed?
pirate.parrot_id = '0'
assert !pirate.changed?
end
def test_zero_to_blank_marked_as_changed
pirate = Pirate.new
pirate.catchphrase = "Yarrrr, me hearties"
pirate.parrot_id = 1
pirate.save
# check the change from 1 to ''
pirate = Pirate.find_by_catchphrase("Yarrrr, me hearties")
pirate.parrot_id = ''
assert pirate.parrot_id_changed?
assert_equal([1, nil], pirate.parrot_id_change)
pirate.save
# check the change from nil to 0
pirate = Pirate.find_by_catchphrase("Yarrrr, me hearties")
pirate.parrot_id = 0
assert pirate.parrot_id_changed?
assert_equal([nil, 0], pirate.parrot_id_change)
pirate.save
# check the change from 0 to ''
pirate = Pirate.find_by_catchphrase("Yarrrr, me hearties")
pirate.parrot_id = ''
assert pirate.parrot_id_changed?
assert_equal([0, nil], pirate.parrot_id_change)
end
def test_object_should_be_changed_if_any_attribute_is_changed
pirate = Pirate.new
assert !pirate.changed?
assert_equal [], pirate.changed
assert_equal Hash.new, pirate.changes
pirate.catchphrase = 'arrr'
assert pirate.changed?
assert_nil pirate.catchphrase_was
assert_equal %w(catchphrase), pirate.changed
assert_equal({'catchphrase' => [nil, 'arrr']}, pirate.changes)
pirate.save
assert !pirate.changed?
assert_equal [], pirate.changed
assert_equal Hash.new, pirate.changes
end
def test_attribute_will_change!
pirate = Pirate.create!(:catchphrase => 'arr')
pirate.catchphrase << ' matey'
assert !pirate.catchphrase_changed?
assert pirate.catchphrase_will_change!
assert pirate.catchphrase_changed?
assert_equal ['arr matey', 'arr matey'], pirate.catchphrase_change
pirate.catchphrase << '!'
assert pirate.catchphrase_changed?
assert_equal ['arr matey', 'arr matey!'], pirate.catchphrase_change
end
def test_association_assignment_changes_foreign_key
pirate = Pirate.create!(:catchphrase => 'jarl')
pirate.parrot = Parrot.create!(:name => 'Lorre')
assert pirate.changed?
assert_equal %w(parrot_id), pirate.changed
end
def test_attribute_should_be_compared_with_type_cast
topic = Topic.new
assert topic.approved?
assert !topic.approved_changed?
# Coming from web form.
params = {:topic => {:approved => 1}}
# In the controller.
topic.attributes = params[:topic]
assert topic.approved?
assert !topic.approved_changed?
end
def test_partial_update
pirate = Pirate.new(:catchphrase => 'foo')
old_updated_on = 1.hour.ago.beginning_of_day
with_partial_updates Pirate, false do
assert_queries(2) { 2.times { pirate.save! } }
Pirate.update_all({ :updated_on => old_updated_on }, :id => pirate.id)
end
with_partial_updates Pirate, true do
assert_queries(0) { 2.times { pirate.save! } }
assert_equal old_updated_on, pirate.reload.updated_on
assert_queries(1) { pirate.catchphrase = 'bar'; pirate.save! }
assert_not_equal old_updated_on, pirate.reload.updated_on
end
end
def test_partial_update_with_optimistic_locking
person = Person.new(:first_name => 'foo')
old_lock_version = 1
with_partial_updates Person, false do
assert_queries(2) { 2.times { person.save! } }
Person.update_all({ :first_name => 'baz' }, :id => person.id)
end
with_partial_updates Person, true do
assert_queries(0) { 2.times { person.save! } }
assert_equal old_lock_version, person.reload.lock_version
assert_queries(1) { person.first_name = 'bar'; person.save! }
assert_not_equal old_lock_version, person.reload.lock_version
end
end
def test_changed_attributes_should_be_preserved_if_save_failure
pirate = Pirate.new
pirate.parrot_id = 1
assert !pirate.save
check_pirate_after_save_failure(pirate)
pirate = Pirate.new
pirate.parrot_id = 1
assert_raise(ActiveRecord::RecordInvalid) { pirate.save! }
check_pirate_after_save_failure(pirate)
end
def test_reload_should_clear_changed_attributes
pirate = Pirate.create!(:catchphrase => "shiver me timbers")
pirate.catchphrase = "*hic*"
assert pirate.changed?
pirate.reload
assert !pirate.changed?
end
def test_reverted_changes_are_not_dirty
phrase = "shiver me timbers"
pirate = Pirate.create!(:catchphrase => phrase)
pirate.catchphrase = "*hic*"
assert pirate.changed?
pirate.catchphrase = phrase
assert !pirate.changed?
end
def test_reverted_changes_are_not_dirty_after_multiple_changes
phrase = "shiver me timbers"
pirate = Pirate.create!(:catchphrase => phrase)
10.times do |i|
pirate.catchphrase = "*hic*" * i
assert pirate.changed?
end
assert pirate.changed?
pirate.catchphrase = phrase
assert !pirate.changed?
end
def test_reverted_changes_are_not_dirty_going_from_nil_to_value_and_back
pirate = Pirate.create!(:catchphrase => "Yar!")
pirate.parrot_id = 1
assert pirate.changed?
assert pirate.parrot_id_changed?
assert !pirate.catchphrase_changed?
pirate.parrot_id = nil
assert !pirate.changed?
assert !pirate.parrot_id_changed?
assert !pirate.catchphrase_changed?
end
def test_save_should_store_serialized_attributes_even_with_partial_updates
with_partial_updates(Topic) do
topic = Topic.create!(:content => {:a => "a"})
topic.content[:b] = "b"
#assert topic.changed? # Known bug, will fail
topic.save!
assert_equal "b", topic.content[:b]
topic.reload
assert_equal "b", topic.content[:b]
end
end
private
def with_partial_updates(klass, on = true)
old = klass.partial_updates?
klass.partial_updates = on
yield
ensure
klass.partial_updates = old
end
def check_pirate_after_save_failure(pirate)
assert pirate.changed?
assert pirate.parrot_id_changed?
assert_equal %w(parrot_id), pirate.changed
assert_nil pirate.parrot_id_was
end
end
| 29.156352 | 87 | 0.720366 |
4a29675957091d4c13a8e065f4b946c1ce7db341 | 4,140 | require File.expand_path('../spec_helper', __FILE__)
describe Rack::Parser do
it "allows you to setup parsers for content types" do
middleware = Rack::Parser.new ParserApp, :parsers => { 'foo' => 'bar' }
assert_equal 'bar', middleware.parsers['foo']
end
it "should not remove fields from options in setup" do
options = {:parsers => { 'foo' => 'bar' }}
middleware = Rack::Parser.new ParserApp, options
refute_nil options[:parsers]
end
it "allows you to setup error handlers" do
stack = Rack::Parser.new ParserApp, :handlers => { 'foo' => 'bar' }
assert_equal 'bar', stack.handlers['foo']
end
it "parses a Content-Type" do
payload = JSON.dump(:a => 1)
parser = proc { |data| JSON.parse data }
stack Rack::Parser, :parsers => { 'application/json' => parser }
post '/post', payload, { 'CONTENT_TYPE' => 'application/json' }
assert last_response.ok?
assert_equal "{\"a\"=>1}", last_response.body
end
it "does nothing if unmatched Content-Type" do
payload = JSON.dump(:a => 1)
parser = proc { |data| JSON.parse data }
stack Rack::Parser, :parsers => { 'application/json' => parser }
post '/post', payload, { 'CONTENT_TYPE' => 'application/xml' }
assert last_response.ok?
assert_equal "{}", last_response.body # request.params won't pick up this content type
end
it "matches Content-Type by regex" do
payload = JSON.dump(:a => 2)
parser = proc { |data| JSON.parse data }
stack Rack::Parser, :parsers => { %r{json} => parser }
post '/post', payload, { 'CONTENT_TYPE' => 'application/vnd.foo+json' }
assert last_response.ok?
assert_equal "{\"a\"=>2}", last_response.body
end
it 'matches ambiguous string Content-Type and forces explicit regex' do
payload = JSON.dump(:a => 2)
parser = proc { |data| JSON.parse data }
stack Rack::Parser, :parsers => { 'application/vnd.foo+json' => parser }
post '/post', payload, { 'CONTENT_TYPE' => 'application/vnd.foo+json' }
assert last_response.ok?
assert_equal "{\"a\"=>2}", last_response.body
end
it "handles upstream errors" do
assert_raises StandardError, 'error!' do
parser = proc { |data| JSON.parse data }
stack Rack::Parser, :parsers => { %r{json} => parser }
post '/error', '{}', { 'CONTENT_TYPE' => 'application/json' }
end
end
it "returns a default error" do
parser = proc { |data| raise StandardError, 'wah wah' }
stack Rack::Parser, :parsers => { %r{json} => parser }
post '/post', '{}', { 'CONTENT_TYPE' => 'application/vnd.foo+json' }
assert_equal 400, last_response.status
end
it "returns a custom error message" do
parser = proc { |data| raise StandardError, "wah wah" }
handler = proc { |err, type| [500, {}, "%s : %s" % [type, err]] }
stack Rack::Parser, :parsers => { %r{json} => parser },
:handlers => { %r{json} => handler }
post '/post', '{}', { 'CONTENT_TYPE' => 'application/vnd.foo+json' }
assert_equal 500, last_response.status
assert_equal 'application/vnd.foo+json : wah wah', last_response.body
end
it 'returns a custome error for ambiguous string Content-Type and forces explicit regex' do
parser = proc { |data| raise StandardError, "wah wah" }
handler = proc { |err, type| [500, {}, "%s : %s" % [type, err]] }
stack Rack::Parser, :parsers => { %r{json} => parser },
:handlers => { 'application/vnd.foo+json' => handler }
post '/post', '{}', { 'CONTENT_TYPE' => 'application/vnd.foo+json' }
assert_equal 500, last_response.status
assert_equal 'application/vnd.foo+json : wah wah', last_response.body
end
it "parses an array but do not set it to params" do
payload = JSON.dump([1,2,3])
parser = proc { |data| JSON.parse data }
stack Rack::Parser, :parsers => { 'application/json' => parser }
post '/post', payload, { 'CONTENT_TYPE' => 'application/json' }
assert last_response.ok?
assert_equal last_request.env['rack.parser.result'], [1, 2, 3]
assert_equal last_request.env['rack.request.form_hash'], nil
end
end
| 37.636364 | 93 | 0.628986 |
339a57a1f209b02dfbb69e0816628c7587ac8df1 | 40,121 | require 'spec_helper'
describe API::Runner do
include StubGitlabCalls
let(:registration_token) { 'abcdefg123456' }
before do
stub_gitlab_calls
stub_application_setting(runners_registration_token: registration_token)
end
describe '/api/v4/runners' do
describe 'POST /api/v4/runners' do
context 'when no token is provided' do
it 'returns 400 error' do
post api('/runners')
expect(response).to have_http_status 400
end
end
context 'when invalid token is provided' do
it 'returns 403 error' do
post api('/runners'), token: 'invalid'
expect(response).to have_http_status 403
end
end
context 'when valid token is provided' do
it 'creates runner with default values' do
post api('/runners'), token: registration_token
runner = Ci::Runner.first
expect(response).to have_http_status 201
expect(json_response['id']).to eq(runner.id)
expect(json_response['token']).to eq(runner.token)
expect(runner.run_untagged).to be true
expect(runner.token).not_to eq(registration_token)
end
context 'when project token is used' do
let(:project) { create(:empty_project) }
it 'creates runner' do
post api('/runners'), token: project.runners_token
expect(response).to have_http_status 201
expect(project.runners.size).to eq(1)
expect(Ci::Runner.first.token).not_to eq(registration_token)
expect(Ci::Runner.first.token).not_to eq(project.runners_token)
end
end
end
context 'when runner description is provided' do
it 'creates runner' do
post api('/runners'), token: registration_token,
description: 'server.hostname'
expect(response).to have_http_status 201
expect(Ci::Runner.first.description).to eq('server.hostname')
end
end
context 'when runner tags are provided' do
it 'creates runner' do
post api('/runners'), token: registration_token,
tag_list: 'tag1, tag2'
expect(response).to have_http_status 201
expect(Ci::Runner.first.tag_list.sort).to eq(%w(tag1 tag2))
end
end
context 'when option for running untagged jobs is provided' do
context 'when tags are provided' do
it 'creates runner' do
post api('/runners'), token: registration_token,
run_untagged: false,
tag_list: ['tag']
expect(response).to have_http_status 201
expect(Ci::Runner.first.run_untagged).to be false
expect(Ci::Runner.first.tag_list.sort).to eq(['tag'])
end
end
context 'when tags are not provided' do
it 'returns 404 error' do
post api('/runners'), token: registration_token,
run_untagged: false
expect(response).to have_http_status 404
end
end
end
context 'when option for locking Runner is provided' do
it 'creates runner' do
post api('/runners'), token: registration_token,
locked: true
expect(response).to have_http_status 201
expect(Ci::Runner.first.locked).to be true
end
end
%w(name version revision platform architecture).each do |param|
context "when info parameter '#{param}' info is present" do
let(:value) { "#{param}_value" }
it "updates provided Runner's parameter" do
post api('/runners'), token: registration_token,
info: { param => value }
expect(response).to have_http_status 201
expect(Ci::Runner.first.read_attribute(param.to_sym)).to eq(value)
end
end
end
end
describe 'DELETE /api/v4/runners' do
context 'when no token is provided' do
it 'returns 400 error' do
delete api('/runners')
expect(response).to have_http_status 400
end
end
context 'when invalid token is provided' do
it 'returns 403 error' do
delete api('/runners'), token: 'invalid'
expect(response).to have_http_status 403
end
end
context 'when valid token is provided' do
let(:runner) { create(:ci_runner) }
it 'deletes Runner' do
delete api('/runners'), token: runner.token
expect(response).to have_http_status 204
expect(Ci::Runner.count).to eq(0)
end
end
end
describe 'POST /api/v4/runners/verify' do
let(:runner) { create(:ci_runner) }
context 'when no token is provided' do
it 'returns 400 error' do
post api('/runners/verify')
expect(response).to have_http_status :bad_request
end
end
context 'when invalid token is provided' do
it 'returns 403 error' do
post api('/runners/verify'), token: 'invalid-token'
expect(response).to have_http_status 403
end
end
context 'when valid token is provided' do
it 'verifies Runner credentials' do
post api('/runners/verify'), token: runner.token
expect(response).to have_http_status 200
end
end
end
end
describe '/api/v4/jobs' do
let(:project) { create(:empty_project, shared_runners_enabled: false) }
let(:pipeline) { create(:ci_pipeline_without_jobs, project: project, ref: 'master') }
let(:runner) { create(:ci_runner) }
let!(:job) do
create(:ci_build, :artifacts, :extended_options,
pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0, commands: "ls\ndate")
end
before do
project.runners << runner
end
describe 'POST /api/v4/jobs/request' do
let!(:last_update) {}
let!(:new_update) { }
let(:user_agent) { 'gitlab-runner 9.0.0 (9-0-stable; go1.7.4; linux/amd64)' }
before do
stub_container_registry_config(enabled: false)
end
shared_examples 'no jobs available' do
before do
request_job
end
context 'when runner sends version in User-Agent' do
context 'for stable version' do
it 'gives 204 and set X-GitLab-Last-Update' do
expect(response).to have_http_status(204)
expect(response.header).to have_key('X-GitLab-Last-Update')
end
end
context 'when last_update is up-to-date' do
let(:last_update) { runner.ensure_runner_queue_value }
it 'gives 204 and set the same X-GitLab-Last-Update' do
expect(response).to have_http_status(204)
expect(response.header['X-GitLab-Last-Update']).to eq(last_update)
end
end
context 'when last_update is outdated' do
let(:last_update) { runner.ensure_runner_queue_value }
let(:new_update) { runner.tick_runner_queue }
it 'gives 204 and set a new X-GitLab-Last-Update' do
expect(response).to have_http_status(204)
expect(response.header['X-GitLab-Last-Update']).to eq(new_update)
end
end
context 'when beta version is sent' do
let(:user_agent) { 'gitlab-runner 9.0.0~beta.167.g2b2bacc (master; go1.7.4; linux/amd64)' }
it { expect(response).to have_http_status(204) }
end
context 'when pre-9-0 version is sent' do
let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0 (1-6-stable; go1.6.3; linux/amd64)' }
it { expect(response).to have_http_status(204) }
end
context 'when pre-9-0 beta version is sent' do
let(:user_agent) { 'gitlab-ci-multi-runner 1.6.0~beta.167.g2b2bacc (master; go1.6.3; linux/amd64)' }
it { expect(response).to have_http_status(204) }
end
end
end
context 'when no token is provided' do
it 'returns 400 error' do
post api('/jobs/request')
expect(response).to have_http_status 400
end
end
context 'when invalid token is provided' do
it 'returns 403 error' do
post api('/jobs/request'), token: 'invalid'
expect(response).to have_http_status 403
end
end
context 'when valid token is provided' do
context 'when Runner is not active' do
let(:runner) { create(:ci_runner, :inactive) }
it 'returns 204 error' do
request_job
expect(response).to have_http_status 204
end
end
context 'when jobs are finished' do
before do
job.success
end
it_behaves_like 'no jobs available'
end
context 'when other projects have pending jobs' do
before do
job.success
create(:ci_build, :pending)
end
it_behaves_like 'no jobs available'
end
context 'when shared runner requests job for project without shared_runners_enabled' do
let(:runner) { create(:ci_runner, :shared) }
it_behaves_like 'no jobs available'
end
context 'when there is a pending job' do
let(:expected_job_info) do
{ 'name' => job.name,
'stage' => job.stage,
'project_id' => job.project.id,
'project_name' => job.project.name }
end
let(:expected_git_info) do
{ 'repo_url' => job.repo_url,
'ref' => job.ref,
'sha' => job.sha,
'before_sha' => job.before_sha,
'ref_type' => 'branch' }
end
let(:expected_steps) do
[{ 'name' => 'script',
'script' => %w(ls date),
'timeout' => job.timeout,
'when' => 'on_success',
'allow_failure' => false },
{ 'name' => 'after_script',
'script' => %w(ls date),
'timeout' => job.timeout,
'when' => 'always',
'allow_failure' => true }]
end
let(:expected_variables) do
[{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true },
{ 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true },
{ 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true }]
end
let(:expected_artifacts) do
[{ 'name' => 'artifacts_file',
'untracked' => false,
'paths' => %w(out/),
'when' => 'always',
'expire_in' => '7d' }]
end
let(:expected_cache) do
[{ 'key' => 'cache_key',
'untracked' => false,
'paths' => ['vendor/*'] }]
end
it 'picks a job' do
request_job info: { platform: :darwin }
expect(response).to have_http_status(201)
expect(response.headers).not_to have_key('X-GitLab-Last-Update')
expect(runner.reload.platform).to eq('darwin')
expect(json_response['id']).to eq(job.id)
expect(json_response['token']).to eq(job.token)
expect(json_response['job_info']).to eq(expected_job_info)
expect(json_response['git_info']).to eq(expected_git_info)
expect(json_response['image']).to eq({ 'name' => 'ruby:2.1', 'entrypoint' => '/bin/sh' })
expect(json_response['services']).to eq([{ 'name' => 'postgres', 'entrypoint' => nil,
'alias' => nil, 'command' => nil },
{ 'name' => 'docker:dind', 'entrypoint' => '/bin/sh',
'alias' => 'docker', 'command' => 'sleep 30' }])
expect(json_response['steps']).to eq(expected_steps)
expect(json_response['artifacts']).to eq(expected_artifacts)
expect(json_response['cache']).to eq(expected_cache)
expect(json_response['variables']).to include(*expected_variables)
end
context 'when job is made for tag' do
let!(:job) { create(:ci_build_tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
it 'sets branch as ref_type' do
request_job
expect(response).to have_http_status(201)
expect(json_response['git_info']['ref_type']).to eq('tag')
end
end
context 'when job is made for branch' do
it 'sets tag as ref_type' do
request_job
expect(response).to have_http_status(201)
expect(json_response['git_info']['ref_type']).to eq('branch')
end
end
it 'updates runner info' do
expect { request_job }.to change { runner.reload.contacted_at }
end
%w(name version revision platform architecture).each do |param|
context "when info parameter '#{param}' is present" do
let(:value) { "#{param}_value" }
it "updates provided Runner's parameter" do
request_job info: { param => value }
expect(response).to have_http_status(201)
expect(runner.reload.read_attribute(param.to_sym)).to eq(value)
end
end
end
context 'when concurrently updating a job' do
before do
expect_any_instance_of(Ci::Build).to receive(:run!)
.and_raise(ActiveRecord::StaleObjectError.new(nil, nil))
end
it 'returns a conflict' do
request_job
expect(response).to have_http_status(409)
expect(response.headers).not_to have_key('X-GitLab-Last-Update')
end
end
context 'when project and pipeline have multiple jobs' do
let!(:job) { create(:ci_build_tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:job2) { create(:ci_build_tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
before do
job.success
job2.success
end
it 'returns dependent jobs' do
request_job
expect(response).to have_http_status(201)
expect(json_response['id']).to eq(test_job.id)
expect(json_response['dependencies'].count).to eq(2)
expect(json_response['dependencies']).to include(
{ 'id' => job.id, 'name' => job.name, 'token' => job.token },
{ 'id' => job2.id, 'name' => job2.name, 'token' => job2.token })
end
end
context 'when pipeline have jobs with artifacts' do
let!(:job) { create(:ci_build_tag, :artifacts, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:test_job) { create(:ci_build, pipeline: pipeline, name: 'deploy', stage: 'deploy', stage_idx: 1) }
before do
job.success
end
it 'returns dependent jobs' do
request_job
expect(response).to have_http_status(201)
expect(json_response['id']).to eq(test_job.id)
expect(json_response['dependencies'].count).to eq(1)
expect(json_response['dependencies']).to include(
{ 'id' => job.id, 'name' => job.name, 'token' => job.token,
'artifacts_file' => { 'filename' => 'ci_build_artifacts.zip', 'size' => 106365 } })
end
end
context 'when explicit dependencies are defined' do
let!(:job) { create(:ci_build_tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:job2) { create(:ci_build_tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
let!(:test_job) do
create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'deploy',
stage: 'deploy', stage_idx: 1,
options: { dependencies: [job2.name] })
end
before do
job.success
job2.success
end
it 'returns dependent jobs' do
request_job
expect(response).to have_http_status(201)
expect(json_response['id']).to eq(test_job.id)
expect(json_response['dependencies'].count).to eq(1)
expect(json_response['dependencies'][0]).to include('id' => job2.id, 'name' => job2.name, 'token' => job2.token)
end
end
context 'when dependencies is an empty array' do
let!(:job) { create(:ci_build_tag, pipeline: pipeline, name: 'spinach', stage: 'test', stage_idx: 0) }
let!(:job2) { create(:ci_build_tag, pipeline: pipeline, name: 'rubocop', stage: 'test', stage_idx: 0) }
let!(:empty_dependencies_job) do
create(:ci_build, pipeline: pipeline, token: 'test-job-token', name: 'empty_dependencies_job',
stage: 'deploy', stage_idx: 1,
options: { dependencies: [] })
end
before do
job.success
job2.success
end
it 'returns an empty array' do
request_job
expect(response).to have_http_status(201)
expect(json_response['id']).to eq(empty_dependencies_job.id)
expect(json_response['dependencies'].count).to eq(0)
end
end
context 'when job has no tags' do
before do
job.update(tags: [])
end
context 'when runner is allowed to pick untagged jobs' do
before do
runner.update_column(:run_untagged, true)
end
it 'picks job' do
request_job
expect(response).to have_http_status 201
end
end
context 'when runner is not allowed to pick untagged jobs' do
before do
runner.update_column(:run_untagged, false)
end
it_behaves_like 'no jobs available'
end
end
context 'when triggered job is available' do
let(:expected_variables) do
[{ 'key' => 'CI_JOB_NAME', 'value' => 'spinach', 'public' => true },
{ 'key' => 'CI_JOB_STAGE', 'value' => 'test', 'public' => true },
{ 'key' => 'CI_PIPELINE_TRIGGERED', 'value' => 'true', 'public' => true },
{ 'key' => 'DB_NAME', 'value' => 'postgres', 'public' => true },
{ 'key' => 'SECRET_KEY', 'value' => 'secret_value', 'public' => false },
{ 'key' => 'TRIGGER_KEY_1', 'value' => 'TRIGGER_VALUE_1', 'public' => false }]
end
before do
trigger = create(:ci_trigger, project: project)
create(:ci_trigger_request_with_variables, pipeline: pipeline, builds: [job], trigger: trigger)
project.variables << Ci::Variable.new(key: 'SECRET_KEY', value: 'secret_value')
end
it 'returns variables for triggers' do
request_job
expect(response).to have_http_status(201)
expect(json_response['variables']).to include(*expected_variables)
end
end
describe 'registry credentials support' do
let(:registry_url) { 'registry.example.com:5005' }
let(:registry_credentials) do
{ 'type' => 'registry',
'url' => registry_url,
'username' => 'gitlab-ci-token',
'password' => job.token }
end
context 'when registry is enabled' do
before do
stub_container_registry_config(enabled: true, host_port: registry_url)
end
it 'sends registry credentials key' do
request_job
expect(json_response).to have_key('credentials')
expect(json_response['credentials']).to include(registry_credentials)
end
end
context 'when registry is disabled' do
before do
stub_container_registry_config(enabled: false, host_port: registry_url)
end
it 'does not send registry credentials' do
request_job
expect(json_response).to have_key('credentials')
expect(json_response['credentials']).not_to include(registry_credentials)
end
end
end
end
def request_job(token = runner.token, **params)
new_params = params.merge(token: token, last_update: last_update)
post api('/jobs/request'), new_params, { 'User-Agent' => user_agent }
end
end
end
describe 'PUT /api/v4/jobs/:id' do
let(:job) { create(:ci_build, :pending, :trace, pipeline: pipeline, runner_id: runner.id) }
before do
job.run!
end
context 'when status is given' do
it 'mark job as succeeded' do
update_job(state: 'success')
expect(job.reload.status).to eq 'success'
end
it 'mark job as failed' do
update_job(state: 'failed')
expect(job.reload.status).to eq 'failed'
end
end
context 'when tace is given' do
it 'updates a running build' do
update_job(trace: 'BUILD TRACE UPDATED')
expect(response).to have_http_status(200)
expect(job.reload.trace.raw).to eq 'BUILD TRACE UPDATED'
end
end
context 'when no trace is given' do
it 'does not override trace information' do
update_job
expect(job.reload.trace.raw).to eq 'BUILD TRACE'
end
end
context 'when job has been erased' do
let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
it 'responds with forbidden' do
update_job
expect(response).to have_http_status(403)
end
end
def update_job(token = job.token, **params)
new_params = params.merge(token: token)
put api("/jobs/#{job.id}"), new_params
end
end
describe 'PATCH /api/v4/jobs/:id/trace' do
let(:job) { create(:ci_build, :running, :trace, runner_id: runner.id, pipeline: pipeline) }
let(:headers) { { API::Helpers::Runner::JOB_TOKEN_HEADER => job.token, 'Content-Type' => 'text/plain' } }
let(:headers_with_range) { headers.merge({ 'Content-Range' => '11-20' }) }
let(:update_interval) { 10.seconds.to_i }
before do
initial_patch_the_trace
end
context 'when request is valid' do
it 'gets correct response' do
expect(response.status).to eq 202
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
expect(response.header).to have_key 'Range'
expect(response.header).to have_key 'Job-Status'
end
context 'when job has been updated recently' do
it { expect{ patch_the_trace }.not_to change { job.updated_at }}
it "changes the job's trace" do
patch_the_trace
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when Runner makes a force-patch' do
it { expect{ force_patch_the_trace }.not_to change { job.updated_at }}
it "doesn't change the build.trace" do
force_patch_the_trace
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
end
end
end
context 'when job was not updated recently' do
let(:update_interval) { 15.minutes.to_i }
it { expect { patch_the_trace }.to change { job.updated_at } }
it 'changes the job.trace' do
patch_the_trace
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended appended'
end
context 'when Runner makes a force-patch' do
it { expect { force_patch_the_trace }.to change { job.updated_at } }
it "doesn't change the job.trace" do
force_patch_the_trace
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
end
end
end
context 'when project for the build has been deleted' do
let(:job) do
create(:ci_build, :running, :trace, runner_id: runner.id, pipeline: pipeline) do |job|
job.project.update(pending_delete: true)
end
end
it 'responds with forbidden' do
expect(response.status).to eq(403)
end
end
end
context 'when Runner makes a force-patch' do
before do
force_patch_the_trace
end
it 'gets correct response' do
expect(response.status).to eq 202
expect(job.reload.trace.raw).to eq 'BUILD TRACE appended'
expect(response.header).to have_key 'Range'
expect(response.header).to have_key 'Job-Status'
end
end
context 'when content-range start is too big' do
let(:headers_with_range) { headers.merge({ 'Content-Range' => '15-20' }) }
it 'gets 416 error response with range headers' do
expect(response.status).to eq 416
expect(response.header).to have_key 'Range'
expect(response.header['Range']).to eq '0-11'
end
end
context 'when content-range start is too small' do
let(:headers_with_range) { headers.merge({ 'Content-Range' => '8-20' }) }
it 'gets 416 error response with range headers' do
expect(response.status).to eq 416
expect(response.header).to have_key 'Range'
expect(response.header['Range']).to eq '0-11'
end
end
context 'when Content-Range header is missing' do
let(:headers_with_range) { headers }
it { expect(response.status).to eq 400 }
end
context 'when job has been errased' do
let(:job) { create(:ci_build, runner_id: runner.id, erased_at: Time.now) }
it { expect(response.status).to eq 403 }
end
def patch_the_trace(content = ' appended', request_headers = nil)
unless request_headers
job.trace.read do |stream|
offset = stream.size
limit = offset + content.length - 1
request_headers = headers.merge({ 'Content-Range' => "#{offset}-#{limit}" })
end
end
Timecop.travel(job.updated_at + update_interval) do
patch api("/jobs/#{job.id}/trace"), content, request_headers
job.reload
end
end
def initial_patch_the_trace
patch_the_trace(' appended', headers_with_range)
end
# Sends two empty-content patches; with content.length == 0 the computed
# Content-Range ends one before its own start, exercising the force-patch
# path in the endpoint.
def force_patch_the_trace
  2.times { patch_the_trace('') }
end
end
describe 'artifacts' do
# Shared setup for the artifacts endpoints: a pending build owned by this
# runner, Workhorse-style headers (JWT signed with the Workhorse secret so
# requests look like they were proxied through gitlab-workhorse), and two
# sample upload fixtures.
let(:job) { create(:ci_build, :pending, pipeline: pipeline, runner_id: runner.id) }
let(:jwt_token) { JWT.encode({ 'iss' => 'gitlab-workhorse' }, Gitlab::Workhorse.secret, 'HS256') }
let(:headers) { { 'GitLab-Workhorse' => '1.0', Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER => jwt_token } }
let(:headers_with_token) { headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.token) }
let(:file_upload) { fixture_file_upload(Rails.root + 'spec/fixtures/banana_sample.gif', 'image/gif') }
let(:file_upload2) { fixture_file_upload(Rails.root + 'spec/fixtures/dk.png', 'image/gif') }

before do
  # Artifacts can only be posted to a running job.
  job.run!
end
# Authorization pre-flight for artifact uploads. Fixed description grammar
# ("reject" -> "rejects") and a stray space in an argument list; behavior
# of the examples is unchanged.
describe 'POST /api/v4/jobs/:id/artifacts/authorize' do
  context 'when using token as parameter' do
    it 'authorizes posting artifacts to running job' do
      authorize_artifacts_with_token_in_params

      expect(response).to have_http_status(200)
      expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
      expect(json_response['TempPath']).not_to be_nil
    end

    it 'fails to post too large artifact' do
      stub_application_setting(max_artifacts_size: 0)
      authorize_artifacts_with_token_in_params(filesize: 100)

      expect(response).to have_http_status(413)
    end
  end

  context 'when using token as header' do
    it 'authorizes posting artifacts to running job' do
      authorize_artifacts_with_token_in_headers

      expect(response).to have_http_status(200)
      expect(response.content_type.to_s).to eq(Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE)
      expect(json_response['TempPath']).not_to be_nil
    end

    it 'fails to post too large artifact' do
      stub_application_setting(max_artifacts_size: 0)
      authorize_artifacts_with_token_in_headers(filesize: 100)

      expect(response).to have_http_status(413)
    end
  end

  context 'when using runners token' do
    # The runner registration token must not authorize job-scoped uploads.
    it 'fails to authorize artifacts posting' do
      authorize_artifacts(token: job.project.runners_token)

      expect(response).to have_http_status(403)
    end
  end

  it 'rejects requests that did not go through gitlab-workhorse' do
    headers.delete(Gitlab::Workhorse::INTERNAL_API_REQUEST_HEADER)
    authorize_artifacts

    expect(response).to have_http_status(500)
  end

  context 'authorization token is invalid' do
    it 'responds with forbidden' do
      authorize_artifacts(token: 'invalid', filesize: 100)

      expect(response).to have_http_status(403)
    end
  end

  # Raw POST to the authorize endpoint.
  def authorize_artifacts(params = {}, request_headers = headers)
    post api("/jobs/#{job.id}/artifacts/authorize"), params, request_headers
  end

  # Variant passing the job token as a request parameter.
  def authorize_artifacts_with_token_in_params(params = {}, request_headers = headers)
    params = params.merge(token: job.token)
    authorize_artifacts(params, request_headers)
  end

  # Variant passing the job token via the JOB_TOKEN header.
  def authorize_artifacts_with_token_in_headers(params = {}, request_headers = headers_with_token)
    authorize_artifacts(params, request_headers)
  end
end
describe 'POST /api/v4/jobs/:id/artifacts' do
context 'when artifacts are being stored inside of tmp path' do
before do
# by configuring this path we allow to pass temp file from any path
allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return('/')
end
# Uploads against an erased job are forbidden.
# NOTE(review): the upload is performed both in the before block and in
# the example itself — looks redundant; confirm whether the first call is
# intentional (e.g. to assert idempotence) or leftover duplication.
context 'when job has been erased' do
  let(:job) { create(:ci_build, erased_at: Time.now) }

  before do
    upload_artifacts(file_upload, headers_with_token)
  end

  it 'responds with forbidden' do
    upload_artifacts(file_upload, headers_with_token)

    expect(response).to have_http_status(403)
  end
end
context 'when job is running' do
  # All happy-path uploads respond 201 Created.
  shared_examples 'successful artifacts upload' do
    it 'updates successfully' do
      expect(response).to have_http_status(201)
    end
  end

  # Plain multipart upload (no Workhorse acceleration).
  context 'when uses regular file post' do
    before do
      upload_artifacts(file_upload, headers_with_token, false)
    end

    it_behaves_like 'successful artifacts upload'
  end

  # Workhorse-accelerated upload (file.path/file.name fields).
  context 'when uses accelerated file post' do
    before do
      upload_artifacts(file_upload, headers_with_token, true)
    end

    it_behaves_like 'successful artifacts upload'
  end

  # A second upload replaces the first and still succeeds.
  context 'when updates artifact' do
    before do
      upload_artifacts(file_upload2, headers_with_token)
      upload_artifacts(file_upload, headers_with_token)
    end

    it_behaves_like 'successful artifacts upload'
  end

  # The runner registration token is not a valid job token for uploads.
  context 'when using runners token' do
    it 'responds with forbidden' do
      upload_artifacts(file_upload, headers.merge(API::Helpers::Runner::JOB_TOKEN_HEADER => job.project.runners_token))

      expect(response).to have_http_status(403)
    end
  end
end
# With max_artifacts_size forced to 0, any upload exceeds the limit.
context 'when artifacts file is too large' do
  it 'fails to post too large artifact' do
    stub_application_setting(max_artifacts_size: 0)
    upload_artifacts(file_upload, headers_with_token)

    expect(response).to have_http_status(413)
  end
end
# Posting without any file payload is a bad request.
context 'when artifacts post request does not contain file' do
  it 'fails to post artifacts without file' do
    post api("/jobs/#{job.id}/artifacts"), {}, headers_with_token

    expect(response).to have_http_status(400)
  end
end
# Without the Workhorse headers the request is rejected outright.
context 'GitLab Workhorse is not configured' do
  it 'fails to post artifacts without GitLab-Workhorse' do
    post api("/jobs/#{job.id}/artifacts"), { token: job.token }, {}

    expect(response).to have_http_status(403)
  end
end
# Behaviour of the expire_in parameter and the instance-wide default
# (default_artifacts_expire_in application setting).
context 'when setting an expire date' do
  # Default is nil unless a nested context overrides it.
  let(:default_artifacts_expire_in) {}
  let(:post_data) do
    { 'file.path' => file_upload.path,
      'file.name' => file_upload.original_filename,
      'expire_in' => expire_in }
  end

  before do
    stub_application_setting(default_artifacts_expire_in: default_artifacts_expire_in)

    post(api("/jobs/#{job.id}/artifacts"), post_data, headers_with_token)
  end

  # An explicit expire_in wins.
  context 'when an expire_in is given' do
    let(:expire_in) { '7 days' }

    it 'updates when specified' do
      expect(response).to have_http_status(201)
      expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(7.days.from_now)
    end
  end

  context 'when no expire_in is given' do
    let(:expire_in) { nil }

    it 'ignores if not specified' do
      expect(response).to have_http_status(201)
      expect(job.reload.artifacts_expire_at).to be_nil
    end

    context 'with application default' do
      # The application default is applied when the request omits expire_in.
      context 'when default is 5 days' do
        let(:default_artifacts_expire_in) { '5 days' }

        it 'sets to application default' do
          expect(response).to have_http_status(201)
          expect(job.reload.artifacts_expire_at).to be_within(5.minutes).of(5.days.from_now)
        end
      end

      # A '0' default means "never expire".
      context 'when default is 0' do
        let(:default_artifacts_expire_in) { '0' }

        it 'does not set expire_in' do
          expect(response).to have_http_status(201)
          expect(job.reload.artifacts_expire_at).to be_nil
        end
      end
    end
  end
end
# Uploading an artifacts archive together with its metadata file.
context 'posts artifacts file and metadata file' do
  let!(:artifacts) { file_upload }
  let!(:metadata) { file_upload2 }

  let(:stored_artifacts_file) { job.reload.artifacts_file.file }
  let(:stored_metadata_file) { job.reload.artifacts_metadata.file }
  let(:stored_artifacts_size) { job.reload.artifacts_size }

  before do
    post(api("/jobs/#{job.id}/artifacts"), post_data, headers_with_token)
  end

  # Workhorse-accelerated request carrying path/name pairs for both files.
  context 'when posts data accelerated by workhorse is correct' do
    let(:post_data) do
      { 'file.path' => artifacts.path,
        'file.name' => artifacts.original_filename,
        'metadata.path' => metadata.path,
        'metadata.name' => metadata.original_filename }
    end

    it 'stores artifacts and artifacts metadata' do
      expect(response).to have_http_status(201)
      expect(stored_artifacts_file.original_filename).to eq(artifacts.original_filename)
      expect(stored_metadata_file.original_filename).to eq(metadata.original_filename)
      # Fixture-specific size of banana_sample.gif.
      expect(stored_artifacts_size).to eq(71759)
    end
  end

  # Metadata without an artifacts archive is rejected and nothing stored.
  context 'when there is no artifacts file in post data' do
    let(:post_data) do
      { 'metadata' => metadata }
    end

    it 'is expected to respond with bad request' do
      expect(response).to have_http_status(400)
    end

    it 'does not store metadata' do
      expect(stored_metadata_file).to be_nil
    end
  end
end
end
# Files outside the allowed upload path must be rejected.
# Fixed a malformed example description: `it' "…"' do` wrapped the text in
# literal double quotes inside a single-quoted string.
context 'when artifacts are being stored outside of tmp path' do
  before do
    # Restrict the allowed upload path to @tmpdir only, while the test
    # fixtures live in the system tmp directory — so the post must fail.
    @tmpdir = Dir.mktmpdir
    allow(ArtifactUploader).to receive(:artifacts_upload_path).and_return(@tmpdir)
  end

  after do
    FileUtils.remove_entry @tmpdir
  end

  it 'fails to post artifacts for outside of tmp path' do
    upload_artifacts(file_upload, headers_with_token)

    expect(response).to have_http_status(400)
  end
end
# Posts `file` to the artifacts endpoint. With `accelerated` the request
# mimics a Workhorse-accelerated upload (file.path/file.name fields);
# otherwise the file is sent as a plain multipart field.
def upload_artifacts(file, headers = {}, accelerated = true)
  params =
    accelerated ? { 'file.path' => file.path, 'file.name' => file.original_filename } : { 'file' => file }

  post api("/jobs/#{job.id}/artifacts"), params, headers
end
end
# Downloading job artifacts. Fixed description typos: "runnners" ->
# "runners", "does not has" -> "does not have", "download" -> "downloads".
describe 'GET /api/v4/jobs/:id/artifacts' do
  let(:token) { job.token }

  before do
    download_artifact
  end

  context 'when job has artifacts' do
    let(:job) { create(:ci_build, :artifacts) }
    let(:download_headers) do
      { 'Content-Transfer-Encoding' => 'binary',
        'Content-Disposition' => 'attachment; filename=ci_build_artifacts.zip' }
    end

    context 'when using job token' do
      it 'downloads artifacts' do
        expect(response).to have_http_status(200)
        expect(response.headers).to include download_headers
      end
    end

    # The runner registration token must not grant artifact downloads.
    context 'when using runners token' do
      let(:token) { job.project.runners_token }

      it 'responds with forbidden' do
        expect(response).to have_http_status(403)
      end
    end
  end

  context 'when job does not have artifacts' do
    it 'responds with not found' do
      expect(response).to have_http_status(404)
    end
  end

  # Issues the GET with the current token merged into the params.
  def download_artifact(params = {}, request_headers = headers)
    params = params.merge(token: token)
    get api("/jobs/#{job.id}/artifacts"), params, request_headers
  end
end
end
end
end
| 34.797051 | 129 | 0.565664 |
abb560f1b4c641b25b10452b9e87a0f43d4d6c3d | 39 | module Polycom
VERSION = "0.1.0"
end
| 9.75 | 19 | 0.666667 |
ac3c03bf7a7e911b957da5cf56413563cc35c571 | 7,529 | =begin
#Kubernetes
#No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.2.3
=end
require 'date'
module Kubernetes
# MetricStatus describes the last-read state of a single metric.
class V2beta1MetricStatus
# external refers to a global metric that is not associated with any Kubernetes object. It allows autoscaling based on information coming from components running outside of cluster (for example length of queue in cloud messaging service, or QPS from loadbalancer running outside of cluster).
attr_accessor :external
# object refers to a metric describing a single kubernetes object (for example, hits-per-second on an Ingress object).
attr_accessor :object
# pods refers to a metric describing each pod in the current scale target (for example, transactions-processed-per-second). The values will be averaged together before being compared to the target value.
attr_accessor :pods
# resource refers to a resource metric (such as those specified in requests and limits) known to Kubernetes describing each pod in the current scale target (e.g. CPU or memory). Such metrics are built in to Kubernetes, and have special scaling options on top of those available to normal per-pod metrics using the \"pods\" source.
attr_accessor :resource
# type is the type of metric source. It will be one of \"Object\", \"Pods\" or \"Resource\", each corresponds to a matching field in the object.
attr_accessor :type
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'external' => :'external',
:'object' => :'object',
:'pods' => :'pods',
:'resource' => :'resource',
:'type' => :'type'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'external' => :'V2beta1ExternalMetricStatus',
:'object' => :'V2beta1ObjectMetricStatus',
:'pods' => :'V2beta1PodsMetricStatus',
:'resource' => :'V2beta1ResourceMetricStatus',
:'type' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'external')
self.external = attributes[:'external']
end
if attributes.has_key?(:'object')
self.object = attributes[:'object']
end
if attributes.has_key?(:'pods')
self.pods = attributes[:'pods']
end
if attributes.has_key?(:'resource')
self.resource = attributes[:'resource']
end
if attributes.has_key?(:'type')
self.type = attributes[:'type']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properies with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @type.nil?
invalid_properties.push("invalid value for 'type', type cannot be nil.")
end
return invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @type.nil?
return true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
external == o.external &&
object == o.object &&
pods == o.pods &&
resource == o.resource &&
type == o.type
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[external, object, pods, resource, type].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
# Populates this model from a JSON-style hash, using the generated
# swagger_types map to deserialize each attribute to its declared type.
def build_from_hash(attributes)
  return nil unless attributes.is_a?(Hash)
  self.class.swagger_types.each_pair do |key, type|
    if type =~ /\AArray<(.*)>/i
      # check to ensure the input is an array given that the attribute
      # is documented as an array but the input is not
      if attributes[self.class.attribute_map[key]].is_a?(Array)
        self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
      end
    elsif !attributes[self.class.attribute_map[key]].nil?
      self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
    end # or else data not found in attributes(hash), not an issue as the data can be optional
  end

  self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
  case type.to_sym
  when :DateTime
    DateTime.parse(value)
  when :Date
    Date.parse(value)
  when :String
    value.to_s
  when :Integer
    value.to_i
  when :Float
    value.to_f
  when :BOOLEAN
    # Accept common truthy spellings; everything else is false.
    if value.to_s =~ /\A(true|t|yes|y|1)\z/i
      true
    else
      false
    end
  when :Object
    # generic object (usually a Hash), return directly
    value
  when /\AArray<(?<inner_type>.+)>\z/
    # Recursively deserialize each element of a typed array.
    inner_type = Regexp.last_match[:inner_type]
    value.map { |v| _deserialize(inner_type, v) }
  when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
    # Recursively deserialize both keys and values of a typed map.
    k_type = Regexp.last_match[:k_type]
    v_type = Regexp.last_match[:v_type]
    {}.tap do |hash|
      value.each do |k, v|
        hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
      end
    end
  else # model
    # Any other name is assumed to be a generated model in this namespace.
    temp_model = Kubernetes.const_get(type).new
    temp_model.build_from_hash(value)
  end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map{ |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 32.038298 | 334 | 0.634214 |
e8ee0814ab17d9e9722e7312c1ea806af08d1fae | 303 | Sequel.migration do
# Forward migration: create the contestants table.
up do
  create_table(:contestants) do
    primary_key :id
    Integer :contest_id
    String :first_name
    String :last_name
    DateTime :start_time_at
    DateTime :end_time_at
    # NOTE(review): stored as a String alongside the DateTime columns —
    # presumably a formatted display value; confirm intent.
    String :end_time
  end
end

# Rollback: drop the table again.
down do
  drop_table(:contestants)
end
end | 17.823529 | 33 | 0.660066 |
6a561d408bdd01b44ad10ece1e033d8914507312 | 1,274 | gem 'minitest', '~> 5.2'
require 'minitest/autorun'
require 'minitest/pride'
require_relative 'dragon'
# Unit tests for Dragon: constructor attributes and hunger behaviour.
class DragonTest < Minitest::Test
  # Fixture builders shared by the examples below; each call returns a
  # fresh Dragon so tests stay independent.
  def ramoth
    Dragon.new("Ramoth", :gold, "Lessa")
  end

  def mnementh
    Dragon.new("Mnementh", :bronze, "F'lar")
  end

  def canth
    Dragon.new("Canth", :brown, "F'nor")
  end

  def test_it_has_a_name
    assert_equal "Ramoth", ramoth.name
  end

  def test_it_has_a_rider
    assert_equal "Lessa", ramoth.rider
  end

  def test_it_has_a_color
    assert_equal :gold, ramoth.color
  end

  def test_a_different_dragon
    assert_equal "Mnementh", mnementh.name
  end

  def test_a_different_dragons_rider
    assert_equal "F'lar", mnementh.rider
  end

  def test_a_different_dragons_color
    assert_equal :bronze, mnementh.color
  end

  def test_dragons_are_born_hungry
    assert canth.hungry?
  end

  # It takes three meals before a dragon stops being hungry.
  def test_dragons_eat_a_lot
    dragon = canth
    2.times do
      assert dragon.hungry?
      dragon.eat
    end
    assert dragon.hungry?
    dragon.eat
    refute dragon.hungry?
  end
end
| 21.233333 | 53 | 0.688383 |
ed7ece23bb56b31e17311fe83fa023ab73196d65 | 1,679 | # frozen_string_literal: true
RSpec.describe Mutant::Expression::Parser do
  let(:object) { Mutant::Config::DEFAULT.expression_parser }

  # #call is the strict API: invalid input raises.
  describe '#call' do
    subject { object.call(input) }

    context 'on nonsense' do
      let(:input) { 'foo bar' }

      it 'raises an exception' do
        expect { subject }.to raise_error(
          Mutant::Expression::Parser::InvalidExpressionError,
          'Expression: "foo bar" is not valid'
        )
      end
    end

    context 'on a valid expression' do
      let(:input) { 'Foo' }

      it { should eql(Mutant::Expression::Namespace::Exact.new(scope_name: 'Foo')) }
    end
  end

  # .try_parse is the lenient API: invalid input yields nil instead.
  describe '.try_parse' do
    subject { object.try_parse(input) }

    context 'on nonsense' do
      let(:input) { 'foo bar' }

      it { should be(nil) }
    end

    context 'on a valid expression' do
      let(:input) { 'Foo' }

      it { should eql(Mutant::Expression::Namespace::Exact.new(scope_name: 'Foo')) }
    end

    # Two expression classes whose regexps both match the same input must
    # be reported as ambiguous rather than silently picking one.
    context 'on ambiguous expression' do
      let(:object) { described_class.new([test_a, test_b]) }

      let(:test_a) do
        Class.new(Mutant::Expression) do
          include Anima.new
          const_set(:REGEXP, /\Atest-syntax\z/.freeze)
        end
      end

      let(:test_b) do
        Class.new(Mutant::Expression) do
          include Anima.new
          const_set(:REGEXP, /^test-syntax$/.freeze)
        end
      end

      let(:input) { 'test-syntax' }

      it 'raises expected exception' do
        expect { subject }.to raise_error(
          Mutant::Expression::Parser::AmbiguousExpressionError,
          'Ambiguous expression: "test-syntax"'
        )
      end
    end
  end
end
| 23.985714 | 84 | 0.595593 |
26596afe415dfdf241c1cbc54bc5d6c8da8c7ff8 | 944 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Version constants for the generated Service Directory V1 API client.
module Google
  module Apis
    module ServicedirectoryV1
      # Version of the google-apis-servicedirectory_v1 gem
      GEM_VERSION = "0.4.0"

      # Version of the code generator used to generate this client
      GENERATOR_VERSION = "0.1.2"

      # Revision of the discovery document this client was generated from
      REVISION = "20210203"
    end
  end
end
| 32.551724 | 74 | 0.733051 |
0323d48406f5a8a2ce0eb42ba8e7136fc2ea2203 | 149 | class RemoveNumTotalEpisodeFromSeason < ActiveRecord::Migration[5.1]
# Reversible migration: drops seasons.num_total_episode. The :integer type
# is supplied so Rails can re-add the column on rollback.
def change
  remove_column :seasons, :num_total_episode, :integer
end
end
| 24.833333 | 68 | 0.791946 |
03ad7c116ed15d8a3465d43e8d72c76320711128 | 2,217 | class CrystalLang < Formula
desc "Fast and statically typed, compiled language with Ruby-like syntax"
homepage "http://crystal-lang.org/"
url "https://github.com/manastech/crystal/archive/0.10.1.tar.gz"
sha256 "f567866ea4cf7d0ca1356806f4871c964eca939ddcd93796ecc0e3f3889cd7f3"
head "https://github.com/manastech/crystal.git"
bottle do
sha256 "70711d694b0906c1d67a82f44357c70b568a16a3f1860e14c7fe643b197a76e8" => :el_capitan
sha256 "be67aa7cfea8d5531541c7c1baf9293fcf26a54e3a41a0be262f8636470f40c0" => :yosemite
sha256 "181a4a048d438a1cd1d3dc7e9f4bb729be5499d92d9f80d35e8daa6c677fe7c6" => :mavericks
end
resource "boot" do
url "https://github.com/manastech/crystal/releases/download/0.10.0/crystal-0.10.0-1-darwin-x86_64.tar.gz"
sha256 "a94562c2e683a6149accb6ec52f30e96ff2cd5a4cdbf3d0785181c9ec561f003"
end
resource "shards" do
url "https://github.com/ysbaddaden/shards/archive/v0.5.4.tar.gz"
sha256 "759a925347fa69a9fbd070e0ba7d9be2d5fe409a9bc9a6d1d29090f2045e63c1"
end
option "without-release", "Do not build the compiler in release mode"
option "without-shards", "Do not include `shards` dependency manager"
depends_on "libevent"
depends_on "libpcl"
depends_on "bdw-gc"
depends_on "llvm" => :build
depends_on "libyaml" if build.with?("shards")
def install
  # Crystal is self-hosted: a prebuilt bootstrap compiler is required.
  (buildpath/"boot").install resource("boot")

  # Embed the version string (git SHA for HEAD builds) into the compiler.
  if build.head?
    ENV["CRYSTAL_CONFIG_VERSION"] = `git rev-parse --short HEAD`.strip
  else
    ENV["CRYSTAL_CONFIG_VERSION"] = version
  end

  ENV["CRYSTAL_CONFIG_PATH"] = prefix/"src:libs"
  # Make the bootstrap compiler available to the build.
  ENV.append_path "PATH", "boot/bin"

  if build.with? "release"
    system "make", "crystal", "release=true"
  else
    # Non-release path: build only the LLVM extension, then compile the
    # compiler directly with the bootstrap binary.
    system "make", "llvm_ext"
    (buildpath/".build").mkpath
    system "bin/crystal", "build", "-o", ".build/crystal", "src/compiler/crystal.cr"
  end

  if build.with? "shards"
    # Build the shards dependency manager with the freshly built crystal.
    resource("shards").stage do
      system buildpath/"bin/crystal", "build", "-o", buildpath/".build/shards", "src/shards.cr"
    end

    bin.install ".build/shards"
  end

  bin.install ".build/crystal"
  # The stdlib sources are looked up at runtime via CRYSTAL_CONFIG_PATH.
  prefix.install "src"
end
# Smoke test: the installed compiler can evaluate a trivial program.
test do
  system "#{bin}/crystal", "eval", "puts 1"
end
end
| 32.602941 | 109 | 0.713577 |
e9d6748c4420f16f9875103cf087a2204a390950 | 101 | json.extract! @strategy, :id, :name, :draw_type, :range, :interest, :sigma, :created_at, :updated_at
| 50.5 | 100 | 0.712871 |
bbd31d8b55385639b4af62cc199c76ed89746229 | 85 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'google_timeline'
| 28.333333 | 58 | 0.752941 |
aba46eb6405e90c0c90247c48a72bdf521319a68 | 29,336 |
# A single square-inch cell of the fabric grid, identified by (x, y).
Point = Struct.new(:x, :y)
# A fabric claim: a rectangle of width `w` and height `h` with its top-left
# corner at (x, y), parsed from lines like "#1 @ 146,196: 19x14".
Claim = Struct.new(:id, :x, :y, :w, :h) do
  include Enumerable

  # Yields every Point covered by this claim. Returns an Enumerator when
  # called without a block (idiomatic for an Enumerable host; the original
  # silently returned nil in that case).
  def each
    return enum_for(:each) unless block_given?
    (x...(x + w)).each do |xp|
      (y...(y + h)).each do |yp|
        yield Point.new(xp, yp)
      end
    end
  end

  # Top-left corner of the rectangle.
  def top_left
    Point.new(x, y)
  end

  # Bottom-right corner (inclusive).
  def bottom_right
    Point.new(x + w - 1, y + h - 1)
  end

  # True when this rectangle shares at least one cell with `other`
  # (standard axis-aligned bounding-box separation test).
  def overlap?(other)
    return false if top_left.x > other.bottom_right.x
    return false if other.top_left.x > bottom_right.x
    return false if top_left.y > other.bottom_right.y
    return false if other.top_left.y > bottom_right.y
    true
  end

  # All claims from `others` overlapping this one.
  def find_overlaps(others)
    others.select { |o| overlap? o }
  end

  # Builds a Claim from an input line; the five integers in the line are
  # id, x, y, w, h in that order. (Parenthesized the splat to avoid the
  # ambiguous-argument warning.)
  def self.parse(line)
    Claim.new(*line.scan(/\d+/).map(&:to_i))
  end
end
# Parse every line of the embedded puzzle input (after __END__) into Claims.
INPUT = DATA.each_line.map { |l| Claim.parse l }.to_a.freeze
# Part 1: print (and return) the number of cells covered by two or more
# claims. Generalized with a backward-compatible parameter: `claims` may be
# any collection whose elements respond to #each yielding hashable cell
# identifiers; defaults to the puzzle INPUT.
def part1(claims = INPUT)
  coverage = Hash.new(0) # cell => number of claims covering it
  claims.each do |claim|
    claim.each { |point| coverage[point] += 1 }
  end
  p coverage.count { |_point, hits| hits > 1 }
end
# Part 2: print every claim that overlaps no other claim. Generalized with
# a backward-compatible `claims` parameter (defaults to the puzzle INPUT);
# the body now uses the parameter instead of the INPUT global. Returns the
# claims collection (Array#each return value).
def part2(claims = INPUT)
  claims.each do |claim|
    others = claims.reject { |other| other.id == claim.id }
    p claim if claim.find_overlaps(others).empty?
  end
end
# Solve both parts against the embedded puzzle input.
part1
part2
__END__
#1 @ 146,196: 19x14
#2 @ 641,817: 27x28
#3 @ 604,922: 11x17
#4 @ 323,671: 19x17
#5 @ 885,701: 18x27
#6 @ 636,818: 18x14
#7 @ 165,289: 22x12
#8 @ 431,568: 16x23
#9 @ 490,861: 24x25
#10 @ 529,203: 10x21
#11 @ 594,338: 18x26
#12 @ 772,19: 27x28
#13 @ 507,629: 23x26
#14 @ 257,387: 29x19
#15 @ 21,553: 10x13
#16 @ 979,609: 10x19
#17 @ 227,159: 13x23
#18 @ 588,359: 22x15
#19 @ 466,456: 10x18
#20 @ 659,166: 25x16
#21 @ 519,590: 14x12
#22 @ 125,80: 21x18
#23 @ 915,145: 21x14
#24 @ 245,510: 25x23
#25 @ 247,662: 26x20
#26 @ 549,725: 22x12
#27 @ 353,258: 16x29
#28 @ 485,172: 28x10
#29 @ 308,562: 17x17
#30 @ 586,318: 11x12
#31 @ 890,928: 20x13
#32 @ 614,596: 29x20
#33 @ 468,147: 15x23
#34 @ 806,441: 18x24
#35 @ 416,517: 26x26
#36 @ 98,622: 16x29
#37 @ 929,411: 22x25
#38 @ 935,464: 27x21
#39 @ 26,452: 12x15
#40 @ 595,454: 15x21
#41 @ 23,677: 14x11
#42 @ 762,918: 13x16
#43 @ 311,886: 13x29
#44 @ 585,653: 22x10
#45 @ 604,108: 15x13
#46 @ 344,46: 13x11
#47 @ 290,285: 25x24
#48 @ 332,616: 23x20
#49 @ 568,363: 20x15
#50 @ 643,370: 18x16
#51 @ 352,554: 14x25
#52 @ 489,37: 26x23
#53 @ 155,714: 19x15
#54 @ 421,219: 23x19
#55 @ 818,194: 18x13
#56 @ 75,592: 29x29
#57 @ 225,963: 26x24
#58 @ 598,699: 27x19
#59 @ 21,575: 10x10
#60 @ 247,676: 28x26
#61 @ 966,376: 11x10
#62 @ 280,310: 12x13
#63 @ 771,215: 29x18
#64 @ 857,95: 16x17
#65 @ 696,567: 16x14
#66 @ 804,322: 15x24
#67 @ 272,396: 12x29
#68 @ 613,606: 22x10
#69 @ 848,938: 22x27
#70 @ 300,643: 12x23
#71 @ 756,61: 27x25
#72 @ 386,231: 25x17
#73 @ 559,105: 19x29
#74 @ 597,109: 28x12
#75 @ 397,138: 16x10
#76 @ 37,691: 10x23
#77 @ 17,702: 21x18
#78 @ 738,82: 17x10
#79 @ 516,200: 23x14
#80 @ 34,48: 15x16
#81 @ 555,322: 11x22
#82 @ 84,648: 28x15
#83 @ 549,744: 13x13
#84 @ 147,344: 22x12
#85 @ 378,215: 28x18
#86 @ 726,620: 16x18
#87 @ 800,599: 15x20
#88 @ 128,858: 23x28
#89 @ 31,271: 10x28
#90 @ 671,75: 28x18
#91 @ 279,425: 19x21
#92 @ 145,76: 10x13
#93 @ 419,600: 27x28
#94 @ 69,400: 25x20
#95 @ 580,309: 21x22
#96 @ 933,544: 28x18
#97 @ 854,524: 22x15
#98 @ 668,574: 16x25
#99 @ 322,416: 21x21
#100 @ 371,716: 23x20
#101 @ 639,429: 11x26
#102 @ 561,698: 12x15
#103 @ 222,344: 12x23
#104 @ 929,671: 21x11
#105 @ 901,233: 24x19
#106 @ 135,238: 15x29
#107 @ 727,794: 16x23
#108 @ 700,103: 15x18
#109 @ 375,762: 21x14
#110 @ 514,666: 20x10
#111 @ 465,148: 14x17
#112 @ 388,181: 24x18
#113 @ 457,310: 13x13
#114 @ 83,142: 17x26
#115 @ 442,271: 23x23
#116 @ 30,551: 15x10
#117 @ 477,420: 16x26
#118 @ 853,534: 11x25
#119 @ 612,692: 29x25
#120 @ 322,557: 10x15
#121 @ 119,759: 11x10
#122 @ 469,281: 25x13
#123 @ 232,458: 24x14
#124 @ 887,251: 19x14
#125 @ 270,398: 12x20
#126 @ 906,657: 18x13
#127 @ 300,54: 16x16
#128 @ 87,498: 19x28
#129 @ 113,617: 29x19
#130 @ 501,636: 21x12
#131 @ 110,505: 23x17
#132 @ 156,262: 16x28
#133 @ 30,879: 25x20
#134 @ 966,232: 14x23
#135 @ 785,913: 14x19
#136 @ 558,202: 26x11
#137 @ 963,444: 14x10
#138 @ 531,537: 16x28
#139 @ 421,323: 25x23
#140 @ 73,105: 29x14
#141 @ 979,617: 14x18
#142 @ 7,559: 15x29
#143 @ 852,404: 14x19
#144 @ 638,95: 10x14
#145 @ 290,486: 12x27
#146 @ 589,818: 24x23
#147 @ 245,844: 13x13
#148 @ 64,663: 12x18
#149 @ 439,459: 27x26
#150 @ 962,777: 15x14
#151 @ 243,560: 13x28
#152 @ 85,813: 26x10
#153 @ 85,808: 14x16
#154 @ 579,284: 11x20
#155 @ 832,703: 21x23
#156 @ 299,578: 26x28
#157 @ 194,539: 25x25
#158 @ 634,576: 15x16
#159 @ 790,688: 10x21
#160 @ 507,247: 28x26
#161 @ 181,278: 10x18
#162 @ 756,815: 26x12
#163 @ 533,450: 19x23
#164 @ 905,643: 26x12
#165 @ 497,161: 14x18
#166 @ 406,168: 24x22
#167 @ 727,120: 20x10
#168 @ 800,555: 14x22
#169 @ 277,399: 13x13
#170 @ 928,426: 29x13
#171 @ 927,961: 16x21
#172 @ 348,499: 16x11
#173 @ 561,927: 10x17
#174 @ 701,671: 19x12
#175 @ 56,339: 18x13
#176 @ 929,761: 16x24
#177 @ 367,228: 4x4
#178 @ 451,919: 16x13
#179 @ 41,885: 10x12
#180 @ 687,59: 23x13
#181 @ 285,377: 16x20
#182 @ 818,801: 14x18
#183 @ 255,641: 25x25
#184 @ 474,672: 20x18
#185 @ 283,401: 3x8
#186 @ 781,19: 12x12
#187 @ 482,965: 28x28
#188 @ 941,472: 17x26
#189 @ 770,57: 19x20
#190 @ 296,151: 12x25
#191 @ 0,328: 17x18
#192 @ 247,462: 15x23
#193 @ 582,602: 24x27
#194 @ 24,715: 23x24
#195 @ 901,178: 11x16
#196 @ 482,533: 16x20
#197 @ 404,184: 15x13
#198 @ 50,915: 21x20
#199 @ 136,449: 25x16
#200 @ 491,975: 15x10
#201 @ 737,451: 19x29
#202 @ 192,46: 21x15
#203 @ 562,316: 18x22
#204 @ 326,586: 15x16
#205 @ 448,117: 16x20
#206 @ 634,875: 17x28
#207 @ 716,737: 24x19
#208 @ 907,711: 24x14
#209 @ 745,361: 22x21
#210 @ 254,644: 6x9
#211 @ 342,931: 25x18
#212 @ 234,169: 13x16
#213 @ 134,905: 20x12
#214 @ 735,577: 17x10
#215 @ 953,804: 23x19
#216 @ 969,630: 23x25
#217 @ 681,443: 11x11
#218 @ 338,296: 13x24
#219 @ 780,699: 17x28
#220 @ 721,598: 23x23
#221 @ 426,135: 23x20
#222 @ 367,526: 19x29
#223 @ 626,893: 24x14
#224 @ 446,268: 21x19
#225 @ 470,108: 22x21
#226 @ 977,7: 13x25
#227 @ 835,111: 29x26
#228 @ 196,682: 20x18
#229 @ 101,717: 28x17
#230 @ 284,40: 11x10
#231 @ 454,521: 22x12
#232 @ 642,889: 24x26
#233 @ 49,378: 22x17
#234 @ 474,15: 11x14
#235 @ 292,921: 22x10
#236 @ 747,868: 21x19
#237 @ 759,202: 23x25
#238 @ 647,580: 14x15
#239 @ 664,68: 28x12
#240 @ 693,240: 11x29
#241 @ 897,678: 24x14
#242 @ 456,510: 23x16
#243 @ 173,780: 20x20
#244 @ 409,480: 28x19
#245 @ 10,294: 10x11
#246 @ 250,253: 15x25
#247 @ 613,82: 28x28
#248 @ 660,431: 22x15
#249 @ 825,116: 28x25
#250 @ 938,705: 11x14
#251 @ 663,480: 28x23
#252 @ 360,133: 18x26
#253 @ 437,122: 20x23
#254 @ 702,186: 29x12
#255 @ 61,618: 26x28
#256 @ 696,614: 11x13
#257 @ 12,509: 11x15
#258 @ 210,968: 13x29
#259 @ 471,544: 18x14
#260 @ 786,514: 19x12
#261 @ 824,188: 17x27
#262 @ 959,328: 29x28
#263 @ 741,723: 23x19
#264 @ 55,488: 25x11
#265 @ 669,239: 27x28
#266 @ 622,488: 23x23
#267 @ 161,94: 13x20
#268 @ 920,184: 26x17
#269 @ 560,109: 21x15
#270 @ 265,816: 25x14
#271 @ 404,128: 23x22
#272 @ 26,698: 22x24
#273 @ 973,708: 15x25
#274 @ 53,236: 24x22
#275 @ 966,982: 21x17
#276 @ 869,534: 22x12
#277 @ 7,695: 22x22
#278 @ 460,464: 17x15
#279 @ 692,102: 28x12
#280 @ 714,598: 11x23
#281 @ 281,717: 16x24
#282 @ 944,238: 11x20
#283 @ 653,608: 11x16
#284 @ 898,584: 29x18
#285 @ 900,320: 12x27
#286 @ 14,2: 19x21
#287 @ 778,915: 24x13
#288 @ 50,894: 20x22
#289 @ 754,340: 19x11
#290 @ 928,951: 17x23
#291 @ 773,969: 7x13
#292 @ 916,437: 19x24
#293 @ 494,287: 29x22
#294 @ 498,30: 19x23
#295 @ 50,75: 27x20
#296 @ 740,348: 20x21
#297 @ 33,81: 17x24
#298 @ 848,94: 10x23
#299 @ 404,310: 23x29
#300 @ 517,578: 19x21
#301 @ 218,935: 24x17
#302 @ 831,575: 17x24
#303 @ 124,915: 27x23
#304 @ 871,785: 15x21
#305 @ 580,125: 23x19
#306 @ 241,193: 28x16
#307 @ 871,777: 12x22
#308 @ 781,32: 28x10
#309 @ 533,80: 17x27
#310 @ 255,330: 24x20
#311 @ 771,967: 17x18
#312 @ 356,648: 11x26
#313 @ 626,839: 23x10
#314 @ 335,414: 12x14
#315 @ 143,713: 29x26
#316 @ 53,422: 21x14
#317 @ 64,27: 11x29
#318 @ 74,708: 28x20
#319 @ 790,7: 16x24
#320 @ 347,733: 29x27
#321 @ 732,784: 16x24
#322 @ 4,662: 28x16
#323 @ 715,610: 15x22
#324 @ 868,225: 17x19
#325 @ 722,357: 15x14
#326 @ 60,107: 20x24
#327 @ 513,68: 29x28
#328 @ 447,664: 24x23
#329 @ 194,177: 19x26
#330 @ 95,759: 19x10
#331 @ 781,646: 20x21
#332 @ 36,350: 22x17
#333 @ 953,956: 14x22
#334 @ 599,658: 25x24
#335 @ 108,766: 11x17
#336 @ 776,37: 15x24
#337 @ 272,392: 22x29
#338 @ 70,698: 25x17
#339 @ 817,614: 12x22
#340 @ 834,588: 12x27
#341 @ 248,531: 24x13
#342 @ 290,556: 28x24
#343 @ 358,408: 27x13
#344 @ 325,375: 27x20
#345 @ 408,463: 12x24
#346 @ 823,113: 14x20
#347 @ 874,327: 29x10
#348 @ 513,334: 23x28
#349 @ 864,784: 29x27
#350 @ 355,767: 21x11
#351 @ 878,882: 16x11
#352 @ 608,439: 19x26
#353 @ 672,356: 26x26
#354 @ 173,255: 26x25
#355 @ 433,141: 24x20
#356 @ 652,765: 17x23
#357 @ 327,506: 16x24
#358 @ 967,231: 16x29
#359 @ 70,770: 28x12
#360 @ 545,44: 10x23
#361 @ 350,386: 20x27
#362 @ 538,653: 22x21
#363 @ 592,936: 18x17
#364 @ 847,817: 23x22
#365 @ 725,750: 20x25
#366 @ 151,695: 29x26
#367 @ 742,41: 26x14
#368 @ 481,865: 26x20
#369 @ 520,189: 26x10
#370 @ 487,394: 16x17
#371 @ 220,156: 12x22
#372 @ 880,885: 26x28
#373 @ 727,483: 13x14
#374 @ 608,360: 11x16
#375 @ 750,620: 29x14
#376 @ 697,955: 20x16
#377 @ 239,347: 16x26
#378 @ 803,205: 15x22
#379 @ 60,287: 24x17
#380 @ 73,626: 15x22
#381 @ 919,704: 12x28
#382 @ 660,734: 16x22
#383 @ 741,259: 27x29
#384 @ 519,458: 19x15
#385 @ 269,283: 27x12
#386 @ 583,714: 18x12
#387 @ 84,663: 16x10
#388 @ 351,225: 26x13
#389 @ 549,729: 20x13
#390 @ 9,454: 16x12
#391 @ 959,646: 11x24
#392 @ 531,979: 20x18
#393 @ 80,134: 20x3
#394 @ 894,906: 15x26
#395 @ 19,174: 29x11
#396 @ 46,585: 12x17
#397 @ 206,818: 10x25
#398 @ 796,705: 26x14
#399 @ 544,362: 15x18
#400 @ 544,78: 12x29
#401 @ 307,826: 18x20
#402 @ 911,549: 23x18
#403 @ 608,528: 14x28
#404 @ 315,729: 24x24
#405 @ 654,207: 27x11
#406 @ 922,20: 14x13
#407 @ 959,331: 13x21
#408 @ 635,553: 27x25
#409 @ 943,476: 25x22
#410 @ 831,103: 27x27
#411 @ 821,951: 14x28
#412 @ 685,320: 16x13
#413 @ 1,596: 10x21
#414 @ 878,279: 19x19
#415 @ 584,621: 21x14
#416 @ 922,937: 17x17
#417 @ 343,750: 26x21
#418 @ 805,24: 20x15
#419 @ 719,852: 15x13
#420 @ 606,430: 17x23
#421 @ 726,355: 20x22
#422 @ 779,895: 10x22
#423 @ 652,729: 18x19
#424 @ 106,867: 24x22
#425 @ 17,520: 11x11
#426 @ 145,374: 28x12
#427 @ 129,294: 16x16
#428 @ 586,813: 13x17
#429 @ 194,535: 10x24
#430 @ 836,175: 17x15
#431 @ 184,785: 29x10
#432 @ 926,6: 26x19
#433 @ 606,348: 15x20
#434 @ 169,151: 17x19
#435 @ 104,100: 13x29
#436 @ 13,282: 15x20
#437 @ 664,724: 11x29
#438 @ 822,484: 13x11
#439 @ 938,274: 28x18
#440 @ 134,845: 19x16
#441 @ 517,445: 19x26
#442 @ 622,646: 12x26
#443 @ 504,360: 26x25
#444 @ 492,429: 21x10
#445 @ 122,726: 10x12
#446 @ 198,558: 11x11
#447 @ 712,213: 12x19
#448 @ 830,590: 16x14
#449 @ 607,380: 13x16
#450 @ 56,925: 25x20
#451 @ 459,135: 14x21
#452 @ 850,330: 23x28
#453 @ 854,149: 19x14
#454 @ 881,538: 13x27
#455 @ 328,99: 23x20
#456 @ 246,243: 13x20
#457 @ 655,372: 10x29
#458 @ 493,733: 27x23
#459 @ 12,969: 28x16
#460 @ 57,763: 16x15
#461 @ 46,965: 17x14
#462 @ 768,946: 13x20
#463 @ 0,874: 23x14
#464 @ 218,950: 18x15
#465 @ 341,623: 20x15
#466 @ 911,94: 10x17
#467 @ 485,376: 26x24
#468 @ 977,640: 10x8
#469 @ 971,848: 10x11
#470 @ 889,214: 15x17
#471 @ 206,337: 14x12
#472 @ 4,450: 25x29
#473 @ 21,288: 12x18
#474 @ 959,1: 25x10
#475 @ 113,905: 17x25
#476 @ 325,830: 17x12
#477 @ 973,142: 26x27
#478 @ 330,599: 10x25
#479 @ 840,838: 28x23
#480 @ 489,682: 28x12
#481 @ 567,199: 23x12
#482 @ 504,278: 29x13
#483 @ 816,171: 17x25
#484 @ 98,509: 13x21
#485 @ 664,352: 20x18
#486 @ 313,922: 28x16
#487 @ 756,923: 16x19
#488 @ 238,90: 29x14
#489 @ 428,164: 14x13
#490 @ 213,951: 16x24
#491 @ 796,516: 12x17
#492 @ 954,611: 26x17
#493 @ 356,653: 20x12
#494 @ 863,236: 15x12
#495 @ 772,23: 26x16
#496 @ 4,870: 14x29
#497 @ 457,679: 28x12
#498 @ 94,298: 27x23
#499 @ 388,801: 22x22
#500 @ 226,73: 29x23
#501 @ 417,914: 29x17
#502 @ 89,601: 28x14
#503 @ 881,222: 13x19
#504 @ 281,448: 18x23
#505 @ 157,111: 25x10
#506 @ 719,452: 13x28
#507 @ 823,893: 27x23
#508 @ 294,426: 14x10
#509 @ 166,804: 21x12
#510 @ 829,366: 24x23
#511 @ 124,757: 17x12
#512 @ 654,768: 10x15
#513 @ 900,25: 11x24
#514 @ 518,201: 19x15
#515 @ 172,486: 13x27
#516 @ 292,54: 22x12
#517 @ 706,554: 22x16
#518 @ 785,653: 19x10
#519 @ 815,597: 14x24
#520 @ 682,622: 29x26
#521 @ 145,434: 12x24
#522 @ 778,898: 22x25
#523 @ 667,191: 10x15
#524 @ 195,697: 27x17
#525 @ 297,615: 4x14
#526 @ 792,582: 26x19
#527 @ 306,483: 13x20
#528 @ 108,835: 28x11
#529 @ 143,190: 26x25
#530 @ 866,794: 11x17
#531 @ 405,71: 20x26
#532 @ 511,229: 28x29
#533 @ 746,288: 28x25
#534 @ 525,194: 13x14
#535 @ 412,459: 15x18
#536 @ 43,977: 13x18
#537 @ 169,896: 9x9
#538 @ 8,706: 13x25
#539 @ 66,154: 8x10
#540 @ 59,380: 27x22
#541 @ 961,973: 17x20
#542 @ 820,448: 21x16
#543 @ 871,887: 14x13
#544 @ 867,352: 24x14
#545 @ 871,210: 10x23
#546 @ 232,550: 28x13
#547 @ 47,162: 24x20
#548 @ 887,346: 29x25
#549 @ 884,23: 21x13
#550 @ 138,61: 24x11
#551 @ 236,323: 22x16
#552 @ 156,248: 17x24
#553 @ 541,716: 27x13
#554 @ 479,136: 11x24
#555 @ 887,527: 20x23
#556 @ 265,338: 27x25
#557 @ 198,911: 24x20
#558 @ 763,640: 24x28
#559 @ 463,827: 13x25
#560 @ 909,445: 11x29
#561 @ 744,854: 17x23
#562 @ 959,637: 12x24
#563 @ 288,310: 14x22
#564 @ 836,16: 28x22
#565 @ 120,234: 28x24
#566 @ 935,441: 19x13
#567 @ 421,529: 26x24
#568 @ 392,812: 23x18
#569 @ 765,872: 27x25
#570 @ 902,523: 25x18
#571 @ 494,751: 10x26
#572 @ 15,838: 29x14
#573 @ 159,752: 19x28
#574 @ 941,756: 15x24
#575 @ 916,524: 11x25
#576 @ 350,56: 15x20
#577 @ 487,156: 20x19
#578 @ 269,597: 20x11
#579 @ 525,559: 18x26
#580 @ 107,610: 20x14
#581 @ 64,909: 20x27
#582 @ 410,681: 19x25
#583 @ 126,171: 11x18
#584 @ 206,158: 20x25
#585 @ 338,926: 22x28
#586 @ 662,78: 22x16
#587 @ 229,215: 28x28
#588 @ 919,720: 11x12
#589 @ 793,881: 17x22
#590 @ 762,806: 27x17
#591 @ 0,587: 19x15
#592 @ 456,83: 15x11
#593 @ 299,165: 25x27
#594 @ 209,610: 19x11
#595 @ 8,520: 15x27
#596 @ 91,698: 14x22
#597 @ 229,950: 16x25
#598 @ 414,465: 19x21
#599 @ 355,519: 25x29
#600 @ 331,725: 11x23
#601 @ 783,961: 21x10
#602 @ 681,921: 19x24
#603 @ 697,306: 17x16
#604 @ 807,316: 10x25
#605 @ 957,155: 27x25
#606 @ 748,750: 12x19
#607 @ 374,209: 16x11
#608 @ 290,708: 29x17
#609 @ 591,775: 12x20
#610 @ 374,625: 21x21
#611 @ 110,200: 19x23
#612 @ 925,299: 24x19
#613 @ 935,826: 28x21
#614 @ 164,481: 27x20
#615 @ 535,282: 14x7
#616 @ 867,189: 24x26
#617 @ 700,876: 20x16
#618 @ 465,33: 18x15
#619 @ 672,258: 10x21
#620 @ 666,548: 16x15
#621 @ 830,141: 12x23
#622 @ 985,246: 11x29
#623 @ 494,971: 27x22
#624 @ 666,921: 19x14
#625 @ 254,374: 26x17
#626 @ 141,708: 14x19
#627 @ 685,830: 12x11
#628 @ 53,922: 21x29
#629 @ 192,553: 26x28
#630 @ 367,153: 29x20
#631 @ 742,937: 21x14
#632 @ 123,661: 14x22
#633 @ 723,295: 27x19
#634 @ 49,682: 29x24
#635 @ 556,718: 28x22
#636 @ 281,533: 12x13
#637 @ 943,310: 19x20
#638 @ 837,394: 20x11
#639 @ 297,41: 11x24
#640 @ 585,722: 17x29
#641 @ 7,841: 15x13
#642 @ 92,710: 27x27
#643 @ 54,22: 17x23
#644 @ 952,785: 17x16
#645 @ 32,975: 22x14
#646 @ 974,680: 3x8
#647 @ 605,709: 13x18
#648 @ 805,629: 28x28
#649 @ 517,313: 15x16
#650 @ 528,330: 19x21
#651 @ 547,51: 22x14
#652 @ 807,787: 21x24
#653 @ 773,592: 20x24
#654 @ 1,97: 26x23
#655 @ 353,53: 16x14
#656 @ 441,578: 15x10
#657 @ 36,677: 13x18
#658 @ 782,61: 13x10
#659 @ 648,88: 12x13
#660 @ 3,413: 26x16
#661 @ 280,979: 21x21
#662 @ 234,860: 16x18
#663 @ 568,961: 17x19
#664 @ 740,437: 28x23
#665 @ 545,85: 19x25
#666 @ 295,612: 10x21
#667 @ 657,218: 26x10
#668 @ 161,154: 26x22
#669 @ 11,945: 13x12
#670 @ 185,528: 28x14
#671 @ 746,943: 14x28
#672 @ 321,950: 19x12
#673 @ 689,30: 14x11
#674 @ 934,241: 15x24
#675 @ 309,401: 10x18
#676 @ 956,49: 26x21
#677 @ 895,82: 11x27
#678 @ 557,138: 25x10
#679 @ 950,841: 26x16
#680 @ 761,462: 14x20
#681 @ 657,162: 12x14
#682 @ 762,346: 20x12
#683 @ 168,247: 14x18
#684 @ 288,464: 15x15
#685 @ 38,642: 12x23
#686 @ 683,229: 25x17
#687 @ 527,441: 23x21
#688 @ 136,878: 16x15
#689 @ 929,430: 18x24
#690 @ 435,575: 29x26
#691 @ 705,385: 21x23
#692 @ 204,319: 21x29
#693 @ 331,394: 14x29
#694 @ 377,625: 16x12
#695 @ 105,286: 27x14
#696 @ 253,276: 24x24
#697 @ 773,811: 22x26
#698 @ 112,417: 12x21
#699 @ 666,593: 21x19
#700 @ 817,366: 10x13
#701 @ 90,733: 24x29
#702 @ 538,209: 10x15
#703 @ 860,522: 27x20
#704 @ 31,503: 28x14
#705 @ 219,350: 27x24
#706 @ 335,726: 18x21
#707 @ 42,589: 13x12
#708 @ 937,601: 23x23
#709 @ 868,846: 18x16
#710 @ 487,765: 13x20
#711 @ 637,887: 19x12
#712 @ 773,53: 14x12
#713 @ 179,908: 22x25
#714 @ 29,657: 14x18
#715 @ 670,79: 18x28
#716 @ 15,517: 15x22
#717 @ 254,192: 20x22
#718 @ 771,419: 18x18
#719 @ 92,110: 20x24
#720 @ 950,348: 24x13
#721 @ 439,232: 19x29
#722 @ 506,964: 10x16
#723 @ 493,675: 29x28
#724 @ 736,579: 15x14
#725 @ 511,301: 17x13
#726 @ 198,956: 11x15
#727 @ 365,505: 15x14
#728 @ 252,335: 15x23
#729 @ 499,35: 15x12
#730 @ 568,402: 27x16
#731 @ 792,21: 18x12
#732 @ 272,672: 12x26
#733 @ 205,984: 14x12
#734 @ 187,51: 17x25
#735 @ 598,450: 21x24
#736 @ 36,865: 19x27
#737 @ 944,650: 24x13
#738 @ 594,120: 21x22
#739 @ 878,630: 26x13
#740 @ 657,370: 12x12
#741 @ 456,437: 14x25
#742 @ 337,631: 19x27
#743 @ 874,665: 14x13
#744 @ 367,372: 27x24
#745 @ 906,711: 27x20
#746 @ 124,418: 27x20
#747 @ 581,281: 27x27
#748 @ 152,695: 14x28
#749 @ 920,533: 21x15
#750 @ 630,50: 11x16
#751 @ 679,277: 29x21
#752 @ 970,763: 22x18
#753 @ 368,367: 28x22
#754 @ 851,844: 15x22
#755 @ 342,42: 25x20
#756 @ 252,284: 11x11
#757 @ 509,656: 22x24
#758 @ 976,838: 22x16
#759 @ 46,408: 28x19
#760 @ 446,37: 10x29
#761 @ 649,894: 20x23
#762 @ 857,136: 20x19
#763 @ 475,297: 24x25
#764 @ 78,132: 25x10
#765 @ 244,868: 10x13
#766 @ 869,202: 12x11
#767 @ 471,669: 23x10
#768 @ 665,166: 18x29
#769 @ 65,248: 28x22
#770 @ 470,265: 14x21
#771 @ 734,62: 26x12
#772 @ 360,766: 29x26
#773 @ 759,457: 27x15
#774 @ 700,650: 24x15
#775 @ 666,363: 13x12
#776 @ 755,24: 14x16
#777 @ 777,614: 28x10
#778 @ 176,589: 25x19
#779 @ 934,671: 13x29
#780 @ 291,49: 12x15
#781 @ 934,36: 27x19
#782 @ 74,471: 17x10
#783 @ 949,478: 5x13
#784 @ 798,458: 28x27
#785 @ 203,970: 22x11
#786 @ 948,418: 28x27
#787 @ 230,520: 22x11
#788 @ 80,642: 10x10
#789 @ 840,910: 18x28
#790 @ 742,544: 17x10
#791 @ 685,303: 13x23
#792 @ 968,702: 15x27
#793 @ 93,188: 21x12
#794 @ 333,602: 20x20
#795 @ 519,574: 28x22
#796 @ 6,405: 14x19
#797 @ 279,438: 13x16
#798 @ 973,846: 12x29
#799 @ 325,718: 22x18
#800 @ 325,949: 14x10
#801 @ 645,374: 19x15
#802 @ 763,332: 25x15
#803 @ 720,452: 27x26
#804 @ 815,717: 21x28
#805 @ 755,3: 13x26
#806 @ 487,678: 29x24
#807 @ 127,501: 22x24
#808 @ 735,123: 16x20
#809 @ 245,924: 10x20
#810 @ 92,566: 26x17
#811 @ 274,692: 15x17
#812 @ 249,469: 27x16
#813 @ 61,152: 23x17
#814 @ 299,827: 27x21
#815 @ 76,191: 26x12
#816 @ 673,447: 19x27
#817 @ 312,131: 12x15
#818 @ 143,235: 17x27
#819 @ 57,258: 23x11
#820 @ 357,405: 14x20
#821 @ 858,751: 16x15
#822 @ 978,870: 11x20
#823 @ 916,221: 21x20
#824 @ 467,6: 29x14
#825 @ 757,946: 17x24
#826 @ 604,100: 21x27
#827 @ 734,112: 16x24
#828 @ 816,365: 14x10
#829 @ 303,800: 16x27
#830 @ 453,144: 16x19
#831 @ 782,955: 10x18
#832 @ 442,658: 14x18
#833 @ 804,427: 21x27
#834 @ 328,450: 10x20
#835 @ 728,48: 23x18
#836 @ 683,625: 20x27
#837 @ 598,129: 15x25
#838 @ 191,59: 6x8
#839 @ 532,153: 11x19
#840 @ 637,837: 11x14
#841 @ 746,290: 11x23
#842 @ 856,11: 22x13
#843 @ 617,53: 16x23
#844 @ 76,241: 16x16
#845 @ 582,723: 18x13
#846 @ 848,841: 29x20
#847 @ 20,458: 16x22
#848 @ 663,194: 15x16
#849 @ 573,115: 25x14
#850 @ 742,278: 22x19
#851 @ 218,326: 24x24
#852 @ 319,375: 27x10
#853 @ 694,531: 28x12
#854 @ 210,371: 13x25
#855 @ 539,244: 16x16
#856 @ 912,538: 22x14
#857 @ 3,321: 26x16
#858 @ 540,143: 28x25
#859 @ 857,523: 23x26
#860 @ 533,280: 22x12
#861 @ 344,824: 8x9
#862 @ 922,957: 28x20
#863 @ 649,451: 27x28
#864 @ 287,498: 22x12
#865 @ 106,613: 26x11
#866 @ 815,614: 10x13
#867 @ 548,961: 15x27
#868 @ 872,343: 24x18
#869 @ 72,374: 12x28
#870 @ 634,588: 11x21
#871 @ 744,866: 19x10
#872 @ 84,466: 11x28
#873 @ 655,185: 17x14
#874 @ 311,124: 15x10
#875 @ 814,190: 12x15
#876 @ 885,660: 17x14
#877 @ 750,580: 12x29
#878 @ 211,306: 4x4
#879 @ 137,60: 18x29
#880 @ 433,934: 14x15
#881 @ 757,343: 18x18
#882 @ 847,839: 23x11
#883 @ 3,943: 22x22
#884 @ 847,750: 15x18
#885 @ 167,894: 18x16
#886 @ 909,872: 13x15
#887 @ 747,534: 27x12
#888 @ 961,5: 22x10
#889 @ 919,651: 24x10
#890 @ 925,714: 18x11
#891 @ 51,468: 18x24
#892 @ 242,471: 24x21
#893 @ 590,324: 12x24
#894 @ 46,300: 20x18
#895 @ 658,467: 19x22
#896 @ 769,397: 18x15
#897 @ 100,511: 16x24
#898 @ 633,49: 24x15
#899 @ 752,621: 12x19
#900 @ 380,362: 16x12
#901 @ 746,586: 19x18
#902 @ 236,138: 27x16
#903 @ 958,875: 22x25
#904 @ 366,753: 12x23
#905 @ 916,876: 13x23
#906 @ 214,565: 24x17
#907 @ 45,30: 23x27
#908 @ 390,947: 24x15
#909 @ 933,270: 10x10
#910 @ 125,833: 26x21
#911 @ 104,186: 18x10
#912 @ 576,66: 23x26
#913 @ 798,9: 4x19
#914 @ 406,817: 16x11
#915 @ 893,341: 29x20
#916 @ 62,652: 26x17
#917 @ 666,807: 29x29
#918 @ 757,760: 23x12
#919 @ 584,359: 16x22
#920 @ 68,927: 10x14
#921 @ 939,417: 27x28
#922 @ 60,510: 16x24
#923 @ 472,114: 25x15
#924 @ 497,502: 21x25
#925 @ 933,958: 22x23
#926 @ 212,631: 28x21
#927 @ 781,299: 12x28
#928 @ 304,51: 26x18
#929 @ 411,819: 6x3
#930 @ 341,760: 17x14
#931 @ 73,242: 26x29
#932 @ 350,574: 17x11
#933 @ 320,373: 22x28
#934 @ 932,814: 29x21
#935 @ 692,679: 21x13
#936 @ 670,431: 16x16
#937 @ 448,905: 22x16
#938 @ 68,235: 22x17
#939 @ 282,524: 12x19
#940 @ 268,692: 10x22
#941 @ 612,72: 10x25
#942 @ 476,47: 27x20
#943 @ 387,78: 13x27
#944 @ 337,910: 25x16
#945 @ 322,504: 10x21
#946 @ 96,828: 20x11
#947 @ 245,942: 16x28
#948 @ 433,283: 11x25
#949 @ 926,639: 22x19
#950 @ 277,494: 27x27
#951 @ 875,882: 18x22
#952 @ 173,237: 11x25
#953 @ 974,216: 23x26
#954 @ 648,574: 12x24
#955 @ 91,189: 16x13
#956 @ 175,598: 24x17
#957 @ 221,238: 13x11
#958 @ 685,40: 27x15
#959 @ 736,40: 29x21
#960 @ 973,829: 13x25
#961 @ 826,364: 29x11
#962 @ 270,379: 10x22
#963 @ 277,302: 24x21
#964 @ 721,333: 28x16
#965 @ 272,740: 16x12
#966 @ 209,304: 11x12
#967 @ 546,653: 22x18
#968 @ 306,636: 14x29
#969 @ 925,705: 15x29
#970 @ 121,594: 21x22
#971 @ 313,455: 18x26
#972 @ 64,630: 12x25
#973 @ 909,169: 21x16
#974 @ 843,60: 20x10
#975 @ 754,541: 22x13
#976 @ 877,875: 15x11
#977 @ 502,560: 19x22
#978 @ 717,201: 18x17
#979 @ 873,743: 27x21
#980 @ 798,564: 16x10
#981 @ 597,614: 16x18
#982 @ 421,4: 27x21
#983 @ 401,146: 10x11
#984 @ 338,282: 27x19
#985 @ 206,607: 12x19
#986 @ 867,355: 11x15
#987 @ 947,380: 29x24
#988 @ 200,773: 17x14
#989 @ 494,277: 20x13
#990 @ 47,349: 18x13
#991 @ 790,613: 10x13
#992 @ 292,484: 29x15
#993 @ 177,756: 29x23
#994 @ 115,93: 15x17
#995 @ 597,66: 10x24
#996 @ 234,529: 29x12
#997 @ 494,488: 12x25
#998 @ 668,90: 11x20
#999 @ 824,963: 26x14
#1000 @ 101,714: 18x19
#1001 @ 32,661: 18x20
#1002 @ 311,667: 22x17
#1003 @ 362,276: 19x15
#1004 @ 25,815: 15x25
#1005 @ 234,309: 23x15
#1006 @ 632,54: 11x14
#1007 @ 960,967: 20x23
#1008 @ 386,711: 18x10
#1009 @ 391,82: 28x16
#1010 @ 175,918: 18x7
#1011 @ 681,376: 25x23
#1012 @ 149,735: 13x11
#1013 @ 902,656: 25x23
#1014 @ 414,619: 14x17
#1015 @ 115,306: 25x10
#1016 @ 588,36: 19x14
#1017 @ 590,25: 26x16
#1018 @ 609,371: 28x18
#1019 @ 731,23: 27x20
#1020 @ 307,567: 11x12
#1021 @ 333,615: 28x17
#1022 @ 226,260: 28x27
#1023 @ 695,49: 20x24
#1024 @ 435,447: 18x22
#1025 @ 968,692: 23x19
#1026 @ 962,215: 13x23
#1027 @ 119,468: 10x23
#1028 @ 310,579: 12x12
#1029 @ 749,85: 17x23
#1030 @ 862,196: 13x21
#1031 @ 671,22: 29x12
#1032 @ 937,806: 21x23
#1033 @ 68,91: 17x25
#1034 @ 967,651: 11x13
#1035 @ 96,569: 28x19
#1036 @ 634,717: 29x27
#1037 @ 173,538: 22x13
#1038 @ 957,333: 25x16
#1039 @ 459,443: 27x23
#1040 @ 102,183: 10x26
#1041 @ 612,877: 29x13
#1042 @ 862,622: 21x21
#1043 @ 980,940: 19x10
#1044 @ 154,271: 29x27
#1045 @ 379,833: 26x14
#1046 @ 983,798: 10x29
#1047 @ 485,730: 28x11
#1048 @ 270,305: 21x27
#1049 @ 243,388: 22x14
#1050 @ 873,327: 25x12
#1051 @ 231,72: 19x14
#1052 @ 639,582: 14x22
#1053 @ 335,820: 23x19
#1054 @ 130,711: 16x22
#1055 @ 419,625: 5x3
#1056 @ 687,817: 14x26
#1057 @ 882,561: 11x25
#1058 @ 766,291: 16x20
#1059 @ 127,86: 15x11
#1060 @ 869,851: 19x16
#1061 @ 234,462: 11x27
#1062 @ 196,829: 26x12
#1063 @ 821,142: 23x10
#1064 @ 276,964: 13x28
#1065 @ 650,86: 20x13
#1066 @ 84,162: 25x14
#1067 @ 217,458: 23x12
#1068 @ 538,719: 21x27
#1069 @ 8,698: 11x26
#1070 @ 754,521: 14x20
#1071 @ 907,230: 24x12
#1072 @ 888,287: 15x17
#1073 @ 688,534: 27x17
#1074 @ 678,215: 10x19
#1075 @ 820,871: 10x14
#1076 @ 949,433: 16x25
#1077 @ 279,215: 14x10
#1078 @ 41,80: 15x28
#1079 @ 54,507: 15x25
#1080 @ 14,835: 26x17
#1081 @ 269,506: 29x17
#1082 @ 275,745: 12x15
#1083 @ 416,14: 10x13
#1084 @ 530,338: 16x29
#1085 @ 420,924: 19x17
#1086 @ 807,212: 23x19
#1087 @ 455,62: 26x26
#1088 @ 683,84: 13x23
#1089 @ 884,738: 26x21
#1090 @ 720,834: 21x19
#1091 @ 194,174: 20x16
#1092 @ 240,528: 11x23
#1093 @ 971,678: 11x13
#1094 @ 584,783: 20x11
#1095 @ 40,937: 24x23
#1096 @ 776,430: 21x17
#1097 @ 834,589: 17x28
#1098 @ 465,321: 23x11
#1099 @ 253,130: 14x25
#1100 @ 129,611: 17x10
#1101 @ 909,521: 26x27
#1102 @ 401,944: 17x22
#1103 @ 470,63: 16x24
#1104 @ 616,651: 18x14
#1105 @ 70,691: 16x20
#1106 @ 412,470: 13x25
#1107 @ 219,53: 23x28
#1108 @ 875,876: 12x19
#1109 @ 717,854: 11x27
#1110 @ 249,637: 21x21
#1111 @ 972,274: 18x18
#1112 @ 657,495: 13x25
#1113 @ 530,696: 22x29
#1114 @ 137,107: 19x19
#1115 @ 757,364: 15x20
#1116 @ 936,834: 23x17
#1117 @ 945,441: 27x18
#1118 @ 137,454: 10x29
#1119 @ 121,417: 19x16
#1120 @ 631,107: 14x15
#1121 @ 374,827: 18x24
#1122 @ 451,802: 16x28
#1123 @ 901,305: 22x23
#1124 @ 935,330: 28x26
#1125 @ 7,687: 11x15
#1126 @ 631,494: 14x28
#1127 @ 130,44: 16x29
#1128 @ 713,945: 10x12
#1129 @ 336,905: 18x29
#1130 @ 234,298: 21x15
#1131 @ 393,244: 18x25
#1132 @ 217,448: 11x27
#1133 @ 242,948: 10x22
#1134 @ 516,148: 17x15
#1135 @ 841,735: 16x29
#1136 @ 986,923: 10x22
#1137 @ 269,659: 13x19
#1138 @ 237,851: 29x26
#1139 @ 781,655: 16x19
#1140 @ 522,350: 10x18
#1141 @ 496,482: 18x16
#1142 @ 945,8: 29x10
#1143 @ 527,256: 28x24
#1144 @ 292,224: 15x17
#1145 @ 435,483: 28x24
#1146 @ 854,613: 18x11
#1147 @ 559,930: 17x16
#1148 @ 442,247: 12x26
#1149 @ 400,720: 15x25
#1150 @ 59,651: 18x18
#1151 @ 448,453: 12x15
#1152 @ 257,978: 22x21
#1153 @ 597,550: 24x11
#1154 @ 529,448: 25x23
#1155 @ 684,790: 27x29
#1156 @ 565,390: 14x18
#1157 @ 284,581: 15x23
#1158 @ 961,338: 18x11
#1159 @ 151,897: 20x18
#1160 @ 11,656: 10x16
#1161 @ 577,297: 24x19
#1162 @ 132,101: 22x21
#1163 @ 790,201: 25x14
#1164 @ 883,736: 12x24
#1165 @ 785,896: 18x27
#1166 @ 528,270: 29x21
#1167 @ 229,576: 22x28
#1168 @ 376,838: 22x14
#1169 @ 772,84: 19x27
#1170 @ 465,903: 24x20
#1171 @ 336,107: 24x20
#1172 @ 282,807: 25x18
#1173 @ 231,646: 15x14
#1174 @ 247,468: 24x23
#1175 @ 708,50: 25x22
#1176 @ 121,472: 11x15
#1177 @ 753,0: 23x11
#1178 @ 669,255: 25x26
#1179 @ 846,858: 21x26
#1180 @ 720,113: 25x23
#1181 @ 217,365: 19x25
#1182 @ 964,449: 11x19
#1183 @ 299,414: 11x19
#1184 @ 258,668: 22x29
#1185 @ 81,527: 29x28
#1186 @ 420,893: 13x26
#1187 @ 146,370: 23x25
#1188 @ 708,462: 27x25
#1189 @ 19,4: 10x8
#1190 @ 126,851: 10x23
#1191 @ 278,398: 21x12
#1192 @ 668,550: 9x3
#1193 @ 406,82: 13x12
#1194 @ 792,696: 21x26
#1195 @ 65,637: 10x13
#1196 @ 753,180: 12x4
#1197 @ 668,425: 15x26
#1198 @ 526,749: 26x11
#1199 @ 879,723: 29x21
#1200 @ 451,291: 19x11
#1201 @ 318,888: 21x17
#1202 @ 359,491: 13x26
#1203 @ 851,514: 11x26
#1204 @ 711,183: 28x12
#1205 @ 910,705: 10x15
#1206 @ 952,951: 17x26
#1207 @ 410,35: 23x21
#1208 @ 164,517: 23x20
#1209 @ 238,510: 15x24
#1210 @ 179,40: 19x23
#1211 @ 48,517: 25x20
#1212 @ 157,627: 25x18
#1213 @ 751,289: 19x10
#1214 @ 495,43: 17x21
#1215 @ 455,596: 10x13
#1216 @ 183,253: 18x12
#1217 @ 444,913: 28x23
#1218 @ 560,687: 13x17
#1219 @ 130,173: 10x15
#1220 @ 916,132: 20x17
#1221 @ 773,89: 6x6
#1222 @ 732,809: 28x22
#1223 @ 987,793: 11x12
#1224 @ 373,495: 16x21
#1225 @ 806,490: 29x19
#1226 @ 771,623: 10x15
#1227 @ 247,308: 16x28
#1228 @ 280,413: 17x15
#1229 @ 543,444: 17x14
#1230 @ 835,926: 22x15
#1231 @ 373,819: 21x18
#1232 @ 191,810: 17x14
#1233 @ 950,792: 11x18
#1234 @ 427,280: 29x20
#1235 @ 489,496: 26x20
#1236 @ 931,173: 17x18
#1237 @ 355,940: 11x17
#1238 @ 467,463: 29x15
#1239 @ 479,281: 28x25
#1240 @ 979,710: 3x11
#1241 @ 195,801: 18x24
#1242 @ 34,345: 27x20
#1243 @ 849,60: 14x15
#1244 @ 45,112: 27x10
#1245 @ 667,146: 18x26
#1246 @ 699,319: 13x29
#1247 @ 393,12: 18x25
#1248 @ 428,295: 11x22
#1249 @ 767,81: 17x27
#1250 @ 893,82: 14x28
#1251 @ 566,978: 13x12
#1252 @ 769,373: 11x25
#1253 @ 346,599: 13x17
#1254 @ 139,325: 23x24
#1255 @ 288,976: 11x20
#1256 @ 602,70: 24x24
#1257 @ 246,248: 12x21
#1258 @ 911,598: 13x13
#1259 @ 325,463: 15x16
#1260 @ 348,904: 25x13
#1261 @ 747,732: 20x23
#1262 @ 231,571: 12x28
#1263 @ 822,867: 14x12
#1264 @ 385,141: 21x11
#1265 @ 372,514: 25x21
#1266 @ 170,644: 29x14
#1267 @ 672,358: 27x26
#1268 @ 224,341: 13x24
#1269 @ 864,609: 23x26
#1270 @ 322,455: 10x17
#1271 @ 591,628: 21x14
#1272 @ 448,282: 13x27
#1273 @ 252,404: 22x17
#1274 @ 172,908: 27x23
#1275 @ 588,577: 11x28
#1276 @ 744,178: 26x10
#1277 @ 593,627: 28x15
#1278 @ 424,621: 10x15
#1279 @ 118,715: 22x17
#1280 @ 388,490: 11x17
#1281 @ 645,612: 16x15
#1282 @ 45,620: 14x26
#1283 @ 163,727: 10x29
#1284 @ 418,656: 22x26
#1285 @ 903,95: 25x29
#1286 @ 161,810: 29x19
#1287 @ 9,101: 23x21
| 21.586461 | 72 | 0.633181 |
6a774d508bfa82ba64c7f018a1e9bf1042ef3011 | 2,624 | cask 'nvidia-web-driver' do
if MacOS.version <= :yosemite
version '346.02.03f14'
sha256 '21df2785257c58b940168b4d4ff73e32f71e9f7e28ed879bf0d605e4abc74aef'
elsif MacOS.version <= :el_capitan
version '346.03.15f16'
sha256 'f0c1a23a262ba6db35f1d7a0da39e7b7648805d63d65be20af33e582cc7050bc'
elsif MacOS.version <= :sierra
version '378.05.05.25f13'
sha256 'f5849297ee8a4a754f26d998db83878d4c02324db00ec28ab38da1d847d7e5c1'
else
version '387.10.10.10.40.130'
sha256 '214507c4d9e5daa3f30313084c0c0d33f1761827dbd5cae6ce05b845aa55afbc'
end
module Utils
def self.basename
'/Library/PreferencePanes/NVIDIA Driver Manager.prefPane/Contents/MacOS/NVIDIA Web Driver Uninstaller.app/Contents/Resources'
end
end
  # Package is fetched straight from NVIDIA's CDN, versioned per macOS build.
  url "https://images.nvidia.com/mac/pkg/#{version.major}/WebDriver-#{version}.pkg"
  appcast 'https://gfe.nvidia.com/mac-update'
  name 'NVIDIA Web Driver'
  homepage 'https://www.nvidia.com/Download/index.aspx'
  # Web drivers were only ever published for these macOS releases.
  depends_on macos: [
               :yosemite,
               :el_capitan,
               :sierra,
               :high_sierra,
             ]
  pkg "WebDriver-#{version}.pkg"
  # Expand NVIDIA's bundled uninstaller pkg so its postinstall script can be
  # run directly by the uninstall stanza below.
  uninstall_preflight do
    system_command '/usr/sbin/pkgutil',
                   args: ['--expand', "#{Utils.basename}/NVUninstall.pkg", "#{Utils.basename}/NVUninstall"],
                   sudo: true
  end
  # Stop agents/daemons, quit apps, unload kexts, then run NVIDIA's own
  # uninstall script before removing the pkg receipt and preference pane.
  uninstall launchctl: [
                         'com.nvidia.nvagent',
                         'com.nvidia.nvroothelper',
                       ],
            quit: [
                    'com.nvidia.NVIDIAWebDriverUninstaller',
                    'com.nvidia.nvagent',
                    'com.nvidia.nvmenu',
                  ],
            kext: [
                    'com.nvidia.NVDAStartupWeb',
                    'com.nvidia.web.GeForce*Web',
                    'com.nvidia.web.NVDA*Web',
                  ],
            script: [ # Only the third argument is used and should be the system's root path
                      executable: "#{Utils.basename}/NVUninstall/Scripts/postinstall",
                      args: ['/', '/', '/'],
                      sudo: true,
                    ],
            pkgutil: 'com.nvidia.web-driver',
            delete: '/Library/PreferencePanes/NVIDIA Driver Manager.prefPane'
  # Per-user state left behind after uninstall.
  zap trash: [
               '~/Library/Preferences/ByHost/com.nvidia.nvagent.*.plist',
               '~/Library/Preferences/ByHost/com.nvidia.nvprefpane.*.plist',
             ]
  # Kernel driver swap only takes effect after a reboot.
  caveats do
    reboot
  end
end
| 35.945205 | 131 | 0.556784 |
1cf4f718fac036c478a29bed91ed75d08c6e8acb | 66 | module Gcpc
module Interceptors
VERSION = "0.0.2"
end
end
| 11 | 21 | 0.666667 |
28f8f758afd2eff45b55d2d565afadf1469a2297 | 1,166 | # frozen_string_literal: true
require "ruby_event_store/rom"
# Destination (relative to the host application's root) for copied migrations.
MIGRATIONS_PATH = "db/migrate".freeze
desc "Setup ROM EventRepository environment"
task "db:setup" do
  # NOTE(review): chdir into the current directory looks like a no-op; kept
  # to preserve original behaviour (it may normalise Dir.pwd for rom-sql).
  Dir.chdir(Dir.pwd)
  # Boot a ROM SQL container so ROM::SQL::RakeSupport can reach the database.
  ROM::SQL::RakeSupport.env = ::RubyEventStore::ROM.configure(:sql).rom_container
end
desc "Copy RubyEventStore SQL migrations to db/migrate"
task "db:migrations:copy" => "db:setup" do
  # Optional data type for `data` and `metadata`
  data_type = ENV["DATA_TYPE"]
  # Migration templates ship inside the gem, six directories above this file.
  Dir[File.join(File.dirname(__FILE__), "../../../../../../", MIGRATIONS_PATH, "/*.rb")].each do |input|
    contents = File.read(input)
    # Strip the template's leading timestamp, e.g. "20190101_create_events"
    # -> "create_events". Anchored so digits later in a name are untouched.
    name = File.basename(input, ".*").sub(/\A\d+_/, "")
    # Matches the template's default assignment: ENV['DATA_TYPE'] ||= 'text'
    re_data_type = /(ENV.+?DATA_TYPE.+?\|\|=\s*)['"](jsonb?|text)['"]/
    if data_type && contents =~ re_data_type
      # Search/replace this string: ENV['DATA_TYPE'] ||= 'text'
      contents = contents.sub(re_data_type, format('\1"%<data_type>s"', data_type: data_type))
      name += "_with_#{data_type}"
    end
    # Let rom-sql generate a freshly timestamped migration file, then replace
    # its stub body with the (possibly customised) template contents.
    output = ROM::SQL::RakeSupport.create_migration(name, path: File.join(Dir.pwd, MIGRATIONS_PATH))
    File.write output, contents
    puts "<= migration file created #{output}"
  end
end
| 31.513514 | 104 | 0.663808 |
1a229e3b87d99b890a8f38e33a256ecc66d344f3 | 57 | require 'carpetbomb/engine'
require 'carpetbomb/version'
| 19 | 28 | 0.824561 |
f78ea364147a0b20e0784c9db66c675ed7e8296f | 4,404 | # frozen_string_literal: true
# Use this for testing how a GraphQL query handles sorting and pagination.
# This is particularly important when using keyset pagination connection,
# which is the default for ActiveRecord relations, as certain sort keys
# might not be supportable.
#
# sort_param: the value to specify the sort
# data_path: the keys necessary to dig into the return GraphQL data to get the
# returned results
# first_param: number of items expected (like a page size)
# expected_results: array of comparison data of all items sorted correctly
# pagination_query: method that specifies the GraphQL query
# pagination_results_data: method that extracts the sorted data used to compare against
# the expected results
#
# Example:
# describe 'sorting and pagination' do
# let_it_be(:sort_project) { create(:project, :public) }
# let(:data_path) { [:project, :issues] }
#
# def pagination_query(arguments)
# graphql_query_for(:project, { full_path: sort_project.full_path },
# query_nodes(:issues, :iid, include_pagination_info: true, args: arguments)
# )
# end
#
# # A method transforming nodes to data to match against
# # default: the identity function
# def pagination_results_data(issues)
#     issues.map { |issue| issue['iid'].to_i }
# end
#
# context 'when sorting by weight' do
# let_it_be(:issues) { make_some_issues_with_weights }
#
# context 'when ascending' do
# let(:ordered_issues) { issues.sort_by(&:weight) }
#
# it_behaves_like 'sorted paginated query' do
# let(:sort_param) { :WEIGHT_ASC }
# let(:first_param) { 2 }
# let(:expected_results) { ordered_issues.map(&:iid) }
# end
# end
#
RSpec.shared_examples 'sorted paginated query' do
  # Provided as a convenience when constructing queries using string concatenation
  let(:page_info) { 'pageInfo { startCursor endCursor }' }
  # Convenience for using default implementation of pagination_results_data
  let(:node_path) { ['id'] }
  it_behaves_like 'requires variables' do
    let(:required_variables) { [:sort_param, :first_param, :expected_results, :data_path, :current_user] }
  end
  describe do
    let(:sort_argument) { graphql_args(sort: sort_param) }
    let(:params) { sort_argument }
    # Convenience helper for the large number of queries defined as a projection
    # from some root value indexed by full_path to a collection of objects with IID
    def nested_internal_id_query(root_field, parent, field, args, selection: :iid)
      graphql_query_for(root_field, { full_path: parent.full_path },
        query_nodes(field, selection, args: args, include_pagination_info: true)
      )
    end
    # Must be supplied by the including example group; defined?(super) detects
    # whether the host group actually provided an implementation and fails
    # loudly otherwise instead of raising NoMethodError mid-example.
    def pagination_query(params)
      raise('pagination_query(params) must be defined in the test, see example in comment') unless defined?(super)
      super
    end
    # Optional override point: falls back to digging node_path (default: 'id')
    # out of each returned node when the host group defines nothing.
    def pagination_results_data(nodes)
      if defined?(super)
        super(nodes)
      else
        nodes.map { |n| n.dig(*node_path) }
      end
    end
    # Extracts and transforms the node list from the most recent GraphQL
    # response (fresh_response_data re-reads the last request's body).
    def results
      nodes = graphql_dig_at(graphql_data(fresh_response_data), *data_path, :nodes)
      pagination_results_data(nodes)
    end
    # Pagination cursors from the last response's pageInfo.
    def end_cursor
      graphql_dig_at(graphql_data(fresh_response_data), *data_path, :page_info, :end_cursor)
    end
    def start_cursor
      graphql_dig_at(graphql_data(fresh_response_data), *data_path, :page_info, :start_cursor)
    end
    let(:query) { pagination_query(params) }
    before do
      post_graphql(query, current_user: current_user)
    end
    context 'when sorting' do
      it 'sorts correctly' do
        expect(results).to eq expected_results
      end
      context 'when paginating' do
        # Restrict the first request to a single page of first_param items.
        let(:params) { sort_argument.merge(first: first_param) }
        let(:first_page) { expected_results.first(first_param) }
        let(:rest) { expected_results.drop(first_param) }
        it 'paginates correctly' do
          expect(results).to eq first_page
          # Page forward from the end cursor: expect the remaining items.
          fwds = pagination_query(sort_argument.merge(after: end_cursor))
          post_graphql(fwds, current_user: current_user)
          expect(results).to eq rest
          # Page backward from the new start cursor: expect the first page again.
          bwds = pagination_query(sort_argument.merge(before: start_cursor))
          post_graphql(bwds, current_user: current_user)
          expect(results).to eq first_page
        end
      end
    end
  end
end
| 34.139535 | 114 | 0.689827 |
1af067ee83056fb1a9608dce4adfb7a8cde6bf69 | 1,091 | require 'test_helper'
require 'tilt'
begin
require 'tilt/rst-pandoc'
class RstPandocTemplateTest < Minitest::Test
test "is registered for '.rst' files" do
assert_equal Tilt::RstPandocTemplate, Tilt['test.rst']
end
test "compiles and evaluates the template on #render" do
template = Tilt::RstPandocTemplate.new { |t| "Hello World!\n============" }
assert_equal "<h1 id=\"hello-world\">Hello World!</h1>", template.render
end
test "can be rendered more than once" do
template = Tilt::RstPandocTemplate.new { |t| "Hello World!\n============" }
3.times do
assert_equal "<h1 id=\"hello-world\">Hello World!</h1>", template.render
end
end
test "doens't use markdown options" do
template = Tilt::RstPandocTemplate.new(:escape_html => true) { |t| "HELLO <blink>WORLD</blink>" }
err = assert_raises(RuntimeError) { template.render }
assert_match /pandoc: unrecognized option `--escape-html/, err.message
end
end
rescue LoadError => boom
warn "Tilt::RstPandocTemplate (disabled) [#{boom}]"
end
| 33.060606 | 103 | 0.654445 |
18a36486a2165514d8670ae64d8e820f606f36c0 | 31,714 | #
# Cookbook:: websphere
# Resource:: websphere_base
#
# Copyright:: 2015-2021 J Sainsburys
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module WebsphereCookbook
  # Base Chef custom resource for the websphere cookbook. Declares the
  # properties shared by concrete websphere resources (install paths, run
  # user, deployment-manager connection details) and, in the action_class
  # below, helper methods wrapping WebSphere's command-line tooling.
  class WebsphereBase < Chef::Resource
    require_relative 'helpers'
    include WebsphereHelpers
    resource_name :websphere_base
    # Root of the WebSphere Application Server installation.
    property :websphere_root, String, default: '/opt/IBM/WebSphere/AppServer'
    property :bin_dir, String, default: lazy { "#{websphere_root}/bin" }
    # OS user/group that WebSphere commands are executed as.
    property :run_user, String, default: 'was'
    property :run_group, String, default: 'was'
    # WAS administrative credentials; nil when admin security is disabled.
    property :admin_user, [String, nil]
    property :admin_password, [String, nil]
    property :dmgr_host, String, default: 'localhost' # dmgr host to federate to.
    property :dmgr_port, [String, nil] # dmgr port to federate to.
    property :dmgr_svc_timeout, [Integer, nil], default: 300 # systemd timeoutsec defaults.
    property :sensitive_exec, [true, false], default: true # for debug purposes
    # Optional -timeout (seconds) passed to startNode.sh/startManager.sh.
    property :timeout, [Integer, nil]
    property :node_svc_timeout, [Integer, nil], default: 180 # systemd timeoutsec defaults.
    # you need at least one action here to allow the action_class.class_eval block to work
    action :dummy do
      Chef::Application.fatal!('There is no default action for this resource. You must explicitly specify an action!!!')
    end
# need to wrap helper methods in class_eval
# so they're available in the action.
action_class.class_eval do
def manageprofiles_exec(cmd, options)
command = "#{cmd} #{options}"
execute "manage_profiles #{cmd} #{new_resource.profile_name}" do
cwd new_resource.bin_dir
user new_resource.run_user
group new_resource.run_group
command command
sensitive new_resource.sensitive_exec
action :run
end
end
# returns a hash of endpoint => port for a server
      # if srvr_name is empty (the default) it returns the endpoints and ports for all servers
# returns nil if error
def get_ports(srvr_name = '', bin_directory = new_resource.bin_dir)
cookbook_file "#{bin_directory}/server_ports.py" do
cookbook 'websphere'
source 'server_ports.py'
mode '0755'
action :create
end
cmd = "-f server_ports.py #{srvr_name}"
mycmd = wsadmin_returns(cmd)
return nil if mycmd.error?
str = mycmd.stdout.match(/===(.*?)===/m)
return nil if str.nil?
json_str = str[1].strip.tr("'", '"') # strip and replace ' with " so it's a valid json str
endpoints = JSON.parse(json_str)
endpoints
end
# returns 246 if already stopped
def stop_profile_node(p_name, profile_bin)
cmd = "./stopNode.sh -profileName #{p_name} -stopservers"
cmd << " -username #{new_resource.admin_user} -password #{new_resource.admin_password}" if new_resource.admin_user && new_resource.admin_password
execute "stop #{p_name}" do
cwd profile_bin
user new_resource.run_user
command cmd
returns [0, 246, 255]
sensitive new_resource.sensitive_exec
action :run
end
end
# starts a node.
# requires path to profiles own bin dir
# TODO: add check if node or server are already started first.
def start_node(profile_bin)
# start_profile(profile_name, "#{profile_path}/bin", "./startNode.sh", [0, 255])
startnode = './startNode.sh'
startnode << " -timeout #{new_resource.timeout}" if new_resource.timeout
execute "start node on profile: #{profile_bin}" do
cwd profile_bin
user new_resource.run_user
command startnode
returns [0, 255] # ignore error if node already started.
action :run
end
end
# performs a node sync from the nodes own bin dir
# stops all servers if stop_servers=true
# restarts nodeagent if restart=true
# Synchronises this node's configuration with the deployment manager via
# syncNode.sh from the profile's bin dir.
# stop_servers - pass -stopservers to stop all app servers first.
# restart      - pass -restart to restart the node agent afterwards.
def sync_node_sh(profile_bin, stop_servers, restart)
cmd = "./syncNode.sh #{new_resource.dmgr_host}"
cmd << " #{new_resource.dmgr_port}" if new_resource.dmgr_port
cmd << " -username #{new_resource.admin_user} -password #{new_resource.admin_password}" if new_resource.admin_user && new_resource.admin_password
cmd << ' -stopservers' if stop_servers
cmd << ' -restart' if restart
execute "sync node on profile: #{profile_bin}" do
cwd profile_bin
user new_resource.run_user
command cmd
returns [0]
action :run
sensitive new_resource.sensitive_exec
end
end
# Synchronises one node (or 'all') via a wsadmin jython helper instead of
# syncNode.sh, so it can be driven through the dmgr SOAP connection.
# Exit 103 is tolerated alongside 0.
def sync_node_wsadmin(nde_name = 'all', bin_directory = new_resource.bin_dir)
# install the helper script next to wsadmin before running it
cookbook_file "#{bin_directory}/sync_node.py" do
cookbook 'websphere'
source 'sync_node.py'
mode '0755'
action :create
end
cmd = "-f #{bin_directory}/sync_node.py #{nde_name}"
wsadmin_exec("syncNode #{nde_name}", cmd, [0, 103])
end
# starts a dmgr node.
# requires path to profiles own bin dir
# TODO: add check if node or server are already started first.
# Starts a deployment manager profile with startManager.sh, applying the
# supplied environment and optional startup timeout. Exit 255 (already
# started) is tolerated.
def start_manager(p_name, profile_env, profile_bin = new_resource.bin_dir)
# start_profile(profile_name, bin_dir, "./startManager.sh -profileName #{profile_name}", [0, 255])
startcommand = "./startManager.sh -profileName #{p_name}"
startcommand << " -timeout #{new_resource.timeout}" if new_resource.timeout
execute "start dmgr profile: #{p_name}" do
cwd profile_bin
environment profile_env
user new_resource.run_user
command startcommand
returns [0, 255] # ignore error if node already started.
action :run
end
end
# use to setup a nodeagent or dmgr as a service
# server_name should be 'dmgr' or 'nodeagent'
# Installs and enables an OS service wrapper (systemd or SysV init) for a
# 'dmgr' or 'nodeagent' process.
# service_name      - name of the OS service unit/init script to create.
# srvr_name         - 'dmgr' or 'nodeagent'; selects templates and scripts.
# prof_path         - path to the profile directory.
# runas             - OS user/group the service runs as.
# dependent_service - optional systemd unit this service depends on.
def enable_as_service(service_name, srvr_name, prof_path, runas = 'root', dependent_service = nil)
# if dplymgr user and password set, then add as additional args for stop.
stop_args = new_resource.admin_user && new_resource.admin_password ? "-username #{new_resource.admin_user} -password #{new_resource.admin_password}" : ''
# admin_user && admin_password ? start_args = "-username #{admin_user} -password #{admin_password}" : start_args = ''
if node['init_package'] == 'systemd'
node_srvc = srvr_name == 'dmgr' ? 'Manager' : 'Node'
# execute blocks are used here rather than file resources, as using a file resource breaks the library in odd unrelated ways
# Create the new stop start scripts for the nodeagent
%w(
stop
start
).each do |action|
# keep the stock WAS control script under a *Systemd.sh name, then
# drop in cookbook replacements that systemd can call
ruby_block "rename #{action} control service script" do
block do
::File.rename("#{prof_path}/../../bin/#{action}#{node_srvc}.sh", "#{prof_path}/../../bin/#{action}#{node_srvc}Systemd.sh")
end
not_if { ::File.exist?("#{prof_path}/../../bin/#{action}#{node_srvc}Systemd.sh") }
end
cookbook_file "#{prof_path}/../../bin/#{action}#{node_srvc}.sh" do
mode '0755'
owner runas
group runas
source "#{action}#{node_srvc}Service.sh"
cookbook 'websphere'
end
cookbook_file "#{prof_path}/bin/#{action}#{node_srvc}Systemd.sh" do
mode '0755'
owner runas
group runas
source "profile_#{action}#{node_srvc}Service.sh"
cookbook 'websphere'
end
end
# reload systemd whenever the unit file below changes
execute 'daemon_reload' do
command 'systemctl daemon-reload'
action :nothing
subscribes :run, "template[/etc/systemd/system/#{service_name}.service]", :immediate
end
template "/etc/systemd/system/#{service_name}.service" do
mode '0644'
source srvr_name == 'dmgr' ? 'dmgr_systemd.erb' : 'node_service_systemd.erb'
cookbook 'websphere'
variables(
service_name: service_name,
server_name: srvr_name,
profile_path: prof_path,
stop_args: stop_args,
start_args: '',
svc_timeout: srvr_name == 'dmgr' ? new_resource.dmgr_svc_timeout : new_resource.node_svc_timeout,
runas_user: runas,
dependent_service: dependent_service
)
end
else
# non-systemd platforms get a classic /etc/init.d script instead
template "/etc/init.d/#{service_name}" do
mode '0755'
source srvr_name == 'dmgr' ? 'dmgr_init.d.erb' : 'node_service_init.d.erb'
cookbook 'websphere'
variables(
service_name: service_name,
server_name: srvr_name,
profile_path: prof_path,
was_root: new_resource.bin_dir,
stop_args: stop_args,
start_args: new_resource.timeout ? "-timeout #{new_resource.timeout}" : '',
runas_user: runas
)
end
end
service service_name do
action :enable
end
end
# Creates a local service account (user + group + home dir) for running
# WebSphere, then chowns the install tree to it. All account resources
# are skipped when the user is 'root'; the chown still runs.
def create_service_account(user, group)
group group do
action :create
not_if { user == 'root' }
end
user user do
comment 'websphere service account'
home "/home/#{user}"
group group
shell '/sbin/nologin'
not_if { user == 'root' }
end
directory "/home/#{user}" do
owner user
group group
mode '0750'
recursive true
action :create
not_if { user == 'root' }
end
# there's no perfect way to chown in chef without being explicit for each dir with the directory resource.
execute 'chown websphere root for service account' do
command "chown -R #{user}:#{group} #{new_resource.websphere_root}"
action :run
end
end
# federates a node to a dmgr.
# requires path to profiles own bin dir
# Federates this node into the deployment manager cell via addNode.sh
# (run from the profile's own bin dir), keeping installed applications
# (-includeapps), then saves the dmgr configuration.
def add_node(profile_bin_dir)
cmd = "./addNode.sh #{new_resource.dmgr_host}"
cmd << " #{new_resource.dmgr_port}" if new_resource.dmgr_port
cmd << ' -includeapps'
cmd << " -username #{new_resource.admin_user} -password #{new_resource.admin_password}" if new_resource.admin_user && new_resource.admin_password
execute "addNode #{profile_bin_dir}" do
cwd profile_bin_dir
user new_resource.run_user
command cmd
sensitive new_resource.sensitive_exec
action :run
end
save_config
end
# see here for the caveats and operations performed by remove_node:
# https://www-01.ibm.com/support/knowledgecenter/SSAW57_8.5.5/com.ibm.websphere.nd.iseries.doc/ae/rxml_removenode.html?lang=en
# you cannot remove a custom/cell node, you need to just delete it and run cleanup_node instead.
# only use remove_node if you intend to keep the appservers rather than delete them.
# Unfederates this node from the cell via removeNode.sh, keeping its app
# servers (use delete + cleanup_node for custom/cell profiles instead —
# see the IBM removeNode caveats referenced above).
# Credentials are appended when both admin_user and admin_password are set.
def remove_node
  cmd = './removeNode.sh'
  cmd << " -username #{new_resource.admin_user} -password #{new_resource.admin_password}" if new_resource.admin_user && new_resource.admin_password
  # fix: the resource label referenced an undefined local `profile_bin_dir`,
  # raising NameError as soon as this helper ran; use the actual cwd instead
  execute "removeNode #{new_resource.bin_dir}" do
    cwd new_resource.bin_dir
    user new_resource.run_user
    command cmd
    sensitive new_resource.sensitive_exec
    action :run
  end
end
# run after deleting a federated profile/node
# Removes a deleted federated node's entry from the dmgr configuration
# via cleanupNode.sh. Run this after deleting a federated profile.
def cleanup_node(nde_name)
cmd = "./cleanupNode.sh #{nde_name} #{new_resource.dmgr_host}"
cmd << " #{new_resource.dmgr_port}" if new_resource.dmgr_port
cmd << " -username #{new_resource.admin_user} -password #{new_resource.admin_password}" if new_resource.admin_user && new_resource.admin_password
execute "cleanupNode node: #{nde_name} dmgr: #{new_resource.dmgr_host}" do
cwd new_resource.bin_dir
user new_resource.run_user
command cmd
sensitive new_resource.sensitive_exec
action :run
end
end
# Validates and prunes stale entries from the WebSphere profile registry
# (manageprofiles.sh -validateAndUpdateRegistry).
def update_registry
execute 'update profile registry' do
cwd new_resource.bin_dir
user new_resource.run_user
command './manageprofiles.sh -validateAndUpdateRegistry'
action :run
end
end
# True if manageprofiles.sh reports a profile named p_name, false when
# the listing fails or the name is absent.
def profile_exists?(p_name)
  listing = Mixlib::ShellOut.new('./manageprofiles.sh -listProfiles', cwd: new_resource.bin_dir, user: new_resource.run_user)
  listing.run_command
  return false if listing.error?
  # -listProfiles prints something like: [Dmgr01, AppSrv01, AppSrv02]
  known_profiles = listing.stdout.chomp.gsub(/\[|\]/, '').split(', ')
  return false if known_profiles.nil?
  if known_profiles.include?(p_name)
    Chef::Log.debug("Profile #{p_name} exists")
    true
  else
    Chef::Log.debug("Profile #{p_name} does NOT exist")
    false
  end
end
# Deletes a profile via manageprofiles.sh (exit 2 tolerated), refreshes
# the profile registry, and removes the profile directory from disk.
def delete_profile(p_name, p_path)
execute "delete #{p_name}" do
cwd new_resource.bin_dir
user new_resource.run_user
command "./manageprofiles.sh -delete -profileName #{p_name} && "\
'./manageprofiles.sh -validateAndUpdateRegistry'
returns [0, 2]
action :run
end
directory p_path do
recursive true
action :delete
end
end
# enables javasdk.
# Use the profiles own bin dir
# java sdk must already be installed with ibm-installmgr cookbook
# Switches a profile (and its servers) to a named Java SDK using
# managesdk.sh. The SDK must already be installed (ibm-installmgr).
def enable_java_sdk(sdk_name, profile_bin, profile_name)
cmd = "./managesdk.sh -enableProfile -profileName #{profile_name} -sdkname #{sdk_name} -verbose -enableServers"
cmd << " -user #{new_resource.admin_user} -password #{new_resource.admin_password}" if new_resource.admin_user && new_resource.admin_password
execute "enable java #{sdk_name} on profile: #{profile_name}" do
cwd profile_bin
user new_resource.run_user
command cmd
action :run
end
# save_config
end
# executes wsadmin commands and captures stdout, return values, errors etc.
# returns Mixlib::ShellOut with the stdout, stderror and exitcodes populated.
# command needs to include the -f of -c flag for file or command string
# Runs a wsadmin jython command synchronously and returns the
# Mixlib::ShellOut object so callers can inspect stdout/stderr/exitcode.
# cmd must include its own -f (script) or -c (inline command) flag.
# Host/port/credentials are prepended from new_resource when present.
def wsadmin_returns(cmd, bin_directory = new_resource.bin_dir)
wsadmin_cmd = './wsadmin.sh -lang jython -conntype SOAP '
wsadmin_cmd << "-host #{new_resource.dmgr_host} " if new_resource.dmgr_host
wsadmin_cmd << "-port #{new_resource.dmgr_port} " if new_resource.dmgr_port
wsadmin_cmd << "-user #{new_resource.admin_user} -password #{new_resource.admin_password} " if new_resource.admin_user && new_resource.admin_password
wsadmin_cmd << cmd
mycmd = Mixlib::ShellOut.new(wsadmin_cmd, cwd: bin_directory)
mycmd.run_command
Chef::Log.debug("wsadmin_returns cmd: #{cmd} stdout: #{mycmd.stdout} stderr: #{mycmd.stderr}")
mycmd
end
# returns an array of key,value arrays containing server info.
# eg. ["[cell", "MyNewCell]", "[serverType", "APPLICATION_SERVER]", "[com.ibm.websphere.baseProductVersion", "8.5.5.0]",
# "[node", "AppServer10_node]", "[server", "AppServer10_server]"]
# returns an empty array if server doesn't exit.
# Returns server info for serv_name on new_resource.node_name as an array
# of string fragments split from the wsadmin output, e.g.
# ["[cell", "MyNewCell]", "[serverType", "APPLICATION_SERVER]", ...]
# Returns an empty array when wsadmin fails or nothing matches.
def server_info(serv_name)
  cmd = " -c \"AdminServerManagement.showServerInfo('#{new_resource.node_name}', '#{serv_name}')\""
  mycmd = wsadmin_returns(cmd)
  return [] if mycmd.error?
  # fix: stdout was read from an undefined `result` variable (NameError);
  # also guard against a non-matching output before calling #captures
  str = mycmd.stdout.match(/\['(.*?)'\]/) # match everything between ['']
  return [] if str.nil?
  str.captures.first.chomp.split("', '")
end
# True when AdminClusterManagement reports that clus_name exists.
def cluster_exists?(clus_name)
  query = "-c \"AdminClusterManagement.checkIfClusterExists('#{clus_name}')\""
  wsadmin_returns(query).stdout.include?("\n \n'true'\n")
end
# returns true if server name is a member of specified cluster for a specified node.
# True if serv_name on node serv_node is a member of cluster clus_name.
# Uses a helper jython script that prints ===1=== on a match; any wsadmin
# failure is reported as false.
def member?(clus_name, serv_name, serv_node, bin_directory = new_resource.bin_dir)
# Get all server members from the cluster
cookbook_file "#{bin_directory}/cluster_member_exists.py" do
cookbook 'websphere'
source 'cluster_member_exists.py'
mode '0755'
action :create
end
Chef::Log.info("Checking for: #{clus_name} #{serv_node} #{serv_name}")
cmd = "-f #{bin_directory}/cluster_member_exists.py #{clus_name} #{serv_node} #{serv_name}"
mycmd = wsadmin_returns(cmd)
return false if mycmd.error?
Chef::Log.debug("Result from member query #{mycmd.stdout}")
return true if mycmd.stdout.include?('===1===')
false
end
# if server is a cluster member, it returns the cluster name
# returns nil is server is not part of a cluster
# Returns the name of the cluster that serv_name (on serv_node) belongs
# to, or nil if the server is not a member of any cluster.
def member_of_cluster?(serv_name, serv_node)
clusters.each do |cluster|
return cluster if member?(cluster, serv_name, serv_node)
end
nil
end
# True if any APPLICATION_SERVER on node_name belongs to a cluster.
def node_has_clustered_servers?(node_name)
  svrs = get_servers('APPLICATION_SERVER', node_name)
  svrs.each do |s|
    # fix: member_of_cluster? takes (serv_name, serv_node); the node
    # argument was previously omitted, raising ArgumentError at runtime
    my_cluster = member_of_cluster?(s['server'], s['node'])
    return true unless my_cluster.nil?
  end
  false
end
# returns an array of cluster names
# Returns the names of all clusters in the cell (empty array on wsadmin
# failure or when nothing is listed).
def clusters
  listing = wsadmin_returns('-c "AdminClusterManagement.listClusters()"')
  return [] if listing.error?
  # example output last line: ['test1(cells/MyNewCell/clusters/test1|cluster.xml#ServerCluster_1454497648498)',
  # 'test2(cells/MyNewCell/clusters/test2|cluster.xml#ServerCluster_1454497723266)']
  bracketed = listing.stdout.chomp.match(/\['(.*?)'\]/) # get only what's between [''].
  return [] if bracketed.nil?
  # each entry is "name(config/path)"; keep just the name before '('
  cluster_array = bracketed.captures.first.split("', '").map { |entry| entry.split('(')[0] }
  Chef::Log.debug("getclusters() result #{cluster_array}")
  cluster_array
end
# Returns the member server names of cluster clus_name (empty array on
# wsadmin failure or when nothing matches).
def get_cluster_members(clus_name)
  listing = wsadmin_returns("-c \"AdminClusterManagement.listClusterMembers('#{clus_name}')\"")
  # example output: ['AdditionalAppServer_server(cells/MyNewCell/clusters/MyCluster03|cluster.xml#ClusterMember_1455629129667)']
  return [] if listing.error?
  bracketed = listing.stdout.chomp.match(/\['(.*?)'\]/) # get only what's between [''].
  return [] if bracketed.nil?
  # keep just the server name in front of the '(config/path)' suffix
  members = bracketed.captures.first.split("', '").map { |entry| entry.split('(')[0] }
  Chef::Log.debug("get_cluster_members() result #{members}")
  members
end
# returns true if server exists on node.
# True if a server named serv_name exists on node nde_name.
def server_exists?(nde_name, serv_name, profile_bin_dir = new_resource.bin_dir)
  query = "-c \"AdminServerManagement.checkIfServerExists('#{nde_name}', '#{serv_name}')\""
  wsadmin_returns(query, profile_bin_dir).stdout.include?("\n'true'\n")
end
# returns an array of hashes {"server" => "myserver", "node" => "myserversnode"}.
# server_type or node_name or both can be nil.
# eg. if both are nil it will return all servers of all types on all nodes.
# Returns an array of hashes {"server" => name, "node" => node} for the
# servers matching server_type on node_name. Either argument may be nil
# to widen the match; returns an empty array on failure or no match.
def get_servers(server_type, node_name)
  listing = wsadmin_returns("-c \"AdminServerManagement.listServers('#{server_type}', '#{node_name}')\"")
  return [] if listing.error?
  bracketed = listing.stdout.match(/\['(.*?)'\]/)
  return [] if bracketed.nil?
  # each raw entry looks like:
  # "AppSrv09_server(cells/MyNewCell/nodes/AppSrv09_node/servers/AppSrv09_server|server.xml#Server_1183122130078)"
  hash_array = bracketed.captures.first.chomp.split("', '").map do |raw_server|
    {
      'server' => raw_server.split('(')[0],
      'node' => raw_server.match(%r{(?<=nodes\/).*?(?=\/servers)}).to_s
    }
  end
  Chef::Log.debug("get_servers() result #{hash_array}")
  hash_array
end
# TODO: find a better way to retrieve a profiles current federated state.
# If a nodeagent/server.xml file exists then it is assumed the node is already federated
# Heuristic federation check: a node is assumed federated when its
# nodeagent server.xml exists under the profile's cell config.
# TODO: find a better way to retrieve a profile's federated state.
def federated?(p_path, node_name1)
  cell = profile_cell(p_path)
  nodeagent_xml = "#{p_path}/config/cells/#{cell}/nodes/#{node_name1}/servers/nodeagent/server.xml"
  if ::File.exist?(nodeagent_xml)
    Chef::Log.debug("#{node_name1} is federated")
    true
  else
    Chef::Log.debug("#{node_name1} is NOT federated")
    false
  end
end
# executes wsadmin commands. Doesn't capture any stdout.
# Runs a single wsadmin jython command (-c) through a Chef execute
# resource; stdout is not captured. cmd is the bare jython expression.
# return_codes lists acceptable exit statuses.
def wsadmin_exec(label, cmd, return_codes = [0], bin_directory = new_resource.bin_dir)
wsadmin_cmd = './wsadmin.sh -lang jython -conntype SOAP '
wsadmin_cmd << "-host #{new_resource.dmgr_host} " if new_resource.dmgr_host
wsadmin_cmd << "-port #{new_resource.dmgr_port} " if new_resource.dmgr_port
wsadmin_cmd << "-user #{new_resource.admin_user} -password #{new_resource.admin_password} " if new_resource.admin_user && new_resource.admin_password
wsadmin_cmd << "-c \"#{cmd}\""
Chef::Log.debug("wsadmin_exec running cmd: #{wsadmin_cmd} return_codes #{return_codes}")
execute "wsadmin #{label}" do
cwd bin_directory
user new_resource.run_user
command wsadmin_cmd
returns return_codes
sensitive new_resource.sensitive_exec
action :run
end
end
# Like wsadmin_exec, but appends cmd verbatim (caller supplies its own
# -c/-f flags and quoting), allowing several commands in one invocation.
def wsadmin_multi_command_exec(label, cmd, return_codes = [0], bin_directory = new_resource.bin_dir)
wsadmin_cmd = './wsadmin.sh -lang jython -conntype SOAP '
wsadmin_cmd << "-host #{new_resource.dmgr_host} " if new_resource.dmgr_host
wsadmin_cmd << "-port #{new_resource.dmgr_port} " if new_resource.dmgr_port
wsadmin_cmd << "-user #{new_resource.admin_user} -password #{new_resource.admin_password} " if new_resource.admin_user && new_resource.admin_password
wsadmin_cmd << cmd.to_s
Chef::Log.debug("wsadmin_exec running cmd: #{wsadmin_cmd} return_codes #{return_codes}")
execute "wsadmin #{label}" do
cwd bin_directory
user new_resource.run_user
command wsadmin_cmd
returns return_codes
sensitive new_resource.sensitive_exec
action :run
end
end
# Like wsadmin_exec, but runs a jython script file: cmd is the script
# path, passed via -f.
def wsadmin_exec_file(label, cmd, return_codes = [0], bin_directory = new_resource.bin_dir)
wsadmin_cmd = './wsadmin.sh -lang jython -conntype SOAP '
wsadmin_cmd << "-host #{new_resource.dmgr_host} " if new_resource.dmgr_host
wsadmin_cmd << "-port #{new_resource.dmgr_port} " if new_resource.dmgr_port
wsadmin_cmd << "-user #{new_resource.admin_user} -password #{new_resource.admin_password} " if new_resource.admin_user && new_resource.admin_password
wsadmin_cmd << "-f #{cmd}"
Chef::Log.debug("wsadmin_exec running cmd: #{wsadmin_cmd}")
execute "wsadmin #{label}" do
cwd bin_directory
user new_resource.run_user
command wsadmin_cmd
returns return_codes
sensitive new_resource.sensitive_exec
action :run
end
end
# Persists pending wsadmin configuration changes (AdminConfig.save()).
def save_config
  wsadmin_exec('save config', 'AdminConfig.save()')
end
# Starts one server on a node; 103 (already running) is tolerated.
def start_server(nde_name, serv_name, return_codes = [0, 103])
cmd = "AdminServerManagement.startSingleServer('#{nde_name}', '#{serv_name}')"
wsadmin_exec("start server: #{serv_name} on node #{nde_name}", cmd, return_codes)
end
# Starts every server on a node; 103 (already running) is tolerated.
def start_all_servers(nde_name)
cmd = "AdminServerManagement.startAllServers('#{nde_name}')"
wsadmin_exec("start all servers on node #{nde_name}", cmd, [0, 103])
end
# Stops one server on a node; 103 (already stopped) is tolerated.
def stop_server(nde_name, serv_name)
cmd = "AdminServerManagement.stopSingleServer('#{nde_name}', '#{serv_name}')"
wsadmin_exec("stop server: #{serv_name} on node #{nde_name}", cmd, [0, 103])
end
# Stops every server on a node; 103 (already stopped) is tolerated.
def stop_all_servers(nde_name)
cmd = "AdminServerManagement.stopAllServers('#{nde_name}')"
wsadmin_exec("stopping all servers on node #{nde_name}", cmd, [0, 103])
end
# Deletes a server definition from a node via the wsadmin script library.
def delete_server(nde_name, serv_name)
cmd = "AdminServerManagement.deleteServer('#{nde_name}', '#{serv_name}')"
wsadmin_exec("delete server: #{serv_name} on node #{nde_name}", cmd)
end
# Stops the node's processes via AdminNodeManagement.stopNode; exit codes
# 103 and 1 are tolerated (e.g. already stopped).
def stop_node(nde_name)
cmd = "AdminNodeManagement.stopNode('#{nde_name}')"
wsadmin_exec("stop node: #{nde_name}", cmd, [0, 103, 1])
end
# Intended to stop only the node agent on nde_name.
# NOTE(review): this issues AdminNodeManagement.stopNode — byte-identical
# to stop_node above. If only the agent should stop, stopNodeAgent may be
# the intended call; confirm against the IBM script library docs.
def stop_node_agent(nde_name)
cmd = "AdminNodeManagement.stopNode('#{nde_name}')"
wsadmin_exec("stop node agent: #{nde_name}", cmd, [0, 103, 1])
end
# deletes a cluster member server
# there's no need to cleanup the server's orhpaned directory as it is removed lazily when
# another server with the same name is created again.
# Removes a member server from a cluster via AdminTask.deleteClusterMember.
# delete_replicator controls -deleteEntry for the replicator domain entry.
# The orphaned server directory is cleaned up lazily by WAS (see comment
# above the original definition).
def delete_cluster_member(clus_name, member_name, member_node, delete_replicator)
cmd = "AdminTask.deleteClusterMember('[-clusterName #{clus_name} -memberNode #{member_node} "\
"-memberName #{member_name} -replicatorEntry [-deleteEntry #{delete_replicator}]]')"
wsadmin_exec("delete member: #{member_name} from cluster: #{clus_name}", cmd)
end
# Deletes a cluster; repl_domain controls -deleteRepDomain for its
# replication domain.
def delete_cluster(clus_name, repl_domain = false)
cmd = "AdminTask.deleteCluster('[-clusterName #{clus_name} -replicationDomain [-deleteRepDomain #{repl_domain}]]')"
wsadmin_exec("delete cluster: #{clus_name}", cmd)
end
# returns the object id from a specified match string. matchen eg /Cell:MyNewCell/
# eg result: 'MyNewCell(cells/MyNewCell|cell.xml#Cell_1)'
# returns nil if error or no result.
# Returns the config object id matching an AdminConfig.getid containment
# path (e.g. '/Cell:MyNewCell/'), or nil on error / empty result.
# Example result: 'MyNewCell(cells/MyNewCell|cell.xml#Cell_1)'
def get_id(matcher, bin_directory = new_resource.bin_dir)
cmd = "-c \"AdminConfig.getid('#{matcher}')\""
mycmd = wsadmin_returns(cmd, bin_directory)
return nil if mycmd.error?
str = mycmd.stdout.match(/\n'(.*?)'\n/) # match everything between \n'capture this'\n
return nil if str.nil? || str.captures.first == ''
str.captures.first
end
# returns the top level attributes object id for a given object id.
# eg object_id 'MyNewCell(cells/MyNewCell|cell.xml#Cell_1)'
# eg returns '(cells/MyNewCell|cell.xml#MonitoredDirectoryDeployment_1)'
# Returns the config id of attribute attr_name on object_id via
# AdminConfig.showAttribute, or nil on error / no match.
# Example: '(cells/MyNewCell|cell.xml#MonitoredDirectoryDeployment_1)'
def get_attribute_id(object_id, attr_name, bin_directory = new_resource.bin_dir)
cmd = "-c \"AdminConfig.showAttribute('#{object_id}', '#{attr_name}')\""
mycmd = wsadmin_returns(cmd, bin_directory)
return nil if mycmd.error?
str = mycmd.stdout.match(/\n'(.*?)'\n/) # match everything between \n'capture this'\n
return nil if str.nil?
str.captures.first
end
# modifies the specified attribues associated with a given config_id
# attr_key_val_list is a string with key, value pairs eg: "[['userID', 'newID'], ['password', 'newPW']]"
# Applies AdminConfig.modify to config_id and saves the configuration.
# attr_key_val_list is a jython-formatted pair list string, e.g.
# "[['userID', 'newID'], ['password', 'newPW']]".
# The trailing _bin_directory parameter is accepted but unused.
def modify_object(config_id, attr_key_val_list, _bin_directory = new_resource.bin_dir)
cmd = "AdminConfig.modify('#{config_id}', #{attr_key_val_list})"
Chef::Log.debug("Modifying #{config_id}' to #{attr_key_val_list}")
wsadmin_exec("modify config_id: #{config_id}", cmd)
save_config
end
# updates attributes of an object.
# attributes needs to be a nested hash of attributes and the names o their parents
# eg. "processDefinitions" => {
# "monitoringPolicy" => {
# "newvalue" => "[['nodeRestartState', 'RUNNING']]" }}
# parent should be the parent object id to the hash.
# eg. AppServer12_server(cells/MyNewCell/nodes/AppServer12_node/servers/AppServer12_server|server.xml#Server_1183122130078)
# Recursively applies attribute changes under a parent config id.
# attributes - nested hash; hash values descend into the named child
#   attribute, non-hash values are passed to modify_object as a
#   jython pair-list string (see the example above the definition).
# parent     - the config object id to modify / descend from.
# Relies on str_array?/str_to_array helpers (defined elsewhere in this
# library) to fan out when a child attribute id is a list.
def update_attributes(attributes, parent)
attributes.each do |key, val|
if val.is_a?(Hash)
parent_attr_id = get_attribute_id(parent, key)
# convert to an array and iterate over it if needed.
if str_array?(parent_attr_id)
parent_attrs = str_to_array(parent_attr_id)
parent_attrs.each do |p|
update_attributes(val, p)
end
else
update_attributes(val, parent_attr_id)
end
else
Chef::Log.debug("Setting attributes on #{parent}")
modify_object(parent, val)
end
end
end
# deploys an application to a cluster. app_file can be an .ear, .war or .jar
# app_file is the full path to the application file.
# if you need to map the app to a virtual host use map_web_mod_to_vh
# eg. map_web_mod_to_vh=[['.*', '.*', 'default_host']]
# Deploys an application (.ear/.war/.jar at full path appl_file) to a
# cluster via AdminApp.install, then saves the configuration.
# map_web_mod_to_vh - optional virtual-host mapping in jython list form,
#   e.g. "[['.*', '.*', 'default_host']]"; omitted entirely when nil.
def deploy_to_cluster(appl_name, appl_file, clus_name, map_web_mod_to_vh = nil)
  cmd = "AdminApp.install('#{appl_file}', ['-appname', '#{appl_name}', '-cluster', '#{clus_name}'"
  # fix: the supplied mapping was previously ignored and a hard-coded
  # [['.*', '.*', 'default_host']] was always sent; interpolate the value
  cmd << ", '-MapWebModToVH', #{map_web_mod_to_vh}" if map_web_mod_to_vh
  cmd << '])'
  wsadmin_exec("deploy #{appl_file} to cluster #{clus_name}", cmd)
  save_config
end
# Retrieve those cluster templates already defined for a cluster
# Returns the cluster-member template names already defined for a cluster
# (empty array on wsadmin failure or no match).
def get_cluster_templates(clus_name)
  listing = wsadmin_returns("-c \"AdminTask.listClusterMemberTemplates('[-clusterName #{clus_name}]')\"")
  # example output:'V7MemberTemplate(templates/clusters/sample-cluster/servers/V7MemberTemplate|server.xml#Server_1506097558200)\nV7MemberTemplate0(templates/clusters/sample-cluster/servers/V7MemberTemplate0|server.xml#Server_1508144541044)'
  return [] if listing.error?
  quoted = listing.stdout.chomp.match(/'(.*?)'/) # get only what's between ''.
  return [] if quoted.nil?
  # one template per line; keep just the name before '(config/path)'
  was_templates = quoted.captures.first.split("\n").map { |line| line.split('(')[0] }
  Chef::Log.debug("get_cluster_templates() result #{was_templates}")
  was_templates
end
end
end
end
| 43.325137 | 247 | 0.633979 |
03ff259c788ed78696f7fb8dc94f84623fda8a52 | 11,675 | module SlackGamebot
module Commands
class Set < SlackRubyBot::Commands::Base
include SlackGamebot::Commands::Mixins::Subscription
class << self
def set_nickname(client, data, user, v)
target_user = user
slack_mention = v.split.first if v
if v && User.slack_mention?(slack_mention)
raise SlackGamebot::Error, "You're not a captain, sorry." unless user.captain?
target_user = ::User.find_by_slack_mention!(client, slack_mention)
v = v[slack_mention.length + 1..-1]
end
target_user.update_attributes!(nickname: v) unless v.nil?
if target_user.nickname.blank?
client.say(channel: data.channel, text: "You don't have a nickname set, #{target_user.user_name}.", gif: 'anonymous')
logger.info "SET: #{client.owner} - #{user.user_name}: nickname #{target_user == user ? '' : ' for ' + target_user.user_name}is not set"
else
client.say(channel: data.channel, text: "Your nickname is #{v.nil? ? '' : 'now '}*#{target_user.nickname}*, #{target_user.slack_mention}.", gif: 'name')
logger.info "SET: #{client.owner} - #{user.user_name} nickname #{target_user == user ? '' : ' for ' + target_user.user_name}is #{target_user.nickname}"
end
end
def unset_nickname(client, data, user, v)
target_user = user
slack_mention = v.split.first if v
if User.slack_mention?(slack_mention)
raise SlackGamebot::Error, "You're not a captain, sorry." unless user.captain?
target_user = ::User.find_by_slack_mention!(client, slack_mention)
end
old_nickname = target_user.nickname
target_user.update_attributes!(nickname: nil)
client.say(channel: data.channel, text: "You don't have a nickname set#{old_nickname.blank? ? '' : ' anymore'}, #{target_user.slack_mention}.", gif: 'anonymous')
logger.info "UNSET: #{client.owner} - #{user.user_name}: nickname #{target_user == user ? '' : ' for ' + target_user.user_name} was #{old_nickname.blank? ? 'not ' : 'un'}set"
end
def set_gifs(client, data, user, v)
raise SlackGamebot::Error, "You're not a captain, sorry." unless v.nil? || user.captain?
unless v.nil?
client.owner.update_attributes!(gifs: v.to_b)
client.send_gifs = client.owner.gifs
end
client.say(channel: data.channel, text: "GIFs for team #{client.owner.name} are #{client.owner.gifs? ? 'on!' : 'off.'}", gif: 'fun')
logger.info "SET: #{client.owner} - #{user.user_name} GIFs are #{client.owner.gifs? ? 'on' : 'off'}"
end
def unset_gifs(client, data, user)
raise SlackGamebot::Error, "You're not a captain, sorry." unless user.captain?
client.owner.update_attributes!(gifs: false)
client.send_gifs = client.owner.gifs
client.say(channel: data.channel, text: "GIFs for team #{client.owner.name} are off.", gif: 'fun')
logger.info "UNSET: #{client.owner} - #{user.user_name} GIFs are off"
end
def set_unbalanced(client, data, user, v)
raise SlackGamebot::Error, "You're not a captain, sorry." unless v.nil? || user.captain?
client.owner.update_attributes!(unbalanced: v.to_b) unless v.nil?
client.say(channel: data.channel, text: "Unbalanced challenges for team #{client.owner.name} are #{client.owner.unbalanced? ? 'on!' : 'off.'}", gif: 'balance')
logger.info "SET: #{client.owner} - #{user.user_name} unbalanced challenges are #{client.owner.unbalanced? ? 'on' : 'off'}"
end
def unset_unbalanced(client, data, user)
raise SlackGamebot::Error, "You're not a captain, sorry." unless user.captain?
client.owner.update_attributes!(unbalanced: false)
client.say(channel: data.channel, text: "Unbalanced challenges for team #{client.owner.name} are off.", gif: 'balance')
logger.info "UNSET: #{client.owner} - #{user.user_name} unbalanced challenges are off"
end
def set_api(client, data, user, v)
raise SlackGamebot::Error, "You're not a captain, sorry." unless v.nil? || user.captain?
client.owner.update_attributes!(api: v.to_b) unless v.nil?
message = [
"API for team #{client.owner.name} is #{client.owner.api? ? 'on!' : 'off.'}",
client.owner.api_url
].compact.join("\n")
client.say(channel: data.channel, text: message, gif: 'programmer')
logger.info "SET: #{client.owner} - #{user.user_name} API is #{client.owner.api? ? 'on' : 'off'}"
end
def unset_api(client, data, user)
raise SlackGamebot::Error, "You're not a captain, sorry." unless user.captain?
client.owner.update_attributes!(api: false)
client.say(channel: data.channel, text: "API for team #{client.owner.name} is off.", gif: 'programmer')
logger.info "UNSET: #{client.owner} - #{user.user_name} API is off"
end
def set_elo(client, data, user, v)
raise SlackGamebot::Error, "You're not a captain, sorry." unless v.nil? || user.captain?
client.owner.update_attributes!(elo: parse_int(v)) unless v.nil?
message = "Base elo for team #{client.owner.name} is #{client.owner.elo}."
client.say(channel: data.channel, text: message, gif: 'score')
logger.info "SET: #{client.owner} - #{user.user_name} ELO is #{client.owner.elo}"
end
def unset_elo(client, data, user)
raise SlackGamebot::Error, "You're not a captain, sorry." unless user.captain?
client.owner.update_attributes!(elo: 0)
client.say(channel: data.channel, text: "Base elo for team #{client.owner.name} has been unset.", gif: 'score')
logger.info "UNSET: #{client.owner} - #{user.user_name} ELO has been unset"
end
def set_leaderboard_max(client, data, user, v)
raise SlackGamebot::Error, "You're not a captain, sorry." unless v.nil? || user.captain?
unless v.nil?
v = parse_int_with_inifinity(v)
client.owner.update_attributes!(leaderboard_max: v && v != 0 ? v : nil)
end
message = "Leaderboard max for team #{client.owner.name} is #{client.owner.leaderboard_max || 'not set'}."
client.say(channel: data.channel, text: message, gif: 'count')
logger.info "SET: #{client.owner} - #{user.user_name} LEADERBOARD MAX is #{client.owner.leaderboard_max}"
end
def unset_leaderboard_max(client, data, user)
raise SlackGamebot::Error, "You're not a captain, sorry." unless user.captain?
client.owner.update_attributes!(leaderboard_max: nil)
client.say(channel: data.channel, text: "Leaderboard max for team #{client.owner.name} has been unset.", gif: 'score')
logger.info "UNSET: #{client.owner} - #{user.user_name} LEADERBOARD MAX has been unset"
end
def set_aliases(client, data, user, v)
raise SlackGamebot::Error, "You're not a captain, sorry." unless v.nil? || user.captain?
unless v.nil?
client.owner.update_attributes!(aliases: v.split(/[\s,;]+/))
client.aliases = client.owner.aliases
end
if client.owner.aliases.any?
client.say(channel: data.channel, text: "Bot aliases for team #{client.owner.name} are #{client.owner.aliases.and}.", gif: 'name')
logger.info "SET: #{client.owner} - #{user.user_name} Bot aliases are #{client.owner.aliases.and}"
else
client.say(channel: data.channel, text: "Team #{client.owner.name} does not have any bot aliases.", gif: 'name')
logger.info "SET: #{client.owner} - #{user.user_name}, does not have any bot aliases"
end
end
def unset_aliases(client, data, user)
raise SlackGamebot::Error, "You're not a captain, sorry." unless user.captain?
client.owner.update_attributes!(aliases: [])
client.aliases = []
client.say(channel: data.channel, text: "Team #{client.owner.name} no longer has bot aliases.", gif: 'name')
logger.info "UNSET: #{client.owner} - #{user.user_name} no longer has bot aliases"
end
def parse_int_with_inifinity(v)
v == 'infinity' ? nil : parse_int(v)
end
def parse_int(v)
Integer(v)
rescue StandardError
raise SlackGamebot::Error, "Sorry, #{v} is not a valid number."
end
def set(client, data, user, k, v)
case k
when 'nickname' then
set_nickname client, data, user, v
when 'gifs' then
set_gifs client, data, user, v
when 'leaderboard' then
k, v = v.split(/[\s]+/, 2) if v
case k
when 'max' then
set_leaderboard_max client, data, user, v
else
raise SlackGamebot::Error, "Invalid leaderboard setting #{k}, you can _set leaderboard max_."
end
when 'unbalanced' then
set_unbalanced client, data, user, v
when 'api' then
set_api client, data, user, v
when 'elo' then
set_elo client, data, user, v
when 'aliases' then
set_aliases client, data, user, v
else
raise SlackGamebot::Error, "Invalid setting #{k}, you can _set gifs on|off_, _set unbalanced on|off_, _api on|off_, _leaderboard max_, _elo_, _nickname_ and _aliases_."
end
end
def unset(client, data, user, k, v)
case k
when 'nickname' then
unset_nickname client, data, user, v
when 'gifs' then
unset_gifs client, data, user
when 'leaderboard' then
case v
when 'max' then
unset_leaderboard_max client, data, user
else
raise SlackGamebot::Error, "Invalid leaderboard setting #{v}, you can _unset leaderboard max_."
end
when 'unbalanced' then
unset_unbalanced client, data, user
when 'api' then
unset_api client, data, user
when 'elo' then
unset_elo client, data, user
when 'aliases' then
unset_aliases client, data, user
else
raise SlackGamebot::Error, "Invalid setting #{k}, you can _unset gifs_, _api_, _leaderboard max_, _elo_, _nickname_ and _aliases_."
end
end
end
subscribed_command 'set' do |client, data, match|
user = ::User.find_create_or_update_by_slack_id!(client, data.user)
if !match['expression']
client.say(channel: data.channel, text: 'Missing setting, eg. _set gifs off_.', gif: 'help')
logger.info "SET: #{client.owner} - #{user.user_name}, failed, missing setting"
else
k, v = match['expression'].split(/[\s]+/, 2)
set client, data, user, k, v
end
end
subscribed_command 'unset' do |client, data, match|
user = ::User.find_create_or_update_by_slack_id!(client, data.user)
if !match['expression']
client.say(channel: data.channel, text: 'Missing setting, eg. _unset gifs_.', gif: 'help')
logger.info "UNSET: #{client.owner} - #{user.user_name}, failed, missing setting"
else
k, v = match['expression'].split(/[\s]+/, 2)
unset client, data, user, k, v
end
end
end
end
end
| 47.45935 | 184 | 0.603084 |
280676df2476dabde401b11b17c59ba847d93a79 | 5,220 | #
# Author:: Adam Jacob (<[email protected]>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'pathname'
require 'chef/provider/user'
class Chef
  class Provider
    class User
      # Provider that manages local Unix users with the shadow-utils
      # command-line tools: useradd, usermod, userdel and passwd.
      # Platform-specific providers subclass this one and may override
      # UNIVERSAL_OPTIONS where their flag spellings differ.
      class Useradd < Chef::Provider::User
        # [resource attribute, command-line flag] pairs understood by both
        # useradd and usermod. Resolved via self.class:: in
        # universal_options so subclasses can redefine the constant.
        UNIVERSAL_OPTIONS = [[:comment, "-c"], [:gid, "-g"], [:password, "-p"], [:shell, "-s"], [:uid, "-u"]]
        # Create the user with `useradd`, combining the shared flags with
        # the useradd-only ones (e.g. -r for system accounts).
        def create_user
          command = compile_command("useradd") do |useradd|
            useradd << universal_options
            useradd << useradd_options
          end
          run_command(:command => command)
        end
        # Bring an existing user in line with the resource via `usermod`.
        # Nothing is run when no shared flag changed (universal_options is
        # empty), avoiding a pointless shell-out.
        def manage_user
          if universal_options != ""
            command = compile_command("usermod") do |u|
              u << universal_options
            end
            run_command(:command => command)
          end
        end
        # Delete the user with `userdel`, removing the home directory (-r)
        # only when the resource manages it.
        def remove_user
          command = "userdel"
          command << " -r" if managing_home_dir?
          command << " #{@new_resource.username}"
          run_command(:command => command)
        end
        # Determine whether the account's password is locked by parsing the
        # second field of `passwd -S` output: P (usable password) and
        # N (no password) count as unlocked, L as locked. Sets and returns
        # @locked; raises when the lock state cannot be determined.
        def check_lock
          status = popen4("passwd -S #{@new_resource.username}") do |pid, stdin, stdout, stderr|
            status_line = stdout.gets.split(' ')
            case status_line[1]
            when /^P/
              @locked = false
            when /^N/
              @locked = false
            when /^L/
              @locked = true
            end
          end
          unless status.exitstatus == 0
            raise_lock_error = false
            # we can get an exit code of 1 even when it's successful on rhel/centos (redhat bug 578534)
            if status.exitstatus == 1 && ['redhat', 'centos'].include?(node[:platform])
              # NOTE(review): passwd_version_status is assigned but never
              # read; the popen4 block communicates via raise_lock_error.
              passwd_version_status = popen4('rpm -q passwd') do |pid, stdin, stdout, stderr|
                # Only this specific passwd build exhibits the bogus exit
                # status; any other version means a real failure.
                passwd_version = stdout.gets.chomp
                unless passwd_version == 'passwd-0.73-1'
                  raise_lock_error = true
                end
              end
            else
              raise_lock_error = true
            end
            raise Chef::Exceptions::User, "Cannot determine if #{@new_resource} is locked!" if raise_lock_error
          end
          @locked
        end
        # Lock the account's password with `usermod -L`.
        def lock_user
          run_command(:command => "usermod -L #{@new_resource.username}")
        end
        # Unlock the account's password with `usermod -U`.
        def unlock_user
          run_command(:command => "usermod -U #{@new_resource.username}")
        end
        # Yield the base command (e.g. "useradd") so the caller can append
        # flags, then tack the username on the end and return the finished
        # command string.
        def compile_command(base_command)
          yield base_command
          base_command << " #{@new_resource.username}"
          base_command
        end
        # Build (and memoize) the flag string shared by useradd and
        # usermod: one flag per changed UNIVERSAL_OPTIONS field, plus home
        # directory handling (-d, and -m when we manage the directory) and
        # -o when non-unique UIDs are allowed.
        def universal_options
          @universal_options ||=
            begin
              opts = ''
              # magic allows UNIVERSAL_OPTIONS to be overridden in a subclass
              self.class::UNIVERSAL_OPTIONS.each do |field, option|
                update_options(field, option, opts)
              end
              if updating_home?
                if managing_home_dir?
                  Chef::Log.debug("#{@new_resource} managing the users home directory")
                  opts << " -m -d '#{@new_resource.home}'"
                else
                  Chef::Log.debug("#{@new_resource} setting home to #{@new_resource.home}")
                  opts << " -d '#{@new_resource.home}'"
                end
              end
              opts << " -o" if @new_resource.non_unique || @new_resource.supports[:non_unique]
              opts
            end
        end
        # Append "<option> '<value>'" to opts (mutated in place) when the
        # desired value for field differs from the current one and is set.
        def update_options(field, option, opts)
          if @current_resource.send(field) != @new_resource.send(field)
            if @new_resource.send(field)
              Chef::Log.debug("#{@new_resource} setting #{field} to #{@new_resource.send(field)}")
              opts << " #{option} '#{@new_resource.send(field)}'"
            end
          end
        end
        # Flags accepted only by useradd (not usermod): -r creates a
        # system account.
        def useradd_options
          opts = ''
          opts << " -r" if @new_resource.system
          opts
        end
        # True when the desired home directory differs from the current
        # one. Paths are normalized with Pathname#cleanpath so equivalent
        # spellings do not trigger a spurious update.
        def updating_home?
          # will return false if paths are equivalent
          # Pathname#cleanpath does a better job than ::File::expand_path (on both unix and windows)
          # ::File.expand_path("///tmp") == ::File.expand_path("/tmp") => false
          # ::File.expand_path("\\tmp") => "C:/tmp"
          return true if @current_resource.home.nil? && @new_resource.home
          @new_resource.home and Pathname.new(@current_resource.home).cleanpath != Pathname.new(@new_resource.home).cleanpath
        end
        # True when the resource asked this provider to manage (create,
        # move or remove) the home directory itself.
        def managing_home_dir?
          @new_resource.manage_home || @new_resource.supports[:manage_home]
        end
      end
    end
  end
end
| 33.896104 | 125 | 0.561877 |
38a9ce361dd3c5a63345b527a424f547bfb31823 | 16,355 | module ActiveSupport
module NumberHelper
extend ActiveSupport::Autoload
eager_autoload do
autoload :NumberConverter
autoload :NumberToRoundedConverter
autoload :NumberToDelimitedConverter
autoload :NumberToHumanConverter
autoload :NumberToHumanSizeConverter
autoload :NumberToPhoneConverter
autoload :NumberToCurrencyConverter
autoload :NumberToPercentageConverter
end
extend self
# Formats a +number+ into a US phone number (e.g., (555)
# 123-9876). You can customize the format in the +options+ hash.
#
# ==== Options
#
# * <tt>:area_code</tt> - Adds parentheses around the area code.
# * <tt>:delimiter</tt> - Specifies the delimiter to use
# (defaults to "-").
# * <tt>:extension</tt> - Specifies an extension to add to the
# end of the generated number.
# * <tt>:country_code</tt> - Sets the country code for the phone
# number.
# ==== Examples
#
# number_to_phone(5551234) # => 555-1234
# number_to_phone('5551234') # => 555-1234
# number_to_phone(1235551234) # => 123-555-1234
# number_to_phone(1235551234, area_code: true) # => (123) 555-1234
# number_to_phone(1235551234, delimiter: ' ') # => 123 555 1234
# number_to_phone(1235551234, area_code: true, extension: 555) # => (123) 555-1234 x 555
# number_to_phone(1235551234, country_code: 1) # => +1-123-555-1234
# number_to_phone('123a456') # => 123a456
#
# number_to_phone(1235551234, country_code: 1, extension: 1343, delimiter: '.')
# # => +1.123.555.1234 x 1343
def number_to_phone(number, options = {})
NumberToPhoneConverter.convert(number, options)
end
# Formats a +number+ into a currency string (e.g., $13.65). You
# can customize the format in the +options+ hash.
#
# ==== Options
#
# * <tt>:locale</tt> - Sets the locale to be used for formatting
# (defaults to current locale).
# * <tt>:precision</tt> - Sets the level of precision (defaults
# to 2).
# * <tt>:unit</tt> - Sets the denomination of the currency
# (defaults to "$").
# * <tt>:separator</tt> - Sets the separator between the units
# (defaults to ".").
# * <tt>:delimiter</tt> - Sets the thousands delimiter (defaults
# to ",").
# * <tt>:format</tt> - Sets the format for non-negative numbers
# (defaults to "%u%n"). Fields are <tt>%u</tt> for the
# currency, and <tt>%n</tt> for the number.
# * <tt>:negative_format</tt> - Sets the format for negative
# numbers (defaults to prepending an hyphen to the formatted
# number given by <tt>:format</tt>). Accepts the same fields
# than <tt>:format</tt>, except <tt>%n</tt> is here the
# absolute value of the number.
#
# ==== Examples
#
# number_to_currency(1234567890.50) # => $1,234,567,890.50
# number_to_currency(1234567890.506) # => $1,234,567,890.51
# number_to_currency(1234567890.506, precision: 3) # => $1,234,567,890.506
# number_to_currency(1234567890.506, locale: :fr) # => 1 234 567 890,51 €
# number_to_currency('123a456') # => $123a456
#
# number_to_currency(-1234567890.50, negative_format: '(%u%n)')
# # => ($1,234,567,890.50)
# number_to_currency(1234567890.50, unit: '£', separator: ',', delimiter: '')
# # => £1234567890,50
# number_to_currency(1234567890.50, unit: '£', separator: ',', delimiter: '', format: '%n %u')
# # => 1234567890,50 £
def number_to_currency(number, options = {})
NumberToCurrencyConverter.convert(number, options)
end
# Formats a +number+ as a percentage string (e.g., 65%). You can
# customize the format in the +options+ hash.
#
# ==== Options
#
# * <tt>:locale</tt> - Sets the locale to be used for formatting
# (defaults to current locale).
# * <tt>:precision</tt> - Sets the precision of the number
# (defaults to 3). Keeps the number's precision if nil.
# * <tt>:significant</tt> - If +true+, precision will be the number
# of significant_digits. If +false+, the number of fractional
# digits (defaults to +false+).
# * <tt>:separator</tt> - Sets the separator between the
# fractional and integer digits (defaults to ".").
# * <tt>:delimiter</tt> - Sets the thousands delimiter (defaults
# to "").
# * <tt>:strip_insignificant_zeros</tt> - If +true+ removes
# insignificant zeros after the decimal separator (defaults to
# +false+).
# * <tt>:format</tt> - Specifies the format of the percentage
# string The number field is <tt>%n</tt> (defaults to "%n%").
#
# ==== Examples
#
# number_to_percentage(100) # => 100.000%
# number_to_percentage('98') # => 98.000%
# number_to_percentage(100, precision: 0) # => 100%
# number_to_percentage(1000, delimiter: '.', separator: ',') # => 1.000,000%
# number_to_percentage(302.24398923423, precision: 5) # => 302.24399%
# number_to_percentage(1000, locale: :fr) # => 1 000,000%
# number_to_percentage:(1000, precision: nil) # => 1000%
# number_to_percentage('98a') # => 98a%
# number_to_percentage(100, format: '%n %') # => 100 %
def number_to_percentage(number, options = {})
NumberToPercentageConverter.convert(number, options)
end
# Formats a +number+ with grouped thousands using +delimiter+
# (e.g., 12,324). You can customize the format in the +options+
# hash.
#
# ==== Options
#
# * <tt>:locale</tt> - Sets the locale to be used for formatting
# (defaults to current locale).
# * <tt>:delimiter</tt> - Sets the thousands delimiter (defaults
# to ",").
# * <tt>:separator</tt> - Sets the separator between the
# fractional and integer digits (defaults to ".").
#
# ==== Examples
#
# number_to_delimited(12345678) # => 12,345,678
# number_to_delimited('123456') # => 123,456
# number_to_delimited(12345678.05) # => 12,345,678.05
# number_to_delimited(12345678, delimiter: '.') # => 12.345.678
# number_to_delimited(12345678, delimiter: ',') # => 12,345,678
# number_to_delimited(12345678.05, separator: ' ') # => 12,345,678 05
# number_to_delimited(12345678.05, locale: :fr) # => 12 345 678,05
# number_to_delimited('112a') # => 112a
# number_to_delimited(98765432.98, delimiter: ' ', separator: ',')
# # => 98 765 432,98
def number_to_delimited(number, options = {})
NumberToDelimitedConverter.convert(number, options)
end
# Formats a +number+ with the specified level of
# <tt>:precision</tt> (e.g., 112.32 has a precision of 2 if
# +:significant+ is +false+, and 5 if +:significant+ is +true+).
# You can customize the format in the +options+ hash.
#
# ==== Options
#
# * <tt>:locale</tt> - Sets the locale to be used for formatting
# (defaults to current locale).
# * <tt>:precision</tt> - Sets the precision of the number
# (defaults to 3). Keeps the number's precision if nil.
# * <tt>:significant</tt> - If +true+, precision will be the number
# of significant_digits. If +false+, the number of fractional
# digits (defaults to +false+).
# * <tt>:separator</tt> - Sets the separator between the
# fractional and integer digits (defaults to ".").
# * <tt>:delimiter</tt> - Sets the thousands delimiter (defaults
# to "").
# * <tt>:strip_insignificant_zeros</tt> - If +true+ removes
# insignificant zeros after the decimal separator (defaults to
# +false+).
#
# ==== Examples
#
# number_to_rounded(111.2345) # => 111.235
# number_to_rounded(111.2345, precision: 2) # => 111.23
# number_to_rounded(13, precision: 5) # => 13.00000
# number_to_rounded(389.32314, precision: 0) # => 389
# number_to_rounded(111.2345, significant: true) # => 111
# number_to_rounded(111.2345, precision: 1, significant: true) # => 100
# number_to_rounded(13, precision: 5, significant: true) # => 13.000
# number_to_rounded(13, precision: nil) # => 13
# number_to_rounded(111.234, locale: :fr) # => 111,234
#
# number_to_rounded(13, precision: 5, significant: true, strip_insignificant_zeros: true)
# # => 13
#
# number_to_rounded(389.32314, precision: 4, significant: true) # => 389.3
# number_to_rounded(1111.2345, precision: 2, separator: ',', delimiter: '.')
# # => 1.111,23
def number_to_rounded(number, options = {})
NumberToRoundedConverter.convert(number, options)
end
# Formats the bytes in +number+ into a more understandable
# representation (e.g., giving it 1500 yields 1.5 KB). This
# method is useful for reporting file sizes to users. You can
# customize the format in the +options+ hash.
#
# See <tt>number_to_human</tt> if you want to pretty-print a
# generic number.
#
# ==== Options
#
# * <tt>:locale</tt> - Sets the locale to be used for formatting
# (defaults to current locale).
# * <tt>:precision</tt> - Sets the precision of the number
# (defaults to 3).
# * <tt>:significant</tt> - If +true+, precision will be the number
# of significant_digits. If +false+, the number of fractional
# digits (defaults to +true+)
# * <tt>:separator</tt> - Sets the separator between the
# fractional and integer digits (defaults to ".").
# * <tt>:delimiter</tt> - Sets the thousands delimiter (defaults
# to "").
# * <tt>:strip_insignificant_zeros</tt> - If +true+ removes
# insignificant zeros after the decimal separator (defaults to
# +true+)
#
# ==== Examples
#
# number_to_human_size(123) # => 123 Bytes
# number_to_human_size(1234) # => 1.21 KB
# number_to_human_size(12345) # => 12.1 KB
# number_to_human_size(1234567) # => 1.18 MB
# number_to_human_size(1234567890) # => 1.15 GB
# number_to_human_size(1234567890123) # => 1.12 TB
# number_to_human_size(1234567, precision: 2) # => 1.2 MB
# number_to_human_size(483989, precision: 2) # => 470 KB
# number_to_human_size(1234567, precision: 2, separator: ',') # => 1,2 MB
# number_to_human_size(1234567890123, precision: 5) # => "1.1228 TB"
# number_to_human_size(524288000, precision: 5) # => "500 MB"
def number_to_human_size(number, options = {})
NumberToHumanSizeConverter.convert(number, options)
end
# Pretty prints (formats and approximates) a number in a way it
# is more readable by humans (eg.: 1200000000 becomes "1.2
# Billion"). This is useful for numbers that can get very large
# (and too hard to read).
#
# See <tt>number_to_human_size</tt> if you want to print a file
# size.
#
# You can also define your own unit-quantifier names if you want
# to use other decimal units (eg.: 1500 becomes "1.5
# kilometers", 0.150 becomes "150 milliliters", etc). You may
# define a wide range of unit quantifiers, even fractional ones
# (centi, deci, mili, etc).
#
# ==== Options
#
# * <tt>:locale</tt> - Sets the locale to be used for formatting
# (defaults to current locale).
# * <tt>:precision</tt> - Sets the precision of the number
# (defaults to 3).
# * <tt>:significant</tt> - If +true+, precision will be the number
# of significant_digits. If +false+, the number of fractional
# digits (defaults to +true+)
# * <tt>:separator</tt> - Sets the separator between the
# fractional and integer digits (defaults to ".").
# * <tt>:delimiter</tt> - Sets the thousands delimiter (defaults
# to "").
# * <tt>:strip_insignificant_zeros</tt> - If +true+ removes
# insignificant zeros after the decimal separator (defaults to
# +true+)
# * <tt>:units</tt> - A Hash of unit quantifier names. Or a
# string containing an i18n scope where to find this hash. It
# might have the following keys:
# * *integers*: <tt>:unit</tt>, <tt>:ten</tt>,
# <tt>:hundred</tt>, <tt>:thousand</tt>, <tt>:million</tt>,
# <tt>:billion</tt>, <tt>:trillion</tt>,
# <tt>:quadrillion</tt>
# * *fractionals*: <tt>:deci</tt>, <tt>:centi</tt>,
# <tt>:mili</tt>, <tt>:micro</tt>, <tt>:nano</tt>,
# <tt>:pico</tt>, <tt>:femto</tt>
# * <tt>:format</tt> - Sets the format of the output string
# (defaults to "%n %u"). The field types are:
# * %u - The quantifier (ex.: 'thousand')
# * %n - The number
#
# ==== Examples
#
# number_to_human(123) # => "123"
# number_to_human(1234) # => "1.23 Thousand"
# number_to_human(12345) # => "12.3 Thousand"
# number_to_human(1234567) # => "1.23 Million"
# number_to_human(1234567890) # => "1.23 Billion"
# number_to_human(1234567890123) # => "1.23 Trillion"
# number_to_human(1234567890123456) # => "1.23 Quadrillion"
# number_to_human(1234567890123456789) # => "1230 Quadrillion"
# number_to_human(489939, precision: 2) # => "490 Thousand"
# number_to_human(489939, precision: 4) # => "489.9 Thousand"
# number_to_human(1234567, precision: 4,
# significant: false) # => "1.2346 Million"
# number_to_human(1234567, precision: 1,
# separator: ',',
# significant: false) # => "1,2 Million"
#
# number_to_human(500000000, precision: 5) # => "500 Million"
# number_to_human(12345012345, significant: false) # => "12.345 Billion"
#
# Non-significant zeros after the decimal separator are stripped
# out by default (set <tt>:strip_insignificant_zeros</tt> to
# +false+ to change that):
#
# number_to_human(12.00001) # => "12"
# number_to_human(12.00001, strip_insignificant_zeros: false) # => "12.0"
#
# ==== Custom Unit Quantifiers
#
# You can also use your own custom unit quantifiers:
# number_to_human(500000, units: { unit: 'ml', thousand: 'lt' }) # => "500 lt"
#
# If in your I18n locale you have:
#
# distance:
# centi:
# one: "centimeter"
# other: "centimeters"
# unit:
# one: "meter"
# other: "meters"
# thousand:
# one: "kilometer"
# other: "kilometers"
# billion: "gazillion-distance"
#
# Then you could do:
#
# number_to_human(543934, units: :distance) # => "544 kilometers"
# number_to_human(54393498, units: :distance) # => "54400 kilometers"
# number_to_human(54393498000, units: :distance) # => "54.4 gazillion-distance"
# number_to_human(343, units: :distance, precision: 1) # => "300 meters"
# number_to_human(1, units: :distance) # => "1 meter"
# number_to_human(0.34, units: :distance) # => "34 centimeters"
def number_to_human(number, options = {})
NumberToHumanConverter.convert(number, options)
end
end
end
| 47.268786 | 106 | 0.568389 |
916c205642bf5bbda5b8f94d5368bd7ae3f9cac2 | 62,886 | require 'pedant/rspec/common'
require 'pedant/acl'
describe "ACL API", :acl do
include Pedant::ACL
# Generate random string identifier prefixed with current pid
def rand_id
"#{Process.pid}_#{rand(10**7...10**8).to_s}"
end
# (temporarily?) deprecating /users/*/_acl endpoint due to its broken state and lack of usefulness
skip "/users/<name>/_acl endpoint" do
let(:username) { platform.admin_user.name }
let(:request_url) { "#{platform.server}/users/#{username}/_acl" }
let(:read_access_group) { platform.test_org.name + "_read_access_group"}
let(:read_groups) { [read_access_group] }
context "GET /users/<user>/_acl" do
let(:actors) { ["pivotal", username].uniq }
let(:groups) { [] }
let(:acl_body) {{
"create" => {"actors" => actors, "groups" => groups},
"read" => {"actors" => actors, "groups" => read_groups},
"update" => {"actors" => actors, "groups" => groups},
"delete" => {"actors" => actors, "groups" => groups},
"grant" => {"actors" => actors, "groups" => groups}
}}
context "superuser" do
it "can get user acl" do
get(request_url, platform.superuser).should look_like({
:status => 200,
:body => acl_body
})
end
end
context "admin user" do
it "can get user acl" do
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
end
%w(create read update delete grant).each do |permission|
context "/users/<user>/_acl/#{permission} endpoint" do
if (permission == "read")
smoketest = :smoke
else
smoketest = :notsmoke
end
let(:acl_url) { "#{platform.server}/users/#{username}/_acl" }
let(:request_url) { "#{platform.server}/users/#{username}/_acl/#{permission}" }
context "PUT /users/<user>/_acl/#{permission}" do
let(:actors) { ["pivotal", username].uniq }
let(:groups) { [] }
let(:default_body) {{
"create" => {"actors" => actors, "groups" => groups},
"read" => {"actors" => actors, "groups" => read_groups},
"update" => {"actors" => actors, "groups" => groups},
"delete" => {"actors" => actors, "groups" => groups},
"grant" => {"actors" => actors, "groups" => groups}
}}
let(:request_body) {{
permission => {
"actors" => ["pivotal", platform.admin_user.name,
platform.non_admin_user.name],
"groups" => groups
}
}}
after :each do
reset_body = {permission => default_body[permission]}
put(request_url, platform.admin_user,
:payload => reset_body).should look_like({
:status => 200
})
# Make sure everything's the same again -- this could really screw up the
# rest of the test suite if the permissions aren't right
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
context "admin user", smoketest do
it "can modify ACL" do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 200
})
modified_body = default_body.dup;
modified_body[permission] = request_body[permission]
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => modified_body
})
end
end
context "default normal user", smoketest do
it "returns 403", :authorization do
put(request_url, platform.non_admin_user,
:payload => request_body).should look_like({
:status => 403
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "default normal client" do
it "returns 401", :authentication do
put(request_url, platform.non_admin_client,
:payload => request_body).should look_like({
:status => 401
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "outside user" do
it "returns 403", :authorization do
put(request_url, outside_user,
:payload => request_body).should look_like({
:status => 403
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "invalid user" do
it "returns 401", :authentication do
put(request_url, invalid_user,
:payload => request_body).should look_like({
:status => 401
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
#
          # Nonexistent users are just dropped (perhaps this should be a 400, to match
          # organizations/<object>/_acl).
context "malformed requests" do
context "invalid actor" do
let(:request_body) {{
permission => {
"actors" => ["pivotal", "bogus", platform.admin_user.name],
"groups" => permission == "read" ? read_groups : groups
}
}}
it "returns 200" do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 200
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "invalid group", :validation do
let(:request_body) {{
permission => {
"actors" => ["pivotal", platform.admin_user.name,
platform.non_admin_user.name],
"groups" => ["admins", "bogus"]
}
}}
it "returns 400", :validation do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 400
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "missing actors", :validation do
let(:request_body) {{
permission => {
"groups" => groups
}
}}
it "returns 400", :validation do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 400
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "missing groups", :validation do
let(:request_body) {{
permission => {
"actors" => ["pivotal", "bogus", platform.admin_user.name,
platform.non_admin_user.name]
}
}}
it "returns 400", :validation do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 400
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "empty body", :validation do
let(:request_body) { {} }
it "returns 400", :validation do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 400
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
end # context malformed requests
end
end
#
# There's a clause 'with modified acls' for organizations objects below that should be extended
# here, but some of the semantics around what they should be are unclear to me
#
end
end
context "/organizations/_acl endpoint" do
let(:request_url) { api_url("organizations/_acl") }
context "GET /organizations/_acl" do
let(:actors) { ["pivotal"] }
let(:groups) { ["admins"] }
let(:read_groups) { ["admins", "users"] }
let(:acl_body) {{
"create" => {"actors" => actors, "groups" => groups},
"read" => {"actors" => actors, "groups" => read_groups},
"update" => {"actors" => actors, "groups" => groups},
"delete" => {"actors" => actors, "groups" => groups},
"grant" => {"actors" => actors, "groups" => groups}
}}
context "admin user" do
it "can get object ACL" do
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
context "default normal user" do
it "returns 403", :authorization do
get(request_url, platform.non_admin_user).should look_like({
:status => 403
})
end
end
context "default client" do
it "returns 403", :authorization do
get(request_url, platform.non_admin_client).should look_like({
:status => 403
})
end
end
context "outside user" do
it "returns 403", :authorization do
get(request_url, outside_user).should look_like({
:status => 403
})
end
end
context "invalid user" do
it "returns 401", :authentication do
get(request_url, invalid_user).should look_like({
:status => 401
})
end
end
context "with modified ACLs" do
after :each do
%w(create read update delete grant).each do |perm|
reset_body = { perm => acl_body[perm] }
put("#{request_url}/#{perm}", superuser,
:payload => reset_body).should look_like({
:status => 200
})
end
# Make sure everything's the same again -- this could really screw up the
# rest of the test suite if the permissions aren't right -- in the long
# run this is an obvious candidate for moving to a different org when we
# support multi-org tests.
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
context "when normal user granted all permissions except GRANT" do
it "returns 403", :authorization do
restrict_permissions_to("organizations",
platform.non_admin_user => ['create', 'read', 'update', 'delete'])
get(request_url, platform.non_admin_user).should look_like({
:status => 403
})
end
end
context "when normal client granted all permissions except GRANT" do
it "returns 403", :authorization, :smoke do
restrict_permissions_to("organizations",
platform.non_admin_client => ['create', 'read', 'update', 'delete'])
get(request_url, platform.non_admin_client).should look_like({
:status => 403
})
end
end
context "when normal user granted GRANT permission" do
it "can get ACL" do
restrict_permissions_to("organizations",
platform.non_admin_user => ['grant'])
get(request_url, platform.non_admin_user).should look_like({
:status => 200
})
end
end
context "when normal client granted GRANT permission" do
it "can get ACL", :smoke do
restrict_permissions_to("organizations",
platform.non_admin_client => ['grant'])
get(request_url, platform.non_admin_client).should look_like({
:status => 200
})
end
end
end # context with modified ACLs
end # context GET /organizations/_acl
context "PUT /organizations/_acl" do
context "admin user" do
it "returns 405" do
put(request_url, platform.admin_user).should look_like({
:status => 405
})
end
end
end # context PUT /organizations/_acl
context "POST /organizations/_acl" do
context "admin user" do
it "returns 405" do
post(request_url, platform.admin_user).should look_like({
:status => 405
})
end
end
end # context POST /organizations/_acl
context "DELETE /organizations/_acl" do
context "admin user" do
it "returns 405" do
delete(request_url, platform.admin_user).should look_like({
:status => 405
})
end
end
end # context DELETE /organizations/_acl
end # context /organizations/_acl endpoint
%w(create read update delete grant).each do |permission|
context "/organizations/_acl/#{permission} endpoint" do
      # Don't run a smoke test for every permission (to keep the smoke test count
      # from being unnecessarily repetitive)
if (permission == "read")
smoketest = :smoke
else
smoketest = :notsmoke
end
let(:acl_url) { api_url("organizations/_acl") }
let(:request_url) { api_url("organizations/_acl/#{permission}") }
context "PUT /organizations/_acl/#{permission}" do
let(:actors) { ["pivotal"] }
let(:groups) { ["admins"] }
let(:read_groups) { ["admins", "users"] }
let(:default_body) {{
"create" => {"actors" => actors, "groups" => groups},
"read" => {"actors" => actors, "groups" => read_groups},
"update" => {"actors" => actors, "groups" => groups},
"delete" => {"actors" => actors, "groups" => groups},
"grant" => {"actors" => actors, "groups" => groups}
}}
let(:request_body) {{
permission => {
"actors" => ["pivotal", platform.admin_user.name,
platform.non_admin_user.name].uniq,
"groups" => groups
}
}}
after :each do
reset_body = {permission => default_body[permission]}
put(request_url, platform.admin_user,
:payload => reset_body).should look_like({
:status => 200
})
# Make sure everything's the same again -- this could really screw up the
# rest of the test suite if the permissions aren't right
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
context "admin user" do
it "can modify ACL" do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 200
})
modified_body = default_body.dup;
modified_body[permission] = request_body[permission]
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => modified_body
})
end
end
context "default normal user" do
it "returns 403", :authorization do
put(request_url, platform.non_admin_user,
:payload => request_body).should look_like({
:status => 403
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "default normal client" do
it "returns 403", :authorization do
put(request_url, platform.non_admin_client,
:payload => request_body).should look_like({
:status => 403
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "outside user" do
it "returns 403", :authorization do
put(request_url, outside_user,
:payload => request_body).should look_like({
:status => 403
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "invalid user" do
it "returns 401", :authentication do
put(request_url, invalid_user,
:payload => request_body).should look_like({
:status => 401
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "malformed requests" do
context "invalid actor" do
let(:request_body) {{
permission => {
"actors" => ["pivotal", "bogus", platform.admin_user.name,
platform.non_admin_user.name],
"groups" => groups
}
}}
it "returns 400", :validation do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 400
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "invalid group" do
let(:request_body) {{
permission => {
"actors" => ["pivotal", platform.admin_user.name,
platform.non_admin_user.name],
"groups" => ["admins", "bogus"]
}
}}
it "returns 400", :validation do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 400
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "missing actors" do
let(:request_body) {{
permission => {
"groups" => groups
}
}}
it "returns 400", :validation do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 400
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "missing groups" do
let(:request_body) {{
permission => {
"actors" => ["pivotal", "bogus", platform.admin_user.name,
platform.non_admin_user.name]
}
}}
it "returns 400", :validation do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 400
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
context "empty body" do
let(:request_body) { {} }
it "returns 400", :validation do
put(request_url, platform.admin_user,
:payload => request_body).should look_like({
:status => 400
})
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
end
end # context malformed requests
context "with modified ACLs" do
after :each do
%w(create read update delete grant).each do |perm|
reset_body = { perm => default_body[perm] }
put("#{acl_url}/#{perm}", superuser,
:payload => reset_body).should look_like({
:status => 200
})
end
# Make sure everything's the same again -- this could really screw up the
# rest of the test suite if the permissions aren't right
get(acl_url, platform.admin_user).should look_like({
:status => 200,
:body => default_body
})
end
context "when normal user granted all permissions except GRANT" do
# We only run the smoke tests for read permission (set above)
it "returns 403", :authorization do
restrict_permissions_to("organizations",
platform.non_admin_user => ['create', 'read', 'update', 'delete'])
put(request_url, platform.non_admin_user,
:payload => request_body).should look_like({
:status => 403
})
end
end
context "when normal client granted all permissions except GRANT" do
# We only run the smoke tests for read permission (set above)
it "returns 403", :authorization do
restrict_permissions_to("organizations",
platform.non_admin_client => ['create', 'read', 'update', 'delete'])
put(request_url, platform.non_admin_client,
:payload => request_body).should look_like({
:status => 403
})
end
end
context "when normal user granted GRANT permission" do
# We only run the smoke tests for read permission (set above)
it "can modify ACL", smoketest do
restrict_permissions_to("organizations",
platform.non_admin_user => ['grant'])
put(request_url, platform.non_admin_user,
:payload => request_body).should look_like({
:status => 200
})
end
end
context "when normal client granted GRANT permission" do
# We only run the smoke tests for read permission (set above)
it "can modify ACL" do
restrict_permissions_to("organizations",
platform.non_admin_client => ['grant'])
put(request_url, platform.non_admin_client,
:payload => request_body).should look_like({
:status => 200
})
end
end
end # context with modified ACLs
end # context PUT /organizations/_acl/<permission>
context "GET /organizations/_acl/#{permission}" do
context "admin user" do
it "returns 405" do
get(request_url, platform.admin_user).should look_like({
:status => 405
})
end
end
end # context GET /organizations/_acl/<permission>
context "POST /organizations/_acl/#{permission}" do
context "admin user" do
it "returns 405" do
post(request_url, platform.admin_user).should look_like({
:status => 405
})
end
end
end # context POST /organizations/_acl/<permission>
context "DELETE /organizations/_acl/#{permission}" do
context "admin user" do
it "returns 405" do
delete(request_url, platform.admin_user).should look_like({
:status => 405
})
end
end
end # context DELETE /organizations/_acl/<permission>
end # context /organizations/_acl/<permission> endpoint
end # loop (each) over permissions
# Special case that doesn't fit into the generic behaviors above - specifically
# when a client & a user of the same name exist, updates to an acl specifying
# the common name as the actor should fail with a 422, because we have no
# way of knowing if the caller wanted to add the client or the user to the ACL.
# Ambiguity case: when both a client and a user share a name, an ACL update
# that names the shared actor via the legacy "actors" field cannot be resolved,
# so the server must reject it (422) unless the caller disambiguates with the
# granular "clients"/"users" fields.
context "when a client exists with the same name as a user", :validation do
  let(:admin_requestor){admin_user}
  let(:requestor){admin_requestor}
  # Random suffix keeps parallel test runs from colliding on the shared name.
  let(:shared_name) { "pedant-acl-#{rand_id}" }
  let(:request_url) { api_url("/clients/#{shared_name}/_acl/read") }
  # Legacy-style body: names the shared actor in "actors", which is ambiguous.
  let(:acl_request_body) {
    { read: { actors: ['pivotal', shared_name],
              groups: ['admins'] } }
  }
  before :each do
    # Create both a user and a client with the same name. associate: false so
    # org membership can be controlled per-context below.
    @user = platform.create_user(shared_name, associate: false)
    @client = platform.create_client(shared_name, platform.test_org)
  end
  after :each do
    platform.delete_user(@user)
    platform.delete_client(@client)
  end

  context "and the user is a member of the organization" do
    before :each do
      platform.associate_user_with_org(platform.test_org.name, @user)
    end

    it "updates of the object ACL results in a 422 due to ambiguous request" do
      expect(put(request_url, platform.admin_user, payload: acl_request_body)).
        to look_like(status: 422)
    end

    context "and 'clients' and 'users' fields are provided in the request" do
      # Disambiguated body: empty "actors" plus explicit "users"/"clients".
      let(:acl_request_body) {
        { read: { "actors" => [],
                  "users" => ['pivotal', shared_name ],
                  "clients" => [ shared_name ],
                  "groups" => ['admins'] } }
      }
      it "updates of the object ACL using 'clients' and 'users' are successful" do
        expect(put(request_url, platform.admin_user, payload: acl_request_body))
          .to have_status_code 200
        # Verify that the returned list contains this actor twice (once
        # as client and once as user), since we don't separate them in the GET.
        res = get(api_url("/clients/#{shared_name}/_acl"), platform.admin_user)
        read_ace = JSON.parse(res.body)['read']
        expect(read_ace['actors'].sort).to eq [shared_name, shared_name, "pivotal"]
      end
    end
  end

  context "and the user is not a member of the organization" do
    it "there is no ambiguity and the object ACL update succeeds" do
      # Only the client resolves inside the org, so the legacy body is fine.
      expect(put(request_url, platform.admin_user, payload: acl_request_body))
        .to have_status_code 200
    end
  end
end
context "/<type>/<name>/_acl endpoint" do
# TODO: Sanity check: users don't seem to have any ACLs, or at least, nothing is
# accessible from external API as far as I can tell:
# - [jkeiser] Users have ACLs, but they are at /users/NAME/_acl
%w(clients groups containers data nodes roles environments cookbooks policies policy_groups).each do |type|
context "for #{type} type" do
let(:new_object) { "new-object" }
let(:creation_url) { api_url(type) }
let(:deletion_url) { api_url("#{type}/#{new_object}") }
let(:request_url) { api_url("#{type}/#{new_object}/_acl") }
let(:setup_user) { platform.admin_user }
# Body used to create object (generally overriden):
let(:creation_body) {{
"name" => new_object
}}
# Yeah, this is confusing as hell, but for whatever insane reason the
# default ACLs are different on almost every different types -- so these are
# the defaults of defaults, which are overridden below for the different
# types:
let(:actors) { ["pivotal", platform.admin_user.name].uniq }
let(:users) { ["pivotal", platform.admin_user.name].uniq }
let(:clients) { [] }
let(:groups) { ["admins"] }
let(:read_groups) { groups }
let(:update_groups) { groups }
let(:delete_groups) { groups }
# Usually still ["admins"] even when the other groups aren't:
let(:grant_groups) { ["admins"] }
let(:acl_body) {{
"create" => {"actors" => actors, "groups" => groups},
"read" => {"actors" => actors, "groups" => read_groups},
"update" => {"actors" => actors, "groups" => update_groups},
"delete" => {"actors" => actors, "groups" => delete_groups},
"grant" => {"actors" => actors, "groups" => grant_groups}
}}
let(:granular_acl_body) {{
"create" => {"actors" => [], "users" => users, "clients" => clients, "groups" => groups},
"read" => {"actors" => [], "users" => users, "clients" => clients, "groups" => read_groups},
"update" => {"actors" => [], "users" => users, "clients" => clients, "groups" => update_groups},
"delete" => {"actors" => [], "users" => users, "clients" => clients, "groups" => delete_groups},
"grant" => {"actors" => [], "users" => users, "clients" => clients, "groups" => grant_groups},
}}
# Mainly this is for the different creation bodies, but also for the
# different default ACLs for each type, etc. We love consistency!
case type
when "clients"
let(:actors) {
# As long as 'new_object' isn't a validator (and you're on
# the Erchef client endpoint), new_object will be in the
# actors list
["pivotal", new_object, setup_user.name].uniq
}
let(:clients) { [ new_object ] }
let(:users) { ["pivotal", setup_user.name].uniq }
let(:read_groups) { ["users", "admins"] }
let(:delete_groups) { ["users", "admins"] }
when "groups"
let(:creation_body) {{
"id" => new_object
}}
let(:read_groups) { ["users", "admins"] }
when "containers"
let(:creation_body) {{
"id" => new_object,
"containerpath" => "/"
}}
let(:actors) { [platform.admin_user.name] }
let(:users) { [platform.admin_user.name] }
let(:groups) { [] }
let(:grant_groups) { [] }
when "data"
let(:read_groups) { ["users","clients", "admins"] }
let(:groups) { ["users","admins"] }
when "nodes"
let(:groups) { ["users", "clients", "admins"] }
let(:update_groups) { ["users", "admins"] }
let(:delete_groups) { ["users", "admins"] }
when "roles"
let(:creation_body) {{
"name" => new_object,
"json_class" => "Chef::Role"
}}
let(:groups) { ["users", "admins"] }
let(:read_groups) { ["users", "clients", "admins"] }
when "environments"
let(:creation_body) {{
"name" => new_object,
"json_class" => "Chef::Environment"
}}
let(:groups) { ["users", "admins"] }
let(:read_groups) { ["users", "clients", "admins"] }
when "cookbooks"
let(:version) { "1.0.0" }
let(:creation_url) { api_url("#{type}/#{new_object}/#{version}") }
let(:deletion_url) { creation_url }
let(:creation_body) {{
"name" => "#{new_object}-#{version}",
"cookbook_name" => new_object,
"version" => version,
"json_class" => "Chef::CookbookVersion",
"chef_type" => "cookbook_version",
"frozen?" => false,
"recipes" => [],
"metadata" => {
"version" => version,
"name" => new_object,
"maintainer" => "spacemonkey",
"maintainer_email" => "[email protected]",
"description" => "",
"long_description" => "",
"license" => "",
"dependencies" => {},
"attributes" => {},
"recipes" => {}
}
}}
let(:groups) { ["users", "admins"] }
let(:read_groups) { ["users", "clients", "admins"] }
when "policies"
let(:creation_url) { api_url("#{type}/#{new_object}/revisions") }
let(:creation_body) {{
"revision_id" => "909c26701e291510eacdc6c06d626b9fa5350d25",
"name" => new_object,
"run_list" => [ "recipe[policyfile_demo::default]" ],
"cookbook_locks" => {
"policyfile_demo" => {
"identifier" => "f04cc40faf628253fe7d9566d66a1733fb1afbe9",
"version" => "1.2.3"
}
}
}}
when "policy_groups"
let(:creation_url) {
api_url("#{type}/#{new_object}/policies/acl_test_policy")
}
let(:creation_body) {{
"revision_id" => "909c26701e291510eacdc6c06d626b9fa5350d25",
"name" => "acl_test_policy",
"run_list" => [ "recipe[policyfile_demo::default]" ],
"cookbook_locks" => {
"policyfile_demo" => {
"identifier" => "f04cc40faf628253fe7d9566d66a1733fb1afbe9",
"version" => "1.2.3"
}
}
}}
end
before :each do
  # cookbooks and policy_groups are created with PUT; every other type uses
  # POST ("Inconsistent API needs a PUT here. We love consistency!").
  creation_verb = %w(cookbooks policy_groups).include?(type) ? :put : :post
  send(creation_verb, creation_url, setup_user,
       :payload => creation_body).should look_like({
    :status => 201
  })
end

after :each do
  if type == "policy_groups"
    # Policy groups are only created indirectly when we create policies;
    # deleting the group doesn't delete the policy so we do it explicitly
    delete(api_url("policies/acl_test_policy"),
           platform.admin_user).should look_like({
      :status => 200
    })
  end
  delete(deletion_url, platform.admin_user).should look_like({
    :status => 200
  })
end
context "GET /#{type}/<name>/_acl" do
context "admin user" do
it "can get object ACL" do
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body_exact => acl_body
})
end
it "can get a granular object ACL" do
get("#{request_url}?detail=granular", platform.admin_user).should look_like({
:status => 200,
:body_exact => granular_acl_body
})
end
end
context "default normal user" do
it "returns 403", :authorization do
get(request_url, platform.non_admin_user).should look_like({
:status => 403
})
end
end
context "default client" do
it "returns 403", :authorization do
get(request_url, platform.non_admin_client).should look_like({
:status => 403
})
end
end
context "outside user" do
it "returns 403", :authorization do
get(request_url, outside_user).should look_like({
:status => 403
})
end
end
context "invalid user" do
it "returns 401", :authentication do
get(request_url, invalid_user).should look_like({
:status => 401
})
end
end
context "when normal user granted all permissions except GRANT" do
it "returns 403", :authorization do
["create", "read", "update", "delete"].each do |perm|
put("#{request_url}/#{perm}", platform.admin_user,
:payload => {perm => {
"actors" => [platform.non_admin_user.name,
platform.admin_user.name, "pivotal"],
"groups" => ["admins"]
}}).should look_like({
:status => 200
})
end
get(request_url, platform.non_admin_user).should look_like({
:status => 403
})
end
end
context "when normal client granted all permissions except GRANT" do
it "returns 403", :authorization do
["create", "read", "update", "delete"].each do |perm|
put("#{request_url}/#{perm}", platform.admin_user,
:payload => {perm => {
"actors" => [platform.non_admin_client.name,
platform.admin_user.name, "pivotal"],
"groups" => ["admins"]
}}).should look_like({
:status => 200
})
end
get(request_url, platform.non_admin_client).should look_like({
:status => 403
})
end
end
context "when normal user granted GRANT permission" do
it "can get object ACL" do
put("#{request_url}/grant", platform.admin_user,
:payload => {"grant" => {
"actors" => [platform.non_admin_user.name,
platform.admin_user.name, "pivotal"],
"groups" => ["admins"]
}}).should look_like({
:status => 200
})
get(request_url, platform.non_admin_user).should look_like({
:status => 200
})
end
end
context "when normal client granted GRANT permission" do
it "can get object ACL" do
put("#{request_url}/grant", platform.admin_user,
:payload => {"grant" => {
"actors" => [platform.non_admin_client.name,
platform.admin_user.name, "pivotal"],
"groups" => ["admins"]
}}).should look_like({
:status => 200
})
get(request_url, platform.non_admin_client).should look_like({
:status => 200
})
end
end
# Regression test for OC-1702: reading an ACL that still references a group
# which has since been deleted must not error out.
context "OC-1702 - when containing a missing group" do
  let(:missing_group) { "missing-group" }
  let(:updated_read) { {"actors" => actors, "groups" => [missing_group] + read_groups} }

  before(:each) do
    # create the groups
    post(api_url("groups"), platform.admin_user,
      :payload => {"id" => missing_group}).should look_like({:status => 201})
    # add the group to the read ace
    put("#{request_url}/read",
      platform.admin_user,
      :payload => { "read" => updated_read }).should look_like(:status => 200)
    # delete the group, leaving a dangling reference in the read ACE
    delete(api_url("groups/#{missing_group}"), platform.admin_user)
  end

  after(:each) do
    # Idempotent cleanup in case the before hook failed partway through.
    delete(api_url("groups/#{missing_group}"), platform.admin_user)
  end

  it "should return the acl", :validation do
    # Expect the default acl_body back — presumably the deleted group is
    # filtered out of the response (:body is a subset match, not exact).
    get(request_url, platform.admin_user).should look_like({
      :status => 200,
      :body => acl_body
    })
  end
end
end # context GET /<type>/<name>/_acl
# The /<type>/<name>/_acl resource itself is read-only (updates go to the
# per-permission sub-resource), so PUT/POST/DELETE must all return 405.
# Uses the modern expect(...).to syntax for consistency with the expect-style
# assertions already used elsewhere in this file.
context "PUT /#{type}/<name>/_acl" do
  context "admin user" do
    it "returns 405" do
      expect(put(request_url, platform.admin_user)).to look_like({
        :status => 405
      })
    end
  end
end # context PUT /<type>/<name>/_acl

context "POST /#{type}/<name>/_acl" do
  context "admin user" do
    it "returns 405" do
      expect(post(request_url, platform.admin_user)).to look_like({
        :status => 405
      })
    end
  end
end # context POST /<type>/<name>/_acl

context "DELETE /#{type}/<name>/_acl" do
  context "admin user" do
    it "returns 405" do
      expect(delete(request_url, platform.admin_user)).to look_like({
        :status => 405
      })
    end
  end
end # context DELETE /<type>/<name>/_acl
%w(create read update delete grant).each do |permission|
context "/#{type}/<name>/_acl/#{permission} endpoint" do
# Don't run a smoke test test for every permission (to keep the smoke
# test count from being unnecessarily repetetive). Also avoid minor
# overlap with somewhat similar group and container tests
if (permission == "update" && type != "groups" && type != "containers")
smoketest = :smoke
else
smoketest = :nosmoke
end
let(:permission_request_url) { "#{request_url}/#{permission}" }
context "GET /#{type}/<name>/_acl/#{permission}" do
context "admin user" do
it "returns 405" do
get(permission_request_url, platform.admin_user).should look_like({
:status => 405
})
end
end
end # context GET /<type>/<name>/_acl/<permission>
context "PUT /#{type}/<name>/_acl/#{permission}" do
let(:clients) { [platform.non_admin_client.name] }
let(:users) {
[platform.non_admin_user.name, platform.admin_user.name, "pivotal"]
}
let(:groups_and_actors) {{
"actors" => [platform.non_admin_user.name,
platform.admin_user.name, "pivotal"].uniq + clients,
"groups" => ["admins", "users", "clients"]
}}
let(:update_body) {{
permission => groups_and_actors
}}
context "admin user" do
context "using the new 'users' and 'clients' attributes" do
let(:update_body) {
{ permission => {
"actors" => [], # back-compat, empty actors and
# clients/users present indicates
# that clients/users should be used.
"users" => users,
"clients" => clients,
"groups" => ["admins", "users", "clients"]}
}
}
let(:response_body) {
{ permission => groups_and_actors }
}
it "can update ACL" do
put(permission_request_url, platform.admin_user,
:payload => update_body).should have_status_code 200
# Note thet resulting GET body should look the same -
# we are not returning clients/users separately at this point
# to avoid a confusing response that includes both 'actors'
# and 'clients/users', when we only accept one of those options containing
# values. If we revisit and determine it's needed,
# it will be a new APIv2 behavior.
check_body = acl_body
check_body[permission] = groups_and_actors
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => check_body
})
end
end
context "using the legacy 'actors' attribute" do
let(:update_body) { { permission => groups_and_actors } }
it "can update ACL" do
put(permission_request_url, platform.admin_user,
:payload => update_body).should look_like({
:status => 200
})
check_body = acl_body
check_body[permission] = groups_and_actors
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => check_body
})
end
end
end
context "default normal user" do
it "returns 403", :authorization do
put(permission_request_url, platform.non_admin_user,
:payload => update_body).should look_like({
:status => 403
})
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
context "default client" do
it "returns 403", :authorization do
put(permission_request_url, platform.non_admin_client,
:payload => update_body).should look_like({
:status => 403
})
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
context "outside user" do
it "returns 403", :authorization do
put(permission_request_url, outside_user,
:payload => update_body).should look_like({
:status => 403
})
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
context "invalid user" do
it "returns 401", :authentication do
put(permission_request_url, invalid_user,
:payload => update_body).should look_like({
:status => 401
})
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
context "malformed requests", :validation do
context "invalid actor" do
let(:update_body) {{
permission => {
"actors" => ["pivotal", "bogus", platform.admin_user.name,
platform.non_admin_user.name],
"groups" => ["admins", "users", "clients"]
}
}}
it "returns 400", :validation do
put(permission_request_url, platform.admin_user,
:payload => update_body).should look_like({
:status => 400
})
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
context "includes valid actor list and valid client list" do
let(:update_body) {
{
permission => {
"actors" => ["pivotal", platform.admin_user.name,
platform.non_admin_user.name],
"clients" => [platform.non_admin_client.name],
"groups" => ["admins", "users", "clients"]
}
}
}
it "returns 400", :validation do
response = put(permission_request_url, platform.admin_user,
:payload => update_body)
expect(response).to have_status_code 400
end
end
context "includes valid actor list and valid user list" do
let(:update_body) {
{
permission => {
"actors" => ["pivotal", platform.admin_user.name,
platform.non_admin_user.name],
"users" => ["pivotal"],
"groups" => ["admins", "users", "clients"]
}
}
}
it "returns 400", :validation do
response = put(permission_request_url, platform.admin_user,
:payload => update_body)
expect(response).to have_status_code 400
end
end
context "includes valid actor list and valid user and client lists" do
let(:update_body) {
{
permission => {
"actors" => ["pivotal", platform.admin_user.name,
platform.non_admin_user.name],
"users" => ["pivotal"],
"clients" => [ platform.non_admin_client.name ],
"groups" => ["admins", "users", "clients"]
}
}
}
it "returns 400", :validation do
response = put(permission_request_url, platform.admin_user,
:payload => update_body)
expect(response).to have_status_code 400
end
end
context "invalid client" do
let(:update_body) {{
permission => {
"actors" => [],
"users" => ["pivotal", platform.admin_user.name,
platform.non_admin_user.name],
"clients" => [platform.non_admin_client.name, "bogus"],
"groups" => ["admins", "users", "clients"]
}
}}
it "returns 400", :validation do
response = put(permission_request_url, platform.admin_user,
:payload => update_body)
expect(response).to have_status_code 400
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
context "invalid group" do
let(:update_body) {{
permission => {
"actors" => ["pivotal", platform.admin_user.name,
platform.non_admin_user.name],
"groups" => ["admins", "users", "clients", "bogus"]
}
}}
it "returns 400", :validation do
put(permission_request_url, platform.admin_user,
:payload => update_body).should have_status_code 400
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
context "missing actors" do
let(:update_body) {{
permission => {
"groups" => ["admins", "users", "clients"]
}
}}
it "returns 400", :validation do
put(permission_request_url, platform.admin_user,
:payload => update_body).should look_like({
:status => 400
})
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
context "missing groups" do
let(:update_body) {{
permission => {
"actors" => ["pivotal", platform.admin_user.name,
platform.non_admin_user.name]
}
}}
it "returns 400", :validation do
put(permission_request_url, platform.admin_user,
:payload => update_body).should look_like({
:status => 400
})
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
context "empty body" do
let(:update_body) { {} }
it "returns 400", :validation do
put(permission_request_url, platform.admin_user,
:payload => update_body).should look_like({
:status => 400
})
get(request_url, platform.admin_user).should look_like({
:status => 200,
:body => acl_body
})
end
end
end # context malformed requests
context "normal user with all permissions except GRANT", :authorization do
# We only run the smoke tests for read permission (set above)
it "returns 403", smoketest do
["create", "read", "update", "delete"].each do |perm|
put("#{request_url}/#{perm}", platform.admin_user,
:payload => {perm => {
"actors" => [platform.non_admin_user.name,
platform.admin_user.name, "pivotal"],
"groups" => []
}}).should look_like({
:status => 200
})
end
put(permission_request_url, platform.non_admin_user,
:payload => update_body).should look_like({
:status => 403
})
end
end
context "normal user with GRANT permission" do
# We only run the smoke tests for read permission (set above)
it "can update ACL", smoketest do
put("#{request_url}/grant", platform.admin_user,
:payload => {"grant" => {
"actors" => [platform.non_admin_user.name,
platform.admin_user.name, "pivotal"],
# Per ACL policy, non-superusers can't remove the admins
# group from the grant ACL.
"groups" => ["admins"]
}}).should look_like({
:status => 200
})
put(permission_request_url, platform.non_admin_user,
:payload => update_body).should look_like({
:status => 200
})
end
end
context "normal client with all permissions except GRANT", :authorization do
it "returns 403", :authorization do
["create", "read", "update", "delete"].each do |perm|
put("#{request_url}/#{perm}", platform.admin_user,
:payload => {perm => {
"actors" => [platform.non_admin_client.name,
platform.admin_user.name, "pivotal"],
"groups" => []
}}).should look_like({
:status => 200
})
end
put(permission_request_url, platform.non_admin_client,
:payload => update_body).should look_like({
:status => 403
})
end
end
context "normal client with GRANT permission" do
it "can update ACL" do
put("#{request_url}/grant", platform.admin_user,
:payload => {"grant" => {
"actors" => [platform.non_admin_client.name,
platform.admin_user.name, "pivotal"],
# Per ACL policy, non-superusers can't remove the admins
# group from the grant ACL.
"groups" => ["admins"]
}}).should look_like({
:status => 200
})
put(permission_request_url, platform.non_admin_client,
:payload => update_body).should look_like({
:status => 200
})
end
end
end # context PUT /<type>/<name>/_acl/<permission>
context "POST /#{type}/<name>/_acl/#{permission}" do
context "admin user" do
it "returns 405" do
post(permission_request_url, platform.admin_user).should look_like({
:status => 405
})
end
end
end # context POST /<type>/<name>/_acl/<permission>
context "DELETE /#{type}/<name>/_acl/#{permission}" do
context "admin user" do
it "returns 405" do
delete(permission_request_url, platform.admin_user).should look_like({
:status => 405
})
end
end
end # context DELETE /<type>/<name>/_acl/<permission>
end # context /<type>/<name>/_acl/<permission> endpoint
end # loop (each) over permission
end # context for <type> type
end # loop (each) over type
end # context /<type>/<name>/_acl endpoint
end # describe ACL API
| 39.700758 | 111 | 0.45829 |
1888f5b82c03a22988fa7bf917d4e01f0aef5ff8 | 4,427 | # frozen_string_literal: true
require 'bolt/task'
# Installs the puppet-agent package on targets if needed then collects facts, including any custom
# facts found in Bolt's modulepath.
#
# Agent detection will be skipped if the target includes the 'puppet-agent' feature, either as a
# property of its transport (PCP) or by explicitly setting it as a feature in Bolt's inventory.
#
# If no agent is detected on the target using the 'puppet_agent::version' task, it's installed
# using 'puppet_agent::install' and the puppet service is stopped/disabled using the 'service' task.
Puppet::Functions.create_function(:apply_prep) do
  # @param targets A pattern or array of patterns identifying a set of targets.
  # @example Prepare targets by name.
  #   apply_prep('target1,target2')
  dispatch :apply_prep do
    param 'Boltlib::TargetSpec', :targets
  end

  # Lazily builds (and memoizes) a script compiler for resolving task
  # signatures against the current compilation's scope.
  def script_compiler
    @script_compiler ||= Puppet::Pal::ScriptCompiler.new(closure_scope.compiler)
  end

  # Resolves the named task and runs it on the given targets via the executor.
  # Raises Bolt::Error if the task cannot be found, and Bolt::RunFailure if
  # any target fails; on success returns the full result set.
  def run_task(executor, targets, name, args = {})
    tasksig = script_compiler.task_signature(name)
    raise Bolt::Error.new("#{name} could not be found", 'bolt/apply-prep') unless tasksig
    task = Bolt::Task.new(tasksig.task_hash)
    results = executor.run_task(targets, task, args)
    raise Bolt::RunFailure.new(results, 'run_task', task.name) unless results.ok?
    results
  end

  # Returns true if the target has the puppet-agent feature defined, either from inventory or transport.
  def agent?(target, executor, inventory)
    inventory.features(target).include?('puppet-agent') ||
      executor.transport(target.protocol).provided_features.include?('puppet-agent')
  end

  # Entry point: ensures every target has a puppet-agent installed, then
  # gathers facts (including custom facts from the modulepath) and stores
  # them in the inventory. Returns nil.
  def apply_prep(target_spec)
    # These are injected by Bolt's runtime; absent when called outside Bolt.
    applicator = Puppet.lookup(:apply_executor) { nil }
    executor = Puppet.lookup(:bolt_executor) { nil }
    inventory = Puppet.lookup(:bolt_inventory) { nil }
    unless applicator && executor && inventory && Puppet.features.bolt?
      raise Puppet::ParseErrorWithIssue.from_issue_and_stack(
        Puppet::Pops::Issues::TASK_MISSING_BOLT, action: _('apply_prep')
      )
    end

    executor.report_function_call('apply_prep')

    targets = inventory.get_targets(target_spec)

    executor.log_action('install puppet and gather facts', targets) do
      executor.without_default_logging do
        # Skip targets that include the puppet-agent feature, as we know an agent will be available.
        agent_targets, unknown_targets = targets.partition { |target| agent?(target, executor, inventory) }

        agent_targets.each { |target| Puppet.debug "Puppet Agent feature declared for #{target.name}" }
        unless unknown_targets.empty?
          # Ensure Puppet is installed
          versions = run_task(executor, unknown_targets, 'puppet_agent::version')
          # A nil 'version' in the task result means no agent is present.
          need_install, installed = versions.partition { |r| r['version'].nil? }
          installed.each do |r|
            Puppet.debug "Puppet Agent #{r['version']} installed on #{r.target.name}"
            inventory.set_feature(r.target, 'puppet-agent')
          end

          unless need_install.empty?
            need_install_targets = need_install.map(&:target)
            run_task(executor, need_install_targets, 'puppet_agent::install')

            # Service task works best when targets have puppet-agent feature
            need_install_targets.each { |target| inventory.set_feature(target, 'puppet-agent') }

            # Ensure the Puppet service is stopped after new install
            run_task(executor, need_install_targets, 'service', 'action' => 'stop', 'name' => 'puppet')
            run_task(executor, need_install_targets, 'service', 'action' => 'disable', 'name' => 'puppet')
          end
        end

        # Gather facts, including custom facts
        # Bundle plugin and pluginfacts dirs from every module into a tarball
        # so custom facts resolve on the remote side.
        plugins = applicator.build_plugin_tarball do |mod|
          search_dirs = []
          search_dirs << mod.plugins if mod.plugins?
          search_dirs << mod.pluginfacts if mod.pluginfacts?
          search_dirs
        end

        task = applicator.custom_facts_task
        # Wrapped in Sensitive so the (potentially large/sensitive) tarball is
        # not echoed into logs.
        arguments = { 'plugins' => Puppet::Pops::Types::PSensitiveType::Sensitive.new(plugins) }
        results = executor.run_task(targets, task, arguments)
        raise Bolt::RunFailure.new(results, 'run_task', task.name) unless results.ok?
        results.each do |result|
          inventory.add_facts(result.target, result.value)
        end
      end
    end

    # Return nothing
    nil
  end
end
| 42.980583 | 107 | 0.691891 |
1d3e741c06f73d674f39f517ab3ec5fe07187953 | 3,451 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::MediaServices::Mgmt::V2019_05_01_preview
module Models
#
# A metric emitted by service.
#
# NOTE: this class is generated by AutoRest (see the file header) — manual
# edits will be lost on regeneration; change the generator input instead.
class Metric

  include MsRestAzure

  # @return [String] The metric name.
  attr_accessor :name

  # @return [String] The metric display name.
  attr_accessor :display_name

  # @return [String] The metric display description.
  attr_accessor :display_description

  # @return [MetricUnit] The metric unit. Possible values include: 'Bytes',
  # 'Count', 'Milliseconds'
  attr_accessor :unit

  # @return [MetricAggregationType] The metric aggregation type. Possible
  # values include: 'Average', 'Count', 'Total'
  attr_accessor :aggregation_type

  # @return [Array<MetricDimension>] The metric dimensions.
  attr_accessor :dimensions


  #
  # Mapper for Metric class as Ruby Hash.
  # This will be used for serialization/deserialization.
  #
  # The returned hash is consumed by the ms_rest serializer; every property
  # is read_only because metrics are reported by the service, never sent.
  def self.mapper()
    {
      client_side_validation: true,
      required: false,
      serialized_name: 'Metric',
      type: {
        name: 'Composite',
        class_name: 'Metric',
        model_properties: {
          name: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'name',
            type: {
              name: 'String'
            }
          },
          display_name: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'displayName',
            type: {
              name: 'String'
            }
          },
          display_description: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'displayDescription',
            type: {
              name: 'String'
            }
          },
          unit: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'unit',
            type: {
              name: 'String'
            }
          },
          aggregation_type: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'aggregationType',
            type: {
              name: 'String'
            }
          },
          dimensions: {
            client_side_validation: true,
            required: false,
            read_only: true,
            serialized_name: 'dimensions',
            type: {
              name: 'Sequence',
              element: {
                  client_side_validation: true,
                  required: false,
                  serialized_name: 'MetricDimensionElementType',
                  type: {
                    name: 'Composite',
                    class_name: 'MetricDimension'
                  }
              }
            }
          }
        }
      }
    }
  end
end
end
end
| 29.245763 | 79 | 0.470009 |
e2ec62f3723c5519281e7ab6f1da4a1c04103a95 | 469 | # frozen_string_literal: true
require "./config/environment"
class ApplicationController < Sinatra::Base
configure do
set :public_folder, "public"
set :views, "app/views"
enable :sessions
set :session_secret, "password_security"
end
get "/" do
erb :index
end
helpers do
def logged_in?
current_user
end
def current_user
@current_user = User.find_by(id: session[:user_id]) if session[:user_id]
end
end
end
| 17.37037 | 78 | 0.675906 |
39a2e628f25fcf7c9eead318ef068c6d0fd4f46e | 1,245 | class Dive < Formula
desc "Tool for exploring each layer in a docker image"
homepage "https://github.com/wagoodman/dive"
url "https://github.com/wagoodman/dive.git",
:tag => "v0.7.2",
:revision => "09296c0214c4cc7477fe53bc79c54805899c6d19"
bottle do
cellar :any_skip_relocation
sha256 "47366221b4f7e6ebdc7c89f6dd620e9615154aef174d4e1e55fd93007c991dea" => :mojave
sha256 "f2a31f3886bb3ebb8fd834f5712882b3fedcb2fb16e590b0be2b13eda3f3294b" => :high_sierra
sha256 "f1fa1a7e1c7082e513977ea1d9974aaa0263fa3abe3b8db5156de71e2287aafb" => :sierra
end
depends_on "go" => :build
depends_on "docker"
def install
ENV["GOPATH"] = buildpath
ENV["GO111MODULE"] = "on"
dir = buildpath/"src/github.com/wagoodman/dive"
dir.install buildpath.children
cd dir do
system "go", "build", "-ldflags", "-s -w -X main.version=#{version}", "-o", bin/"dive"
prefix.install_metafiles
end
end
test do
(testpath/"Dockerfile").write <<~EOS
FROM alpine
ENV test=homebrew-core
RUN echo "hello"
EOS
assert_match "dive #{version}", shell_output("#{bin}/dive version")
assert_match "Building image", shell_output("CI=true #{bin}/dive build .", 1)
end
end
| 28.953488 | 93 | 0.693976 |
1a1537707ed1b1d5bcd7a824d59bab7e06046a38 | 30,620 | require_relative 'test_helper'
require 'rack/cache/context'
describe Rack::Cache::Context do
before { setup_cache_context }
after { teardown_cache_context }
it 'passes options to the underlying stores' do
app = CacheContextHelpers::FakeApp.new(200, {}, ['foo'])
context = Rack::Cache::Context.new(app, foo: 'bar')
entity_options = context.entitystore.instance_variable_get('@options')
meta_options = context.metastore.instance_variable_get('@options')
entity_options[:foo].must_equal('bar')
meta_options[:foo].must_equal('bar')
end
it 'passes on non-GET/HEAD requests' do
respond_with 200
post '/'
assert app.called?
assert response.ok?
cache.trace.must_include :pass
response.headers.wont_include 'age'
end
it 'passes on rack-cache.force-pass' do
respond_with 200
get '/', {"rack-cache.force-pass" => true}
assert app.called?
assert response.ok?
cache.trace.must_equal [:pass]
response.headers.wont_include 'age'
end
it "passes on options requests" do
respond_with 200
request "options", '/'
assert app.called?
assert response.ok?
cache.trace.must_include :pass
end
it "doesnt invalidate on options requests" do
respond_with 200
request "options", '/'
assert app.called?
assert response.ok?
cache.trace.wont_include :invalidate
end
%w[post put delete].each do |request_method|
it "invalidates on #{request_method} requests" do
respond_with 200
request request_method, '/'
assert app.called?
assert response.ok?
cache.trace.must_include :invalidate
cache.trace.must_include :pass
end
end
it 'does not cache with Authorization request header and non public response' do
respond_with 200, 'etag' => '"FOO"'
get '/', 'HTTP_AUTHORIZATION' => 'basic foobarbaz'
assert app.called?
assert response.ok?
response.headers['cache-control'].must_equal 'private'
cache.trace.must_include :miss
cache.trace.wont_include :store
response.headers.wont_include 'age'
end
it 'does cache with Authorization request header and public response' do
respond_with 200, 'cache-control' => 'public', 'etag' => '"FOO"'
get '/', 'HTTP_AUTHORIZATION' => 'basic foobarbaz'
assert app.called?
assert response.ok?
cache.trace.must_include :miss
cache.trace.must_include :store
cache.trace.wont_include :ignore
response.headers.must_include 'age'
response.headers['cache-control'].must_equal 'public'
end
it 'does not cache with Cookie header and non public response' do
respond_with 200, 'etag' => '"FOO"'
get '/', 'HTTP_COOKIE' => 'foo=bar'
assert app.called?
assert response.ok?
response.headers['cache-control'].must_equal 'private'
cache.trace.must_include :miss
cache.trace.wont_include :store
response.headers.wont_include 'age'
end
it 'does not cache requests with a Cookie header' do
respond_with 200
get '/', 'HTTP_COOKIE' => 'foo=bar'
assert response.ok?
assert app.called?
cache.trace.must_include :miss
cache.trace.wont_include :store
response.headers.wont_include 'age'
response.headers['cache-control'].must_equal 'private'
end
it 'does remove set-cookie response header from a cacheable response' do
respond_with 200, 'cache-control' => 'public', 'etag' => '"FOO"', 'set-cookie' => 'TestCookie=OK'
get '/'
assert app.called?
assert response.ok?
cache.trace.must_include :store
cache.trace.must_include :ignore
response.headers['set-cookie'].must_be_nil
end
it 'does remove all configured ignore_headers from a cacheable response' do
respond_with 200, 'cache-control' => 'public', 'etag' => '"FOO"', 'set-cookie' => 'TestCookie=OK', 'x-strip-me' => 'Secret'
get '/', 'rack-cache.ignore_headers' => ['set-cookie', 'x-strip-me']
assert app.called?
assert response.ok?
cache.trace.must_include :store
cache.trace.must_include :ignore
response.headers['set-cookie'].must_be_nil
response.headers['x-strip-me'].must_be_nil
end
it 'does not remove set-cookie response header from a private response' do
respond_with 200, 'cache-control' => 'private', 'set-cookie' => 'TestCookie=OK'
get '/'
assert app.called?
assert response.ok?
cache.trace.wont_include :store
cache.trace.wont_include :ignore
response.headers['set-cookie'].must_equal 'TestCookie=OK'
end
it 'responds with 304 when if-modified-since matches last-modified' do
timestamp = Time.now.httpdate
respond_with do |req,res|
res.status = 200
res['last-modified'] = timestamp
res['content-type'] = 'text/plain'
res.body = ['Hello World']
end
get '/',
'HTTP_IF_MODIFIED_SINCE' => timestamp
assert app.called?
response.status.must_equal 304
response.original_headers.wont_include 'content-length'
response.original_headers.wont_include 'content-type'
assert response.body.empty?
cache.trace.must_include :miss
cache.trace.must_include :store
end
it 'responds with 304 when if-none-match matches etag' do
respond_with do |req,res|
res.status = 200
res['etag'] = '12345'
res['content-type'] = 'text/plain'
res.body = ['Hello World']
end
get '/',
'HTTP_IF_NONE_MATCH' => '12345'
assert app.called?
response.status.must_equal 304
response.original_headers.wont_include 'content-length'
response.original_headers.wont_include 'content-type'
response.headers.must_include 'etag'
assert response.body.empty?
cache.trace.must_include :miss
cache.trace.must_include :store
end
it 'responds with 304 only if if-none-match and if-modified-since both match' do
timestamp = Time.now
respond_with do |req,res|
res.status = 200
res['etag'] = '12345'
res['last-modified'] = timestamp.httpdate
res['content-type'] = 'text/plain'
res.body = ['Hello World']
end
# Only etag matches
get '/',
'HTTP_IF_NONE_MATCH' => '12345', 'HTTP_IF_MODIFIED_SINCE' => (timestamp - 1).httpdate
assert app.called?
response.status.must_equal 200
# Only last-modified matches
get '/',
'HTTP_IF_NONE_MATCH' => '1234', 'HTTP_IF_MODIFIED_SINCE' => timestamp.httpdate
assert app.called?
response.status.must_equal 200
# Both matches
get '/',
'HTTP_IF_NONE_MATCH' => '12345', 'HTTP_IF_MODIFIED_SINCE' => timestamp.httpdate
assert app.called?
response.status.must_equal 304
end
it 'validates private responses cached on the client' do
respond_with do |req,res|
etags = req.env['HTTP_IF_NONE_MATCH'].to_s.split(/\s*,\s*/)
if req.env['HTTP_COOKIE'] == 'authenticated'
res['cache-control'] = 'private, no-store'
res['etag'] = '"private tag"'
if etags.include?('"private tag"')
res.status = 304
else
res.status = 200
res['content-type'] = 'text/plain'
res.body = ['private data']
end
else
res['etag'] = '"public tag"'
if etags.include?('"public tag"')
res.status = 304
else
res.status = 200
res['content-type'] = 'text/plain'
res.body = ['public data']
end
end
end
get '/'
assert app.called?
response.status.must_equal 200
response.headers['etag'].must_equal '"public tag"'
response.body.must_equal 'public data'
cache.trace.must_include :miss
cache.trace.must_include :store
get '/', 'HTTP_COOKIE' => 'authenticated'
assert app.called?
response.status.must_equal 200
response.headers['etag'].must_equal '"private tag"'
response.body.must_equal 'private data'
cache.trace.must_include :stale
cache.trace.must_include :invalid
cache.trace.wont_include :store
get '/',
'HTTP_IF_NONE_MATCH' => '"public tag"'
assert app.called?
response.status.must_equal 304
response.headers['etag'].must_equal '"public tag"'
cache.trace.must_include :stale
cache.trace.must_include :valid
cache.trace.must_include :store
get '/',
'HTTP_IF_NONE_MATCH' => '"private tag"',
'HTTP_COOKIE' => 'authenticated'
assert app.called?
response.status.must_equal 304
response.headers['etag'].must_equal '"private tag"'
cache.trace.must_include :valid
cache.trace.wont_include :store
end
it 'stores responses when no-cache request directive present' do
respond_with 200, 'expires' => (Time.now + 5).httpdate
get '/', 'HTTP_CACHE_CONTROL' => 'no-cache'
assert response.ok?
cache.trace.must_include :store
response.headers.must_include 'age'
end
it 'reloads responses when cache hits but no-cache request directive present ' +
'when allow_reload is set true' do
count = 0
respond_with 200, 'cache-control' => 'max-age=10000' do |req,res|
count+= 1
res.body = (count == 1) ? ['Hello World'] : ['Goodbye World']
end
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :store
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :fresh
get '/',
'rack-cache.allow_reload' => true,
'HTTP_CACHE_CONTROL' => 'no-cache'
assert response.ok?
response.body.must_equal 'Goodbye World'
cache.trace.must_include :reload
cache.trace.must_include :store
end
it 'does not reload responses when allow_reload is set false (default)' do
count = 0
respond_with 200, 'cache-control' => 'max-age=10000' do |req,res|
count+= 1
res.body = (count == 1) ? ['Hello World'] : ['Goodbye World']
end
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :store
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :fresh
get '/',
'rack-cache.allow_reload' => false,
'HTTP_CACHE_CONTROL' => 'no-cache'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.wont_include :reload
# test again without explicitly setting the allow_reload option to false
get '/',
'HTTP_CACHE_CONTROL' => 'no-cache'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.wont_include :reload
end
it 'revalidates fresh cache entry when max-age request directive is exceeded ' +
'when allow_revalidate option is set true' do
count = 0
respond_with do |req,res|
count+= 1
res['cache-control'] = 'max-age=10000'
res['etag'] = count.to_s
res.body = (count == 1) ? ['Hello World'] : ['Goodbye World']
end
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :store
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :fresh
get '/',
'rack-cache.allow_revalidate' => true,
'HTTP_CACHE_CONTROL' => 'max-age=0'
assert response.ok?
response.body.must_equal 'Goodbye World'
cache.trace.must_include :stale
cache.trace.must_include :invalid
cache.trace.must_include :store
end
it 'returns a stale cache entry when max-age request directive is exceeded ' +
   'when allow_revalidate and fault_tolerant options are set to true and ' +
   'the remote server returns a connection error' do
  count = 0
  respond_with do |req, res|
    count += 1
    # Simulate a backend outage on the second request only.
    raise Timeout::Error, 'Connection failed' if count == 2
    res['cache-control'] = 'max-age=10000'
    res['etag'] = count.to_s
    res.body = (count == 1) ? ['Hello World'] : ['Goodbye World']
  end

  # Prime the cache with the first backend response.
  get '/'
  assert response.ok?
  response.body.must_equal 'Hello World'
  cache.trace.must_include :store

  # Revalidation hits the simulated outage; fault tolerance should serve
  # the stale cached body instead of propagating the error.
  get '/',
    'rack-cache.allow_revalidate' => true,
    'rack-cache.fault_tolerant' => true,
    'HTTP_CACHE_CONTROL' => 'max-age=0'
  assert response.ok?
  response.body.must_equal 'Hello World'
  cache.trace.must_include :stale
  # NOTE(review): ":connnection_failed" (triple "n") looks misspelled but
  # presumably matches the trace symbol emitted by rack-cache itself --
  # confirm against the library before "fixing" the spelling here.
  cache.trace.must_include :connnection_failed

  # Once the server comes back, the request should be revalidated.
  get '/',
    'rack-cache.allow_revalidate' => true,
    'HTTP_CACHE_CONTROL' => 'max-age=0'
  assert response.ok?
  response.body.must_equal 'Goodbye World'
  cache.trace.must_include :stale
  cache.trace.must_include :invalid
  cache.trace.must_include :store
end
it 'does not revalidate fresh cache entry when enable_revalidate option is set false (default)' do
count = 0
respond_with do |req,res|
count+= 1
res['cache-control'] = 'max-age=10000'
res['etag'] = count.to_s
res.body = (count == 1) ? ['Hello World'] : ['Goodbye World']
end
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :store
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :fresh
get '/',
'rack-cache.allow_revalidate' => false,
'HTTP_CACHE_CONTROL' => 'max-age=0'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.wont_include :stale
cache.trace.wont_include :invalid
cache.trace.must_include :fresh
# test again without explicitly setting the allow_revalidate option to false
get '/',
'HTTP_CACHE_CONTROL' => 'max-age=0'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.wont_include :stale
cache.trace.wont_include :invalid
cache.trace.must_include :fresh
end
it 'fetches response from backend when cache misses' do
respond_with 200, 'expires' => (Time.now + 5).httpdate
get '/'
assert response.ok?
cache.trace.must_include :miss
response.headers.must_include 'age'
end
[(201..202),(204..206),(303..305),(400..403),(405..409),(411..417),(500..505)].each do |range|
range.each do |response_code|
it "does not cache #{response_code} responses" do
respond_with response_code, 'expires' => (Time.now + 5).httpdate
get '/'
cache.trace.wont_include :store
response.status.must_equal response_code
response.headers.wont_include 'age'
end
end
end
it "does not cache responses with explicit no-store directive" do
respond_with 200,
'expires' => (Time.now + 5).httpdate,
'cache-control' => 'no-store'
get '/'
assert response.ok?
cache.trace.wont_include :store
response.headers.wont_include 'age'
end
it 'does not cache responses without freshness information or a validator' do
respond_with 200
get '/'
assert response.ok?
cache.trace.wont_include :store
end
it "caches responses with explicit no-cache directive" do
respond_with 200,
'expires' => (Time.now + 5).httpdate,
'cache-control' => 'no-cache'
get '/'
assert response.ok?
cache.trace.must_include :store
response.headers.must_include 'age'
end
it 'caches responses with an Expiration header' do
respond_with 200, 'expires' => (Time.now + 5).httpdate
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
response.headers.must_include 'date'
refute response['age'].nil?
refute response['x-content-digest'].nil?
cache.trace.must_include :miss
cache.trace.must_include :store
cache.metastore.to_hash.keys.length.must_equal 1
end
it 'caches responses with a max-age directive' do
respond_with 200, 'cache-control' => 'max-age=5'
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
response.headers.must_include 'date'
refute response['age'].nil?
refute response['x-content-digest'].nil?
cache.trace.must_include :miss
cache.trace.must_include :store
cache.metastore.to_hash.keys.length.must_equal 1
end
it 'caches responses with a s-maxage directive' do
respond_with 200, 'cache-control' => 's-maxage=5'
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
response.headers.must_include 'date'
refute response['age'].nil?
refute response['x-content-digest'].nil?
cache.trace.must_include :miss
cache.trace.must_include :store
cache.metastore.to_hash.keys.length.must_equal 1
end
it 'caches responses with a last-modified validator but no freshness information' do
respond_with 200, 'last-modified' => Time.now.httpdate
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :miss
cache.trace.must_include :store
end
it 'caches responses with an etag validator but no freshness information' do
respond_with 200, 'etag' => '"123456"'
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :miss
cache.trace.must_include :store
end
it 'hits cached response with expires header' do
respond_with 200,
'date' => (Time.now - 5).httpdate,
'expires' => (Time.now + 5).httpdate
get '/'
assert app.called?
assert response.ok?
response.headers.must_include 'date'
cache.trace.must_include :miss
cache.trace.must_include :store
response.body.must_equal 'Hello World'
get '/'
assert response.ok?
refute app.called?
response['date'].must_equal responses.first['date']
response['age'].to_i.must_be :>, 0
refute response['x-content-digest'].nil?
cache.trace.must_include :fresh
cache.trace.wont_include :store
response.body.must_equal 'Hello World'
end
it 'hits cached response with max-age directive' do
respond_with 200,
'date' => (Time.now - 5).httpdate,
'cache-control' => 'max-age=10'
get '/'
assert app.called?
assert response.ok?
response.headers.must_include 'date'
cache.trace.must_include :miss
cache.trace.must_include :store
response.body.must_equal 'Hello World'
get '/'
assert response.ok?
refute app.called?
response['date'].must_equal responses.first['date']
response['age'].to_i.must_be :>, 0
refute response['x-content-digest'].nil?
cache.trace.must_include :fresh
cache.trace.wont_include :store
response.body.must_equal 'Hello World'
end
it 'hits cached response with s-maxage directive' do
respond_with 200,
'date' => (Time.now - 5).httpdate,
'cache-control' => 's-maxage=10, max-age=0'
get '/'
assert app.called?
assert response.ok?
response.headers.must_include 'date'
cache.trace.must_include :miss
cache.trace.must_include :store
response.body.must_equal 'Hello World'
get '/'
assert response.ok?
refute app.called?
response['date'].must_equal responses.first['date']
response['age'].to_i.must_be :>, 0
refute response['x-content-digest'].nil?
cache.trace.must_include :fresh
cache.trace.wont_include :store
response.body.must_equal 'Hello World'
end
it 'assigns default_ttl when response has no freshness information' do
respond_with 200
get '/', 'rack-cache.default_ttl' => 10
assert app.called?
assert response.ok?
cache.trace.must_include :miss
cache.trace.must_include :store
response.body.must_equal 'Hello World'
response['cache-control'].must_include 's-maxage=10'
get '/', 'rack-cache.default_ttl' => 10
assert response.ok?
refute app.called?
cache.trace.must_include :fresh
cache.trace.wont_include :store
response.body.must_equal 'Hello World'
end
it 'does not assign default_ttl when response has must-revalidate directive' do
respond_with 200,
'cache-control' => 'must-revalidate'
get '/', 'rack-cache.default_ttl' => 10
assert app.called?
assert response.ok?
cache.trace.must_include :miss
cache.trace.wont_include :store
response['cache-control'].wont_include 's-maxage'
response.body.must_equal 'Hello World'
end
it 'fetches full response when cache stale and no validators present' do
respond_with 200, 'expires' => (Time.now + 5).httpdate
# build initial request
get '/'
assert app.called?
assert response.ok?
response.headers.must_include 'date'
response.headers.must_include 'x-content-digest'
response.headers.must_include 'age'
cache.trace.must_include :miss
cache.trace.must_include :store
response.body.must_equal 'Hello World'
# go in and play around with the cached metadata directly ...
# XXX find some other way to do this
hash = cache.metastore.to_hash
hash.values.length.must_equal 1
entries = Marshal.load(hash.values.first)
entries.length.must_equal 1
req, res = entries.first
res['expires'] = (Time.now - 1).httpdate
hash[hash.keys.first] = Marshal.dump([[req, res]])
# build subsequent request; should be found but miss due to freshness
get '/'
assert app.called?
assert response.ok?
response['age'].to_i.must_equal 0
response.headers.must_include 'x-content-digest'
cache.trace.must_include :stale
cache.trace.wont_include :fresh
cache.trace.wont_include :miss
cache.trace.must_include :store
response.body.must_equal 'Hello World'
end
it 'validates cached responses with last-modified and no freshness information' do
timestamp = Time.now.httpdate
respond_with do |req,res|
res['last-modified'] = timestamp
if req.env['HTTP_IF_MODIFIED_SINCE'] == timestamp
res.status = 304
res.body = []
end
end
# build initial request
get '/'
assert app.called?
assert response.ok?
response.headers.must_include 'last-modified'
response.headers.must_include 'x-content-digest'
response.body.must_equal 'Hello World'
cache.trace.must_include :miss
cache.trace.must_include :store
cache.trace.wont_include :stale
# build subsequent request; should be found but miss due to freshness
get '/'
assert app.called?
assert response.ok?
response.headers.must_include 'last-modified'
response.headers.must_include 'x-content-digest'
response['age'].to_i.must_equal 0
response.body.must_equal 'Hello World'
cache.trace.must_include :stale
cache.trace.must_include :valid
cache.trace.must_include :store
cache.trace.wont_include :miss
end
# Fixed: removed the unused `timestamp` local, copy-paste residue from the
# preceding last-modified spec -- this test validates via etag only.
it 'validates cached responses with etag and no freshness information' do
  respond_with do |req,res|
    res['ETAG'] = '"12345"'
    # Answer 304 when the client's conditional request presents our etag.
    if req.env['HTTP_IF_NONE_MATCH'] == res['etag']
      res.status = 304
      res.body = []
    end
  end

  # build initial request
  get '/'
  assert app.called?
  assert response.ok?
  response.headers.must_include 'etag'
  response.headers.must_include 'x-content-digest'
  response.body.must_equal 'Hello World'
  cache.trace.must_include :miss
  cache.trace.must_include :store

  # build subsequent request; should be found but miss due to freshness
  get '/'
  assert app.called?
  assert response.ok?
  response.headers.must_include 'etag'
  response.headers.must_include 'x-content-digest'
  response['age'].to_i.must_equal 0
  response.body.must_equal 'Hello World'
  cache.trace.must_include :stale
  cache.trace.must_include :valid
  cache.trace.must_include :store
  cache.trace.wont_include :miss
end
it 'replaces cached responses when validation results in non-304 response' do
  timestamp = Time.now.httpdate
  count = 0
  # Backend serves two different full bodies, then a 304 on the third hit.
  respond_with do |req,res|
    res['last-modified'] = timestamp
    case (count+=1)
    when 1 ; res.body = ['first response']
    when 2 ; res.body = ['second response']
    when 3
      res.body = []
      res.status = 304
    end
  end

  # first request should fetch from backend and store in cache
  get '/'
  response.status.must_equal 200
  response.body.must_equal 'first response'

  # second request is validated, is invalid, and replaces cached entry
  get '/'
  response.status.must_equal 200
  response.body.must_equal 'second response'

  # third response is validated, valid, and returns cached entry
  get '/'
  response.status.must_equal 200
  response.body.must_equal 'second response'

  count.must_equal 3
end
it 'stores HEAD as original_method on HEAD requests' do
respond_with do |req,res|
res.status = 200
res.body = []
req.request_method.must_equal 'GET'
req.env['rack.methodoverride.original_method'].must_equal 'HEAD'
end
head '/'
assert app.called?
response.body.must_equal ''
end
it 'passes HEAD requests through directly on pass' do
respond_with do |req,res|
res.status = 200
res.body = []
req.request_method.must_equal 'HEAD'
end
head '/', 'HTTP_EXPECT' => 'something ...'
assert app.called?
response.body.must_equal ''
end
it 'uses cache to respond to HEAD requests when fresh' do
respond_with do |req,res|
res['cache-control'] = 'max-age=10'
res.body = ['Hello World']
req.request_method.wont_equal 'HEAD'
end
get '/'
assert app.called?
response.status.must_equal 200
response.body.must_equal 'Hello World'
head '/'
refute app.called?
response.status.must_equal 200
response.body.must_equal ''
response['content-length'].must_equal 'Hello World'.length.to_s
end
it 'invalidates cached responses on POST' do
respond_with do |req,res|
if req.request_method == 'GET'
res.status = 200
res['cache-control'] = 'public, max-age=500'
res.body = ['Hello World']
elsif req.request_method == 'POST'
res.status = 303
res['Location'] = '/'
res.headers.delete('cache-control')
res.body = []
end
end
# build initial request to enter into the cache
get '/'
assert app.called?
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :miss
cache.trace.must_include :store
# make sure it is valid
get '/'
refute app.called?
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :fresh
# now POST to same URL
post '/'
assert app.called?
assert response.redirect?
response['Location'].must_equal '/'
cache.trace.must_include :invalidate
cache.trace.must_include :pass
response.body.must_equal ''
# now make sure it was actually invalidated
get '/'
assert app.called?
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :stale
cache.trace.must_include :invalid
cache.trace.must_include :store
end
describe 'with responses that include a Vary header' do
before do
count = 0
respond_with 200 do |req,res|
res['vary'] = 'accept user-agent Foo'
res['cache-control'] = 'max-age=10'
res['x-response-count'] = (count+=1).to_s
res.body = [req.env['HTTP_USER_AGENT']]
end
end
it 'serves from cache when headers match' do
get '/',
'HTTP_ACCEPT' => 'text/html',
'HTTP_USER_AGENT' => 'Bob/1.0'
assert response.ok?
response.body.must_equal 'Bob/1.0'
cache.trace.must_include :miss
cache.trace.must_include :store
get '/',
'HTTP_ACCEPT' => 'text/html',
'HTTP_USER_AGENT' => 'Bob/1.0'
assert response.ok?
response.body.must_equal 'Bob/1.0'
cache.trace.must_include :fresh
cache.trace.wont_include :store
response.headers.must_include 'x-content-digest'
end
it 'stores multiple responses when headers differ' do
get '/',
'HTTP_ACCEPT' => 'text/html',
'HTTP_USER_AGENT' => 'Bob/1.0'
assert response.ok?
response.body.must_equal 'Bob/1.0'
response['x-response-count'].must_equal '1'
get '/',
'HTTP_ACCEPT' => 'text/html',
'HTTP_USER_AGENT' => 'Bob/2.0'
cache.trace.must_include :miss
cache.trace.must_include :store
response.body.must_equal 'Bob/2.0'
response['x-response-count'].must_equal '2'
get '/',
'HTTP_ACCEPT' => 'text/html',
'HTTP_USER_AGENT' => 'Bob/1.0'
cache.trace.must_include :fresh
response.body.must_equal 'Bob/1.0'
response['x-response-count'].must_equal '1'
get '/',
'HTTP_ACCEPT' => 'text/html',
'HTTP_USER_AGENT' => 'Bob/2.0'
cache.trace.must_include :fresh
response.body.must_equal 'Bob/2.0'
response['x-response-count'].must_equal '2'
get '/',
'HTTP_USER_AGENT' => 'Bob/2.0'
cache.trace.must_include :miss
response.body.must_equal 'Bob/2.0'
response['x-response-count'].must_equal '3'
end
end
it 'passes if there was a metastore exception' do
respond_with 200, 'cache-control' => 'max-age=10000' do |req,res|
res.body = ['Hello World']
end
get '/'
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :store
get '/' do |cache|
cache.expects(:metastore).raises Timeout::Error
end
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :pass
post '/' do |cache|
cache.expects(:metastore).raises Timeout::Error
end
assert response.ok?
response.body.must_equal 'Hello World'
cache.trace.must_include :pass
end
it 'does not cache when cache-control response header changed to private (reset @cache_control on dup)' do
count = 0
respond_with do |req,res|
count+= 1
res['cache-control'] = (count == 1) ? 'public' : 'private, no-store'
res['etag'] = count.to_s
res.status = (count == 1) ? 200 : 304
end
get '/'
assert app.called?
assert response.ok?
cache.trace.must_include :miss
cache.trace.must_include :store
get '/'
assert app.called?
assert response.ok?
cache.trace.must_include :stale
cache.trace.must_include :valid
cache.trace.wont_include :store
end
it 'logs to rack.logger if available' do
logger = Class.new do
attr_reader :logged_level
def info(message)
@logged_level = "info"
end
end.new
respond_with 200
get '/', 'rack.logger' => logger
assert response.ok?
logger.logged_level.must_equal "info"
end
end
| 29.64182 | 127 | 0.664729 |
1ce76490448c68594e6ebd088e07f3faeaa03ac1 | 1,376 | # frozen_string_literal: true
require 'spec_helper'
# Model spec for ContainerExpirationPolicy: covers the project association
# and the validation rules for each policy attribute.
RSpec.describe ContainerExpirationPolicy, type: :model do
  describe 'relationships' do
    it { is_expected.to belong_to(:project) }
  end

  describe 'validations' do
    it { is_expected.to validate_presence_of(:project) }

    describe '#enabled' do
      # enabled must be strictly boolean; nil is rejected so the policy
      # state is never ambiguous.
      it { is_expected.to allow_value(true).for(:enabled) }
      it { is_expected.to allow_value(false).for(:enabled) }
      it { is_expected.not_to allow_value(nil).for(:enabled) }
    end

    describe '#cadence' do
      # cadence is required and must be a duration-style string
      # (e.g. "1d", "1month"); arbitrary text is rejected.
      it { is_expected.to validate_presence_of(:cadence) }
      it { is_expected.to allow_value('1d').for(:cadence) }
      it { is_expected.to allow_value('1month').for(:cadence) }
      it { is_expected.not_to allow_value('123asdf').for(:cadence) }
      it { is_expected.not_to allow_value(nil).for(:cadence) }
    end

    describe '#older_than' do
      # older_than is optional, but when present must be a day-count string.
      it { is_expected.to allow_value('7d').for(:older_than) }
      it { is_expected.to allow_value('14d').for(:older_than) }
      it { is_expected.to allow_value(nil).for(:older_than) }
      it { is_expected.not_to allow_value('123asdf').for(:older_than) }
    end

    describe '#keep_n' do
      # keep_n is optional, but when present must be numeric.
      it { is_expected.to allow_value(10).for(:keep_n) }
      it { is_expected.to allow_value(nil).for(:keep_n) }
      it { is_expected.not_to allow_value('foo').for(:keep_n) }
    end
  end
end
| 32.761905 | 71 | 0.674419 |
01b8d397590d6355c54492eb1d86a834a9575970 | 2,222 | # frozen_string_literal: true
require "rails_helper"
# Controller spec for the EVE Online SSO OAuth callback endpoint: checks
# the rescue_from wiring for upstream API failures, the happy-path redirect
# after the callback service persists, and each error-handler's rendering.
describe Auth::EveOnlineSso::CallbacksController do
  it { should be_a(ApplicationController) }

  # Each upstream EVE Online API failure maps to a dedicated handler.
  it { should rescue_from(EveOnline::Exceptions::ServiceUnavailable).with(:handle_service_unavailable) }

  it { should rescue_from(EveOnline::Exceptions::InternalServerError).with(:handle_internal_server_error) }

  it { should rescue_from(EveOnline::Exceptions::BadGateway).with(:handle_bad_gateway) }

  it { should rescue_from(EveOnline::Exceptions::Timeout).with(:handle_timeout) }

  describe "#show" do
    let(:user) { create(:user, locale: "english") }

    before { sign_in(user) }

    before { expect(subject).to receive(:current_user).and_return(user).exactly(4).times }

    let(:service) { instance_double(EveOnlineCallbackService) }

    before do
      #
      # EveOnlineCallbackService.new(current_user, request) # => service
      #
      expect(EveOnlineCallbackService).to receive(:new).with(any_args).and_return(service) # TODO: replace `any_args` with real data
    end

    before { expect(service).to receive(:save!) }

    before { get :show }

    # Successful persistence redirects the user to their character list.
    it { should respond_with(:found) }

    it { should redirect_to("/characters") }
  end

  # private methods
  describe "#handle_service_unavailable" do
    before { expect(subject).to receive(:render).with(inline: "Service Unavailable (503). Please, try again later.") }

    specify { expect { subject.send(:handle_service_unavailable) }.not_to raise_error }
  end

  describe "#handle_internal_server_error" do
    before { expect(subject).to receive(:render).with(inline: "Internal Server Error (500). Please, try again later.") }

    specify { expect { subject.send(:handle_internal_server_error) }.not_to raise_error }
  end

  describe "#handle_bad_gateway" do
    before { expect(subject).to receive(:render).with(inline: "Bad Gateway (502). Please, try again later.") }

    specify { expect { subject.send(:handle_bad_gateway) }.not_to raise_error }
  end

  describe "#handle_timeout" do
    before { expect(subject).to receive(:render).with(inline: "Timeout Error. Please, try again later.") }

    specify { expect { subject.send(:handle_timeout) }.not_to raise_error }
  end
end
| 33.164179 | 132 | 0.720072 |
115ab946224fadcf4f314fe8910443fe5d91e1f2 | 27,488 | require 'spec_helper'
# Test stub for Spree::Calculator: returns a flat amount of 5 for any
# computable (order, shipment, line item, ...), so specs get a predictable
# calculated value without exercising real calculator logic.
class FakeCalculator < Spree::Calculator
  def compute(computable)
    5
  end
end
describe Spree::Order, :type => :model do
let(:user) { stub_model(Spree::LegacyUser, :email => "[email protected]") }
let(:order) { stub_model(Spree::Order, :user => user) }
before do
allow(Spree::LegacyUser).to receive_messages(:current => mock_model(Spree::LegacyUser, :id => 123))
end
context "#canceled_by" do
let(:admin_user) { create :admin_user }
let(:order) { create :order }
before do
allow(order).to receive(:cancel!)
end
subject { order.canceled_by(admin_user) }
it 'should cancel the order' do
expect(order).to receive(:cancel!)
subject
end
it 'should save canceler_id' do
subject
expect(order.reload.canceler_id).to eq(admin_user.id)
end
it 'should save canceled_at' do
subject
expect(order.reload.canceled_at).to_not be_nil
end
it 'should have canceler' do
subject
expect(order.reload.canceler).to eq(admin_user)
end
end
context "#create" do
let(:order) { Spree::Order.create }
it "should assign an order number" do
expect(order.number).not_to be_nil
end
it 'should create a randomized 22 character token' do
expect(order.guest_token.size).to eq(22)
end
end
context "creates shipments cost" do
let(:shipment) { double }
before { allow(order).to receive_messages shipments: [shipment] }
it "update and persist totals" do
expect(shipment).to receive :update_amounts
expect(order.updater).to receive :update_shipment_total
expect(order.updater).to receive :persist_totals
order.set_shipments_cost
end
end
context "#finalize!" do
let(:order) { Spree::Order.create(email: '[email protected]') }
before do
order.update_column :state, 'complete'
end
it "should set completed_at" do
expect(order).to receive(:touch).with(:completed_at)
order.finalize!
end
it "should sell inventory units" do
order.shipments.each do |shipment|
expect(shipment).to receive(:update!)
expect(shipment).to receive(:finalize!)
end
order.finalize!
end
it "should decrease the stock for each variant in the shipment" do
order.shipments.each do |shipment|
expect(shipment.stock_location).to receive(:decrease_stock_for_variant)
end
order.finalize!
end
it "should change the shipment state to ready if order is paid" do
Spree::Shipment.create(order: order)
order.shipments.reload
allow(order).to receive_messages(:paid? => true, :complete? => true)
order.finalize!
order.reload # reload so we're sure the changes are persisted
expect(order.shipment_state).to eq('ready')
end
after { Spree::Config.set :track_inventory_levels => true }
it "should not sell inventory units if track_inventory_levels is false" do
Spree::Config.set :track_inventory_levels => false
expect(Spree::InventoryUnit).not_to receive(:sell_units)
order.finalize!
end
it "should send an order confirmation email" do
mail_message = double "Mail::Message"
expect(Spree::OrderMailer).to receive(:confirm_email).with(order.id).and_return mail_message
expect(mail_message).to receive :deliver
order.finalize!
end
it "sets confirmation delivered when finalizing" do
expect(order.confirmation_delivered?).to be false
order.finalize!
expect(order.confirmation_delivered?).to be true
end
it "should not send duplicate confirmation emails" do
allow(order).to receive_messages(:confirmation_delivered? => true)
expect(Spree::OrderMailer).not_to receive(:confirm_email)
order.finalize!
end
it "should freeze all adjustments" do
# Stub this method as it's called due to a callback
# and it's irrelevant to this test
allow(order).to receive :has_available_shipment
allow(Spree::OrderMailer).to receive_message_chain :confirm_email, :deliver
adjustments = [double]
expect(order).to receive(:all_adjustments).and_return(adjustments)
adjustments.each do |adj|
expect(adj).to receive(:close)
end
order.finalize!
end
context "order is considered risky" do
before do
allow(order).to receive_messages :is_risky? => true
end
it "should change state to risky" do
expect(order).to receive(:considered_risky!)
order.finalize!
end
context "and order is approved" do
before do
allow(order).to receive_messages :approved? => true
end
it "should leave order in complete state" do
order.finalize!
expect(order.state).to eq 'complete'
end
end
end
end
context "insufficient_stock_lines" do
let(:line_item) { mock_model Spree::LineItem, :insufficient_stock? => true }
before { allow(order).to receive_messages(:line_items => [line_item]) }
it "should return line_item that has insufficient stock on hand" do
expect(order.insufficient_stock_lines.size).to eq(1)
expect(order.insufficient_stock_lines.include?(line_item)).to be true
end
end
describe '#ensure_line_items_are_in_stock' do
subject { order.ensure_line_items_are_in_stock }
let(:line_item) { mock_model Spree::LineItem, :insufficient_stock? => true }
before do
allow(order).to receive(:restart_checkout_flow)
allow(order).to receive_messages(:line_items => [line_item])
end
it 'should restart checkout flow' do
expect(order).to receive(:restart_checkout_flow).once
subject
end
it 'should have error message' do
subject
expect(order.errors[:base]).to include(Spree.t(:insufficient_stock_lines_present))
end
it 'should be false' do
expect(subject).to be_falsey
end
end
context "empty!" do
let(:order) { stub_model(Spree::Order, item_count: 2) }
before do
allow(order).to receive_messages(:line_items => line_items = [1, 2])
allow(order).to receive_messages(:adjustments => adjustments = [])
end
it "clears out line items, adjustments and update totals" do
expect(order.line_items).to receive(:destroy_all)
expect(order.adjustments).to receive(:destroy_all)
expect(order.shipments).to receive(:destroy_all)
expect(order.updater).to receive(:update_totals)
expect(order.updater).to receive(:persist_totals)
order.empty!
expect(order.item_total).to eq 0
end
end
context "#display_outstanding_balance" do
it "returns the value as a spree money" do
allow(order).to receive(:outstanding_balance) { 10.55 }
expect(order.display_outstanding_balance).to eq(Spree::Money.new(10.55))
end
end
context "#display_item_total" do
it "returns the value as a spree money" do
allow(order).to receive(:item_total) { 10.55 }
expect(order.display_item_total).to eq(Spree::Money.new(10.55))
end
end
context "#display_adjustment_total" do
it "returns the value as a spree money" do
order.adjustment_total = 10.55
expect(order.display_adjustment_total).to eq(Spree::Money.new(10.55))
end
end
context "#display_total" do
it "returns the value as a spree money" do
order.total = 10.55
expect(order.display_total).to eq(Spree::Money.new(10.55))
end
end
context "#currency" do
context "when object currency is ABC" do
before { order.currency = "ABC" }
it "returns the currency from the object" do
expect(order.currency).to eq("ABC")
end
end
context "when object currency is nil" do
before { order.currency = nil }
it "returns the globally configured currency" do
expect(order.currency).to eq("USD")
end
end
end
# Regression tests for #2179
context "#merge!" do
let(:variant) { create(:variant) }
let(:order_1) { Spree::Order.create }
let(:order_2) { Spree::Order.create }
it "destroys the other order" do
order_1.merge!(order_2)
expect { order_2.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
context "user is provided" do
it "assigns user to new order" do
order_1.merge!(order_2, user)
expect(order_1.user).to eq user
end
end
context "merging together two orders with line items for the same variant" do
before do
order_1.contents.add(variant, 1)
order_2.contents.add(variant, 1)
end
specify do
order_1.merge!(order_2)
expect(order_1.line_items.count).to eq(1)
line_item = order_1.line_items.first
expect(line_item.quantity).to eq(2)
expect(line_item.variant_id).to eq(variant.id)
end
end
context "merging using extension-specific line_item_comparison_hooks" do
before do
Spree::Order.register_line_item_comparison_hook(:foos_match)
allow(Spree::Variant).to receive(:price_modifier_amount).and_return(0.00)
end
after do
# reset to avoid test pollution
Spree::Order.line_item_comparison_hooks = Set.new
end
context "2 equal line items" do
before do
@line_item_1 = order_1.contents.add(variant, 1, {foos: {}})
@line_item_2 = order_2.contents.add(variant, 1, {foos: {}})
end
specify do
expect(order_1).to receive(:foos_match).with(@line_item_1, kind_of(Hash)).and_return(true)
order_1.merge!(order_2)
expect(order_1.line_items.count).to eq(1)
line_item = order_1.line_items.first
expect(line_item.quantity).to eq(2)
expect(line_item.variant_id).to eq(variant.id)
end
end
context "2 different line items" do
before do
allow(order_1).to receive(:foos_match).and_return(false)
order_1.contents.add(variant, 1, {foos: {}})
order_2.contents.add(variant, 1, {foos: {bar: :zoo}})
end
specify do
order_1.merge!(order_2)
expect(order_1.line_items.count).to eq(2)
line_item = order_1.line_items.first
expect(line_item.quantity).to eq(1)
expect(line_item.variant_id).to eq(variant.id)
line_item = order_1.line_items.last
expect(line_item.quantity).to eq(1)
expect(line_item.variant_id).to eq(variant.id)
end
end
end
context "merging together two orders with different line items" do
let(:variant_2) { create(:variant) }
before do
order_1.contents.add(variant, 1)
order_2.contents.add(variant_2, 1)
end
specify do
order_1.merge!(order_2)
line_items = order_1.line_items.reload
expect(line_items.count).to eq(2)
expect(order_1.item_count).to eq 2
expect(order_1.item_total).to eq line_items.map(&:amount).sum
# No guarantee on ordering of line items, so we do this:
expect(line_items.pluck(:quantity)).to match_array([1, 1])
expect(line_items.pluck(:variant_id)).to match_array([variant.id, variant_2.id])
end
end
end
context "#confirmation_required?" do
# Regression test for #4117
it "is required if the state is currently 'confirm'" do
order = Spree::Order.new
assert !order.confirmation_required?
order.state = 'confirm'
assert order.confirmation_required?
end
context 'Spree::Config[:always_include_confirm_step] == true' do
before do
Spree::Config[:always_include_confirm_step] = true
end
it "returns true if payments empty" do
order = Spree::Order.new
assert order.confirmation_required?
end
end
context 'Spree::Config[:always_include_confirm_step] == false' do
it "returns false if payments empty and Spree::Config[:always_include_confirm_step] == false" do
order = Spree::Order.new
assert !order.confirmation_required?
end
it "does not bomb out when an order has an unpersisted payment" do
order = Spree::Order.new
order.payments.build
assert !order.confirmation_required?
end
end
end
context "add_update_hook" do
before do
Spree::Order.class_eval do
register_update_hook :add_awesome_sauce
end
end
after do
Spree::Order.update_hooks = Set.new
end
it "calls hook during update" do
order = create(:order)
expect(order).to receive(:add_awesome_sauce)
order.update!
end
it "calls hook during finalize" do
order = create(:order)
expect(order).to receive(:add_awesome_sauce)
order.finalize!
end
end
describe "#tax_address" do
before { Spree::Config[:tax_using_ship_address] = tax_using_ship_address }
subject { order.tax_address }
context "when tax_using_ship_address is true" do
let(:tax_using_ship_address) { true }
it 'returns ship_address' do
expect(subject).to eq(order.ship_address)
end
end
context "when tax_using_ship_address is not true" do
let(:tax_using_ship_address) { false }
it "returns bill_address" do
expect(subject).to eq(order.bill_address)
end
end
end
describe "#restart_checkout_flow" do
it "updates the state column to the first checkout_steps value" do
order = create(:order_with_totals, state: "delivery")
expect(order.checkout_steps).to eql ["address", "delivery", "complete"]
expect{ order.restart_checkout_flow }.to change{order.state}.from("delivery").to("address")
end
context "without line items" do
it "updates the state column to cart" do
order = create(:order, state: "delivery")
expect{ order.restart_checkout_flow }.to change{order.state}.from("delivery").to("cart")
end
end
end
# Regression tests for #4072
context "#state_changed" do
let(:order) { FactoryGirl.create(:order) }
it "logs state changes" do
order.update_column(:payment_state, 'balance_due')
order.payment_state = 'paid'
expect(order.state_changes).to be_empty
order.state_changed('payment')
state_change = order.state_changes.find_by(:name => 'payment')
expect(state_change.previous_state).to eq('balance_due')
expect(state_change.next_state).to eq('paid')
end
it "does not do anything if state does not change" do
order.update_column(:payment_state, 'balance_due')
expect(order.state_changes).to be_empty
order.state_changed('payment')
expect(order.state_changes).to be_empty
end
end
# Regression test for #4199
context "#available_payment_methods" do
it "includes frontend payment methods" do
payment_method = Spree::PaymentMethod.create!({
:name => "Fake",
:active => true,
:display_on => "front_end",
:environment => Rails.env
})
expect(order.available_payment_methods).to include(payment_method)
end
it "includes 'both' payment methods" do
payment_method = Spree::PaymentMethod.create!({
:name => "Fake",
:active => true,
:display_on => "both",
:environment => Rails.env
})
expect(order.available_payment_methods).to include(payment_method)
end
it "does not include a payment method twice if display_on is blank" do
payment_method = Spree::PaymentMethod.create!({
:name => "Fake",
:active => true,
:display_on => "both",
:environment => Rails.env
})
expect(order.available_payment_methods.count).to eq(1)
expect(order.available_payment_methods).to include(payment_method)
end
end
context "#apply_free_shipping_promotions" do
it "calls out to the FreeShipping promotion handler" do
shipment = double('Shipment')
allow(order).to receive_messages :shipments => [shipment]
expect(Spree::PromotionHandler::FreeShipping).to receive(:new).and_return(handler = double)
expect(handler).to receive(:activate)
expect(Spree::ItemAdjustments).to receive(:new).with(shipment).and_return(adjuster = double)
expect(adjuster).to receive(:update)
expect(order.updater).to receive(:update_shipment_total)
expect(order.updater).to receive(:persist_totals)
order.apply_free_shipping_promotions
end
end
context "#products" do
before :each do
@variant1 = mock_model(Spree::Variant, :product => "product1")
@variant2 = mock_model(Spree::Variant, :product => "product2")
@line_items = [mock_model(Spree::LineItem, :product => "product1", :variant => @variant1, :variant_id => @variant1.id, :quantity => 1),
mock_model(Spree::LineItem, :product => "product2", :variant => @variant2, :variant_id => @variant2.id, :quantity => 2)]
allow(order).to receive_messages(:line_items => @line_items)
end
it "contains?" do
expect(order.contains?(@variant1)).to be true
end
it "gets the quantity of a given variant" do
expect(order.quantity_of(@variant1)).to eq(1)
@variant3 = mock_model(Spree::Variant, :product => "product3")
expect(order.quantity_of(@variant3)).to eq(0)
end
it "can find a line item matching a given variant" do
expect(order.find_line_item_by_variant(@variant1)).not_to be_nil
expect(order.find_line_item_by_variant(mock_model(Spree::Variant))).to be_nil
end
context "match line item with options" do
before do
Spree::Order.register_line_item_comparison_hook(:foos_match)
end
after do
# reset to avoid test pollution
Spree::Order.line_item_comparison_hooks = Set.new
end
it "matches line item when options match" do
allow(order).to receive(:foos_match).and_return(true)
expect(order.line_item_options_match(@line_items.first, {foos: {bar: :zoo}})).to be true
end
it "does not match line item without options" do
allow(order).to receive(:foos_match).and_return(false)
expect(order.line_item_options_match(@line_items.first, {})).to be false
end
end
end
context "#generate_order_number" do
context "when no configure" do
let(:default_length) { Spree::Order::ORDER_NUMBER_LENGTH + Spree::Order::ORDER_NUMBER_PREFIX.length }
subject(:order_number) { order.generate_order_number }
describe '#class' do
subject { super().class }
it { is_expected.to eq String }
end
describe '#length' do
subject { super().length }
it { is_expected.to eq default_length }
end
it { is_expected.to match /^#{Spree::Order::ORDER_NUMBER_PREFIX}/ }
end
context "when length option is 5" do
let(:option_length) { 5 + Spree::Order::ORDER_NUMBER_PREFIX.length }
it "should be option length for order number" do
expect(order.generate_order_number(length: 5).length).to eq option_length
end
end
context "when letters option is true" do
it "generates order number include letter" do
expect(order.generate_order_number(length: 100, letters: true)).to match /[A-Z]/
end
end
context "when prefix option is 'P'" do
it "generates order number and it prefix is 'P'" do
expect(order.generate_order_number(prefix: 'P')).to match /^P/
end
end
end
context "#associate_user!" do
let!(:user) { FactoryGirl.create(:user) }
it "should associate a user with a persisted order" do
order = FactoryGirl.create(:order_with_line_items, created_by: nil)
order.user = nil
order.email = nil
order.associate_user!(user)
expect(order.user).to eq(user)
expect(order.email).to eq(user.email)
expect(order.created_by).to eq(user)
# verify that the changes we made were persisted
order.reload
expect(order.user).to eq(user)
expect(order.email).to eq(user.email)
expect(order.created_by).to eq(user)
end
it "should not overwrite the created_by if it already is set" do
creator = create(:user)
order = FactoryGirl.create(:order_with_line_items, created_by: creator)
order.user = nil
order.email = nil
order.associate_user!(user)
expect(order.user).to eq(user)
expect(order.email).to eq(user.email)
expect(order.created_by).to eq(creator)
# verify that the changes we made were persisted
order.reload
expect(order.user).to eq(user)
expect(order.email).to eq(user.email)
expect(order.created_by).to eq(creator)
end
it "should associate a user with a non-persisted order" do
order = Spree::Order.new
expect do
order.associate_user!(user)
end.to change { [order.user, order.email] }.from([nil, nil]).to([user, user.email])
end
it "should not persist an invalid address" do
address = Spree::Address.new
order.user = nil
order.email = nil
order.ship_address = address
expect do
order.associate_user!(user)
end.not_to change { address.persisted? }.from(false)
end
end
context "#can_ship?" do
let(:order) { Spree::Order.create }
it "should be true for order in the 'complete' state" do
allow(order).to receive_messages(:complete? => true)
expect(order.can_ship?).to be true
end
it "should be true for order in the 'resumed' state" do
allow(order).to receive_messages(:resumed? => true)
expect(order.can_ship?).to be true
end
it "should be true for an order in the 'awaiting return' state" do
allow(order).to receive_messages(:awaiting_return? => true)
expect(order.can_ship?).to be true
end
it "should be true for an order in the 'returned' state" do
allow(order).to receive_messages(:returned? => true)
expect(order.can_ship?).to be true
end
it "should be false if the order is neither in the 'complete' nor 'resumed' state" do
allow(order).to receive_messages(:resumed? => false, :complete? => false)
expect(order.can_ship?).to be false
end
end
context "#completed?" do
it "should indicate if order is completed" do
order.completed_at = nil
expect(order.completed?).to be false
order.completed_at = Time.now
expect(order.completed?).to be true
end
end
context "#allow_checkout?" do
it "should be true if there are line_items in the order" do
allow(order).to receive_message_chain(:line_items, :count => 1)
expect(order.checkout_allowed?).to be true
end
it "should be false if there are no line_items in the order" do
allow(order).to receive_message_chain(:line_items, :count => 0)
expect(order.checkout_allowed?).to be false
end
end
context "#amount" do
before do
@order = create(:order, :user => user)
@order.line_items = [create(:line_item, :price => 1.0, :quantity => 2),
create(:line_item, :price => 1.0, :quantity => 1)]
end
it "should return the correct lum sum of items" do
expect(@order.amount).to eq(3.0)
end
end
context "#backordered?" do
it 'is backordered if one of the shipments is backordered' do
allow(order).to receive_messages(:shipments => [mock_model(Spree::Shipment, :backordered? => false),
mock_model(Spree::Shipment, :backordered? => true)])
expect(order).to be_backordered
end
end
context "#can_cancel?" do
it "should be false for completed order in the canceled state" do
order.state = 'canceled'
order.shipment_state = 'ready'
order.completed_at = Time.now
expect(order.can_cancel?).to be false
end
it "should be true for completed order with no shipment" do
order.state = 'complete'
order.shipment_state = nil
order.completed_at = Time.now
expect(order.can_cancel?).to be true
end
end
context "#tax_total" do
it "adds included tax and additional tax" do
allow(order).to receive_messages(:additional_tax_total => 10, :included_tax_total => 20)
expect(order.tax_total).to eq 30
end
end
# Regression test for #4923
context "locking" do
let(:order) { Spree::Order.create } # need a persisted in order to test locking
it 'can lock' do
expect { order.with_lock {} }.to_not raise_error
end
end
describe "#pre_tax_item_amount" do
it "sums all of the line items' pre tax amounts" do
subject.line_items = [
Spree::LineItem.new(price: 10, quantity: 2, pre_tax_amount: 5.0),
Spree::LineItem.new(price: 30, quantity: 1, pre_tax_amount: 14.0),
]
expect(subject.pre_tax_item_amount).to eq 19.0
end
end
describe '#quantity' do
# Uses a persisted record, as the quantity is retrieved via a DB count
let(:order) { create :order_with_line_items, line_items_count: 3 }
it 'sums the quantity of all line items' do
expect(order.quantity).to eq 3
end
end
describe '#has_non_reimbursement_related_refunds?' do
subject do
order.has_non_reimbursement_related_refunds?
end
context 'no refunds exist' do
it { is_expected.to eq false }
end
context 'a non-reimbursement related refund exists' do
let(:order) { refund.payment.order }
let(:refund) { create(:refund, reimbursement_id: nil, amount: 5) }
it { is_expected.to eq true }
end
context 'an old-style refund exists' do
let(:order) { create(:order_ready_to_ship) }
let(:payment) { order.payments.first.tap { |p| allow(p).to receive_messages(profiles_supported: false) } }
let!(:refund_payment) {
build(:payment, amount: -1, order: order, state: 'completed', source: payment).tap do |p|
allow(p).to receive_messages(profiles_supported?: false)
p.save!
end
}
it { is_expected.to eq true }
end
context 'a reimbursement related refund exists' do
let(:order) { refund.payment.order }
let(:refund) { create(:refund, reimbursement_id: 123, amount: 5)}
it { is_expected.to eq false }
end
end
describe "#create_proposed_shipments" do
it "assigns the coordinator returned shipments to its shipments" do
shipment = build(:shipment)
allow_any_instance_of(Spree::Stock::Coordinator).to receive(:shipments).and_return([shipment])
subject.create_proposed_shipments
expect(subject.shipments).to eq [shipment]
end
end
describe "#all_inventory_units_returned?" do
let(:order) { create(:order_with_line_items, line_items_count: 3) }
subject { order.all_inventory_units_returned? }
context "all inventory units are returned" do
before { order.inventory_units.update_all(state: 'returned') }
it "is true" do
expect(subject).to eq true
end
end
context "some inventory units are returned" do
before do
order.inventory_units.first.update_attribute(:state, 'returned')
end
it "is false" do
expect(subject).to eq false
end
end
context "no inventory units are returned" do
it "is false" do
expect(subject).to eq false
end
end
end
end
| 30.781635 | 141 | 0.660506 |
0333e0664eeb7569b91717ec16831a93e82c57b3 | 4,335 | #-- encoding: UTF-8
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
require File.expand_path('../../support/shared/become_member', __FILE__)
require 'support/shared/acts_as_watchable'
describe News, type: :model do
include BecomeMember
let(:project) {
project = FactoryBot.create(:public_project)
project.enabled_modules << EnabledModule.new(name: 'news')
project.reload
}
let!(:news) { FactoryBot.create(:news, project: project) }
let(:permissions) { [] }
let(:role) { FactoryBot.build(:role, permissions: permissions) }
it_behaves_like 'acts_as_watchable included' do
let(:model_instance) { FactoryBot.create(:news) }
let(:watch_permission) { :view_news }
let(:project) { model_instance.project }
end
describe '.latest' do
before do
Role.anonymous
end
it 'includes news elements from projects where news module is enabled' do
expect(News.latest).to match_array [news]
end
it "doesn't include news elements from projects where news module is not enabled" do
EnabledModule.where(project_id: project.id, name: 'news').delete_all
expect(News.latest).to be_empty
end
it 'only includes news elements from projects that are visible to the user' do
private_project = FactoryBot.create(:project, public: false)
FactoryBot.create(:news, project: private_project)
latest_news = News.latest(user: User.anonymous)
expect(latest_news).to match_array [news]
end
it 'limits the number of returned news elements' do
News.delete_all
10.times do
FactoryBot.create(:news, project: project)
end
expect(News.latest(user: User.current, count: 2).size).to eq(2)
expect(News.latest(user: User.current, count: 6).size).to eq(6)
expect(News.latest(user: User.current, count: 15).size).to eq(10)
end
it 'returns five news elements by default' do
News.delete_all
2.times do
FactoryBot.create(:news, project: project)
end
expect(News.latest.size).to eq(2)
3.times do
FactoryBot.create(:news, project: project)
end
expect(News.latest.size).to eq(5)
2.times do
FactoryBot.create(:news, project: project)
end
expect(News.latest.size).to eq(5)
end
end
describe '#save',
with_settings: { notified_events: %w(news_added) } do
it 'sends email notifications when created' do
FactoryBot.create(:user,
member_in_project: project,
member_through_role: role)
project.members.reload
FactoryBot.create(:news, project: project)
expect(ActionMailer::Base.deliveries.size).to eq(1)
end
end
describe '#to_param' do
it 'includes includes id and title for a nicer url' do
title = 'OpenProject now has a Twitter Account'
news = FactoryBot.create(:news, title: title)
slug = "#{news.id}-openproject-now-has-a-twitter-account"
expect(news.to_param).to eq slug
end
it 'returns nil for unsaved news' do
news = News.new
expect(news.to_param).to be_nil
end
end
end
| 31.875 | 91 | 0.690888 |
ed02f83bb3ee158256c91b4cfc7c9b5ba8fea556 | 3,742 | module MediaInstagramItem
extend ActiveSupport::Concern
INSTAGRAM_URL = /^https?:\/\/(www\.)?instagram\.com\/(p|tv)\/([^\/]+)/
included do
Media.declare('instagram_item', [INSTAGRAM_URL])
end
def data_from_instagram_item
id = self.url.match(INSTAGRAM_URL)[3]
handle_exceptions(self, StandardError) do
self.get_instagram_data(id.to_s)
self.data.merge!(external_id: id)
data = self.data
raise data.dig('raw', 'api', 'error', 'message') if data.dig('raw', 'api', 'error') && data.dig('raw', 'graphql', 'error')
self.data.merge!({
external_id: id,
username: '@' + get_instagram_username_from_data,
description: get_instagram_text_from_data,
title: get_instagram_text_from_data,
picture: get_instagram_picture_from_data,
author_url: get_info_from_data('api', data, 'author_url'),
html: get_info_from_data('api', data, 'html'),
author_picture: data.dig('raw', 'graphql', 'shortcode_media', 'owner', 'profile_pic_url'),
author_name: data.dig('raw', 'graphql', 'shortcode_media', 'owner', 'full_name'),
published_at: self.get_instagram_datetime
})
end
end
def get_instagram_username_from_data
username = get_info_from_data('api', self.data, 'author_name')
username.blank? ? (data.dig('raw', 'graphql', 'shortcode_media', 'owner', 'username') || '' ) : username
end
def get_instagram_text_from_data
text = get_info_from_data('api', self.data, 'title')
return text unless text.blank?
text = self.data.dig('raw', 'graphql', 'shortcode_media', 'edge_media_to_caption', 'edges')
(!text.blank? && text.is_a?(Array)) ? text.first.dig('node', 'text') : ''
end
def get_instagram_picture_from_data
picture = get_info_from_data('api', self.data, 'thumbnail_url')
picture.blank? ? self.data.dig('raw', 'graphql', 'shortcode_media', 'display_url') : picture
end
def get_instagram_data(id)
pool = []
sources = { api: "https://api.instagram.com/oembed/?url=http://instagr.am/p/#{id}", graphql: "https://www.instagram.com/p/#{id}/?__a=1" }
sources.each do |source|
pool << Thread.new {
begin
data = self.get_instagram_json_data(source[1])
self.data['raw'][source[0]] = (source[0] == :api) ? data : data['graphql']
rescue StandardError => error
PenderAirbrake.notify(error.message, instagram_source: source)
Rails.logger.warn level: 'WARN', message: '[Parser] Cannot get data from Instagram URL', source_type: source[0], source_url: source[1], error_class: error.class, error_message: error.message
self.data['raw'][source[0]] = { error: { message: error.message, code: LapisConstants::ErrorCodes::const_get('UNKNOWN') }}
end
}
end
pool.each(&:join)
self.data['raw']['oembed'] = self.data['raw']['api']
end
def get_instagram_datetime
datetime = get_info_from_data('api', self.data, 'html').match(/.*datetime=\\?"([^"]+)\\?".*/)
datetime ? Time.parse(datetime[1]) : ''
end
def get_instagram_json_data(url)
uri = URI.parse(url)
http = Net::HTTP.new(uri.host, uri.port)
http.use_ssl = uri.scheme == 'https'
headers = { 'User-Agent' => Media.html_options(uri)['User-Agent'] }
request = Net::HTTP::Get.new(uri.request_uri, headers)
response = http.request(request)
raise StandardError.new("#{response.class}: #{response.message}") unless %(200 301 302).include?(response.code)
return JSON.parse(response.body) if response.code == '200'
location = response.header['location']
raise StandardError.new('Login required') if Media.is_a_login_page(location)
self.get_instagram_json_data(location)
end
end
| 42.522727 | 200 | 0.660609 |
08d761e23f52044a266de3431bf153a1fd0e3d6e | 3,041 | class Contribuicao < ApplicationRecord
belongs_to :usuario
belongs_to :plano
enum status: { pendente: 0, ativo: 1, cancelado: 2, suspenso: 3 }
enum tipo: { mensal: 0, avulsa: 1 }
attr_accessor :hash_cliente
attr_accessor :token_cartao
attr_accessor :nome_cartao
def check_status
# Atualiza o status da contribuição pelo status do pagseguro
credentials = PagSeguro::AccountCredentials.new(PagSeguro.email, PagSeguro.token)
subscription = PagSeguro::Subscription.find_by_code(self.codigo, credentials: credentials)
unless subscription
puts 'PAGSEGURO -> ERRO BUSCAR CONTRIBUIÇÃO'
puts subscription.to_yaml
return false
end
if subscription.errors.any?
puts 'PAGSEGURO -> ERRO BUSCAR CONTRIBUIÇÃO'
puts subscription.errors.join('\n')
return false
end
self.atualizar_status(subscription.status)
self.save
end
def atualizar_status(novo_status)
self.status = case novo_status.upcase
when 'PENDING'
:pendente
when 'ACTIVE'
:ativo
when 'CANCELLED'
:cancelado
when 'CANCELLED_BY_SENDER'
:cancelado
when 'CANCELLED_BY_RECEIVER'
:cancelado
when 'SUSPENDED'
:suspenso
end
end
def pagamentos
c = self
options = { credentials: PagSeguro::AccountCredentials.new(PagSeguro.email, PagSeguro.token)}
report = PagSeguro::SubscriptionSearchPaymentOrders.new(c.codigo, '', options)
unless report.valid?
puts "PAGSEGURO: Erro recuperar contribuicao"
puts report.errors.join("\n")
puts options
return nil
end
pagamentos = Array.new
while report.next_page?
report.next_page!
report.payment_orders.each do |p|
pagamentos << p
end
end
pagamentos.sort_by! &:last_event_date
return pagamentos
end
def payment_cancel
c = self
cancel = PagSeguro::SubscriptionCanceller.new(subscription_code: c.codigo)
cancel.credentials = PagSeguro::AccountCredentials.new(PagSeguro.email, PagSeguro.token)
begin
cancel.save
rescue
puts 'PAGSEGURO -> ERRO CANCELAR CONTRIBUIÇÃO - CANCEL.SAVE'
return false
end
if cancel.errors.any?
puts 'PAGSEGURO -> ERRO CANCELAR CONTRIBUIÇÃO'
puts cancel.errors.join('\n')
return false
else
return true
end
end
def payment_retry
c = self
p = pagamentos
return false unless p
p_retry = PagSeguro::SubscriptionRetry.new(
subscription_code: c.codigo,
payment_order_code: p.last.code
)
p_retry.credentials = PagSeguro::AccountCredentials.new(PagSeguro.email, PagSeguro.token)
p_retry.save
if p_retry.errors.any?
puts 'PAGSEGURO -> ERRO RETENTATIVA DE PAGAMENTO'
puts p_retry.errors.join('\n')
return false
else
return true
end
end
end
| 25.554622 | 97 | 0.642552 |
e81d98236843975bc3b939ec56aa28f7a92c284f | 237 | # String
class String
  # Converts the receiver to snake_case: splits acronym/word boundaries
  # ("HTTPServer" -> "http_server", "fooBar" -> "foo_bar"), maps dashes and
  # whitespace to underscores, collapses repeated underscores, and
  # lowercases the result.
  def snakecase
    result = gsub(/([A-Z]+)([A-Z][a-z])/, '\1_\2')
    result = result.gsub(/([a-z\d])([A-Z])/, '\1_\2')
    result = result.tr('-', '_')
    result = result.gsub(/\s/, '_')
    result = result.squeeze('_')
    result.downcase
  end
end
| 18.230769 | 41 | 0.371308 |
e9eb6eedd2c75f82777b5e8bb301c668d8d2774d | 3,431 | #--
# PDF::Writer for Ruby.
# http://rubyforge.org/projects/ruby-pdf/
# Copyright 2003 - 2005 Austin Ziegler.
#
# Licensed under a MIT-style licence. See LICENCE in the main distribution
# for full licensing information.
#
# $Id: pages.rb,v 1.2 2005/05/16 03:59:21 austin Exp $
#++
# object which is a parent to the pages in the document
class PDF::Writer::Object::Pages < PDF::Writer::Object
  # Registers itself as the document catalog's page-tree root and
  # initialises the collections that accumulate pages, fonts and external
  # XObjects, plus the optional page-box geometry (all unset by default).
  def initialize(parent)
    super(parent)
    @parent.catalog.pages = self
    @pages = []
    @procset = nil
    @media_box = nil
    @fonts = []
    @xObjects = []
    @bleed_box = nil
    @trim_box = nil
  end
  # Number of pages currently in the tree.
  def size
    @pages.size
  end
  # The first page in the tree, or nil when no page has been added yet.
  def first_page
    @pages[0]
  end
  # Add the page ID to the end of the page list.
  # Also accepts Font and External objects, which are routed to their own
  # collections; anything else raises ArgumentError.
  def <<(p)
    if p.kind_of?(PDF::Writer::Object::Page)
      @pages << p
    elsif p.kind_of?(PDF::Writer::Object::Font)
      @fonts << p
    elsif p.kind_of?(PDF::Writer::External)
      @xObjects << p
    else
      raise ArgumentError, PDF::Message[:req_FPXO]
    end
  end
  # Add a page to the page list. If p is just a Page, then it will be
  # added to the page list. Otherwise, it will be treated as a Hash with
  # keys :page, :pos, and :rpage. :page is the Page to be added to the
  # list; :pos is :before or :after; :rpage is the Page to which the
  # new Page will be added relative to.
  #
  # NOTE(review): fonts are detected here via PDF::Writer::FontMetrics,
  # whereas #<< checks PDF::Writer::Object::Font — confirm which class is
  # actually stored in @fonts, since #to_s calls #font_id/#oid on entries.
  # Also note that when :rpage is not found, the page is silently dropped.
  def add(p)
    if p.kind_of?(PDF::Writer::Object::Page)
      @pages << p
    elsif p.kind_of?(PDF::Writer::FontMetrics)
      @fonts << p
    elsif p.kind_of?(PDF::Writer::External)
      @xObjects << p
    elsif p.kind_of?(Hash)
      # Find a match.
      i = @pages.index(p[:rpage])
      unless i.nil?
        # There is a match; insert the page.
        case p[:pos]
        when :before
          @pages[i, 0] = p[:page]
        when :after
          @pages[i + 1, 0] = p[:page]
        else
          raise ArgumentError, PDF::Message[:invalid_pos]
        end
      end
    else
      raise ArgumentError, PDF::Message[:req_FPXOH]
    end
  end
  attr_accessor :procset
  # Each of the following should be an array of 4 numbers, the x and y
  # coordinates of the lower left and upper right bounds of the box.
  attr_accessor :media_box
  attr_accessor :bleed_box
  attr_accessor :trim_box
  # Serialises the page tree as a PDF "Pages" dictionary object: the /Kids
  # reference list, /Count, a shared /Resources dictionary (ProcSet, Font
  # and XObject maps) and any MediaBox/BleedBox/TrimBox that has been set.
  # An empty tree emits a minimal /Count 0 dictionary.
  def to_s
    unless @pages.empty?
      res = "\n#{@oid} 0 obj\n<< /Type /Pages\n/Kids ["
      @pages.uniq! # uniqify the data...
      @pages.each { |p| res << "#{p.oid} 0 R\n" }
      res << "]\n/Count #{@pages.size}"
      unless @fonts.empty? and @procset.nil?
        res << "\n/Resources <<"
        res << "\n/ProcSet #{@procset.oid} 0 R" unless @procset.nil?
        unless @fonts.empty?
          res << "\n/Font << "
          @fonts.each { |f| res << "\n/F#{f.font_id} #{f.oid} 0 R" }
          res << " >>"
        end
        unless @xObjects.empty?
          res << "\n/XObject << "
          @xObjects.each { |x| res << "\n/#{x.label} #{x.oid} 0 R" }
          res << " >>"
        end
        res << "\n>>"
        res << "\n/MediaBox [#{@media_box.join(' ')}]" unless @media_box.nil? or @media_box.empty?
        res << "\n/BleedBox [#{@bleed_box.join(' ')}]" unless @bleed_box.nil? or @bleed_box.empty?
        res << "\n/TrimBox [#{@trim_box.join(' ')}]" unless @trim_box.nil? or @trim_box.empty?
      end
      res << "\n >>\nendobj"
    else
      "\n#{@oid} 0 obj\n<< /Type /Pages\n/Count 0\n>>\nendobj"
    end
  end
end
| 29.577586 | 98 | 0.566599 |
4a4c4993af57341d8118ab7fdae13ae33d719ee9 | 2,563 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
  module Cloud
    module Securitycenter
      module V1beta1
        # User specified security marks that are attached to the parent Security
        # Command Center resource. Security marks are scoped within a Security Command
        # Center organization -- they can be modified and viewed by all users who have
        # proper permissions on the organization.
        # @!attribute [rw] name
        #   @return [::String]
        #     The relative resource name of the SecurityMarks. See:
        #     https://cloud.google.com/apis/design/resource_names#relative_resource_name
        #     Examples:
        #     "organizations/\\{organization_id}/assets/\\{asset_id}/securityMarks"
        #     "organizations/\\{organization_id}/sources/\\{source_id}/findings/\\{finding_id}/securityMarks".
        # @!attribute [rw] marks
        #   @return [::Google::Protobuf::Map{::String => ::String}]
        #     Mutable user specified security marks belonging to the parent resource.
        #     Constraints are as follows:
        #
        #       * Keys and values are treated as case insensitive
        #       * Keys must be between 1 - 256 characters (inclusive)
        #       * Keys must be letters, numbers, underscores, or dashes
        #       * Values have leading and trailing whitespace trimmed, remaining
        #         characters must be between 1 - 4096 characters (inclusive)
        class SecurityMarks
          include ::Google::Protobuf::MessageExts
          extend ::Google::Protobuf::MessageExts::ClassMethods

          # Generated plumbing type for one entry of the `marks` map field
          # (presumably protobuf map<string, string>); not intended for
          # direct use by application code.
          # @!attribute [rw] key
          #   @return [::String]
          # @!attribute [rw] value
          #   @return [::String]
          class MarksEntry
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
          end
        end
      end
    end
  end
end
| 41.33871 | 110 | 0.65158 |
33932d551f2cb3f3674f5d9765b86003f257ad3b | 138 | json.extract! @job, :id, :title, :description, :date, :start_time, :end_time, :place, :address, :size, :user_id, :created_at, :updated_at
| 69 | 137 | 0.702899 |
798770c46bb9a4c9291bb6edc7b09d859939ddfc | 2,243 | #!/usr/bin/env ruby
#
# RabbitMQ check alive plugin
# ===
#
# This plugin checks if RabbitMQ server is alive using the REST API
#
# Copyright 2012 Abhijith G <[email protected]> and Runa Inc.
#
# Released under the same terms as Sensu (the MIT license); see LICENSE
# for details.
require 'rubygems' if RUBY_VERSION < '1.9.0'
require 'sensu-plugin/check/cli'
require 'json'
require 'rest_client'
# Sensu check that probes the RabbitMQ management API's aliveness-test
# endpoint for a vhost and reports ok / critical / unknown.
class CheckRabbitMQ < Sensu::Plugin::Check::CLI
  option :host,
         description: 'RabbitMQ host',
         short: '-w',
         long: '--host HOST',
         default: 'localhost'

  option :vhost,
         description: 'RabbitMQ vhost',
         short: '-v',
         long: '--vhost VHOST',
         default: '%2F'

  option :username,
         description: 'RabbitMQ username',
         short: '-u',
         long: '--username USERNAME',
         default: 'guest'

  option :password,
         description: 'RabbitMQ password',
         short: '-p',
         long: '--password PASSWORD',
         default: 'guest'

  option :port,
         description: 'RabbitMQ API port',
         short: '-P',
         long: '--port PORT',
         default: '15672'

  option :ssl,
         description: 'Enable SSL for connection to RabbitMQ',
         long: '--ssl',
         boolean: true,
         default: false

  # Entry point invoked by the Sensu plugin framework: maps the probe
  # result onto the plugin's exit states.
  def run
    res = vhost_alive?
    if res['status'] == 'ok'
      ok res['message']
    elsif res['status'] == 'critical'
      critical res['message']
    else
      unknown res['message']
    end
  end

  # Probes the management API aliveness-test endpoint for the configured
  # vhost. Returns a Hash with 'status' ('ok', 'critical' or 'unknown')
  # and a human-readable 'message'.
  def vhost_alive?
    host = config[:host]
    port = config[:port]
    username = config[:username]
    password = config[:password]
    vhost = config[:vhost]
    ssl = config[:ssl]

    begin
      resource = RestClient::Resource.new "http#{ssl ? 's' : ''}://#{host}:#{port}/api/aliveness-test/#{vhost}", username, password
      # BUG FIX: the parsed comparison was previously assigned to a throwaway
      # local and discarded, so any HTTP 200 reply reported the server alive
      # even when the body was not {"status":"ok"}. Check the payload.
      body = JSON.parse(resource.get)
      if body == { 'status' => 'ok' }
        { 'status' => 'ok', 'message' => 'RabbitMQ server is alive' }
      else
        { 'status' => 'critical', 'message' => "RabbitMQ aliveness test returned #{body.inspect}" }
      end
    rescue Errno::ECONNREFUSED => e
      { 'status' => 'critical', 'message' => e.message }
    rescue => e
      # Any other failure (HTTP error, bad JSON, DNS, ...) is reported as
      # unknown rather than crashing the check.
      { 'status' => 'unknown', 'message' => e.message }
    end
  end
end
| 25.781609 | 131 | 0.576014 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.