# frozen_string_literal: true
require "active_record/relation/from_clause"
require "active_record/relation/query_attribute"
require "active_record/relation/where_clause"
require "active_record/relation/where_clause_factory"
require "active_model/forbidden_attributes_protection"
module ActiveRecord
module QueryMethods
extend ActiveSupport::Concern
include ActiveModel::ForbiddenAttributesProtection
# WhereChain objects act as placeholder for queries in which #where does not have any parameter.
# In this case, #where must be chained with #not to return a new relation.
class WhereChain
  include ActiveModel::ForbiddenAttributesProtection

  def initialize(scope)
    @scope = scope
  end

  # Returns a new relation expressing WHERE + NOT condition according to
  # the conditions in the arguments.
  #
  # #not accepts conditions as a string, array, or hash. See QueryMethods#where for
  # more details on each format.
  #
  #   User.where.not("name = 'Jon'")
  #   # SELECT * FROM users WHERE NOT (name = 'Jon')
  #
  #   User.where.not(["name = ?", "Jon"])
  #   # SELECT * FROM users WHERE NOT (name = 'Jon')
  #
  #   User.where.not(name: "Jon")
  #   # SELECT * FROM users WHERE name != 'Jon'
  #
  #   User.where.not(name: nil)
  #   # SELECT * FROM users WHERE name IS NOT NULL
  #
  #   User.where.not(name: %w(Ko1 Nobu))
  #   # SELECT * FROM users WHERE name NOT IN ('Ko1', 'Nobu')
  def not(opts, *rest)
    opts = sanitize_forbidden_attributes(opts)

    where_clause = @scope.send(:where_clause_factory).build(opts, rest)

    @scope.references!(PredicateBuilder.references(opts)) if Hash === opts

    if not_behaves_as_nor?(opts)
      # Multi-condition hashes historically inverted as NOR; warn that this
      # flips to NAND in Rails 6.1, and show the per-condition rewrite.
      ActiveSupport::Deprecation.warn(<<~MSG.squish)
        NOT conditions will no longer behave as NOR in Rails 6.1.
        To continue using NOR conditions, NOT each conditions manually
        (`#{
          opts.flat_map { |key, value|
            if value.is_a?(Hash) && value.size > 1
              value.map { |k, v| ".where.not(#{key.inspect} => { #{k.inspect} => ... })" }
            else
              ".where.not(#{key.inspect} => ...)"
            end
          }.join
        }`).
      MSG
      @scope.where_clause += where_clause.invert(:nor)
    else
      @scope.where_clause += where_clause.invert
    end

    @scope
  end

  # Returns a new relation with left outer joins and where clause to identify
  # missing relations.
  #
  # For example, posts that are missing a related author:
  #
  #   Post.where.missing(:author)
  #   # SELECT "posts".* FROM "posts"
  #   # LEFT OUTER JOIN "authors" ON "authors"."id" = "posts"."author_id"
  #   # WHERE "authors"."id" IS NULL
  #
  # Additionally, multiple relations can be combined. This will return posts
  # that are missing both an author and any comments:
  #
  #   Post.where.missing(:author, :comments)
  #   # SELECT "posts".* FROM "posts"
  #   # LEFT OUTER JOIN "authors" ON "authors"."id" = "posts"."author_id"
  #   # LEFT OUTER JOIN "comments" ON "comments"."post_id" = "posts"."id"
  #   # WHERE "authors"."id" IS NULL AND "comments"."id" IS NULL
  def missing(*args)
    args.each do |arg|
      reflection = @scope.klass._reflect_on_association(arg)
      # A NULL primary key on the joined table means the association row is absent.
      opts = { reflection.table_name => { reflection.association_primary_key => nil } }
      @scope.left_outer_joins!(arg)
      @scope.where!(opts)
    end

    @scope
  end

  private
    # True when +opts+ is a hash carrying more than one condition (either
    # multiple top-level keys or a nested hash with multiple keys) — the
    # case whose inversion semantics are being deprecated above.
    def not_behaves_as_nor?(opts)
      return false unless opts.is_a?(Hash)

      opts.any? { |k, v| v.is_a?(Hash) && v.size > 1 } ||
        opts.size > 1
    end
end
FROZEN_EMPTY_ARRAY = [].freeze
FROZEN_EMPTY_HASH = {}.freeze

# Define a reader/writer pair for every relation value method, e.g.
# +includes_values+ / +includes_values=+. Multi-value methods default to a
# shared frozen array, single-value methods to +nil+ (except +create_with+,
# which defaults to a shared frozen hash), and clause methods to an empty
# FromClause/WhereClause object. Writers refuse mutation once the relation
# is loaded (see #assert_mutability!).
Relation::VALUE_METHODS.each do |name|
  method_name, default =
    case name
    when *Relation::MULTI_VALUE_METHODS
      ["#{name}_values", "FROZEN_EMPTY_ARRAY"]
    when *Relation::SINGLE_VALUE_METHODS
      ["#{name}_value", name == :create_with ? "FROZEN_EMPTY_HASH" : "nil"]
    when *Relation::CLAUSE_METHODS
      ["#{name}_clause", name == :from ? "Relation::FromClause.empty" : "Relation::WhereClause.empty"]
    end
  class_eval <<-CODE, __FILE__, __LINE__ + 1
    def #{method_name}                    # def includes_values
      @values.fetch(:#{name}, #{default}) #   @values.fetch(:includes, FROZEN_EMPTY_ARRAY)
    end                                   # end

    def #{method_name}=(value)            # def includes_values=(value)
      assert_mutability!                  #   assert_mutability!
      @values[:#{name}] = value           #   @values[:includes] = value
    end                                   # end
  CODE
end

alias extensions extending_values
# Specify relationships to be included in the result set. For
# example:
#
# users = User.includes(:address)
# users.each do |user|
# user.address.city
# end
#
# allows you to access the +address+ attribute of the +User+ model without
# firing an additional query. This will often result in a
# performance improvement over a simple join.
#
# You can also specify multiple relationships, like this:
#
# users = User.includes(:address, :friends)
#
# Loading nested relationships is possible using a Hash:
#
# users = User.includes(:address, friends: [:address, :followers])
#
# === conditions
#
# If you want to add string conditions to your included models, you'll have
# to explicitly reference them. For example:
#
# User.includes(:posts).where('posts.name = ?', 'example')
#
# Will throw an error, but this will work:
#
# User.includes(:posts).where('posts.name = ?', 'example').references(:posts)
#
# Note that #includes works with association names while #references needs
# the actual table name.
#
# If you pass the conditions via hash, you don't need to call #references
# explicitly, as #where references the tables for you. For example, this
# will work correctly:
#
# User.includes(:posts).where(posts: { name: 'example' })
# Spawns a copy of the relation with the given include arguments merged in.
def includes(*args)
  check_if_method_has_arguments!(:includes, args)
  relation = spawn
  relation.includes!(*args)
end
def includes!(*args) # :nodoc:
  # Drop blank entries first, then flatten nested arrays (same order as the
  # destructive original), and union into the existing include list.
  cleaned = args.compact_blank.flatten
  self.includes_values = includes_values | cleaned
  self
end
# Forces eager loading by performing a LEFT OUTER JOIN on +args+:
#
# User.eager_load(:posts)
# # SELECT "users"."id" AS t0_r0, "users"."name" AS t0_r1, ...
# # FROM "users" LEFT OUTER JOIN "posts" ON "posts"."user_id" =
# # "users"."id"
# Spawns a copy of the relation with forced LEFT OUTER JOIN eager loading.
def eager_load(*args)
  check_if_method_has_arguments!(:eager_load, args)
  relation = spawn
  relation.eager_load!(*args)
end
def eager_load!(*args) # :nodoc:
  self.eager_load_values = eager_load_values | args
  self
end
# Allows preloading of +args+, in the same way that #includes does:
#
# User.preload(:posts)
# # SELECT "posts".* FROM "posts" WHERE "posts"."user_id" IN (1, 2, 3)
# Spawns a copy of the relation with the given preload arguments merged in.
def preload(*args)
  check_if_method_has_arguments!(:preload, args)
  relation = spawn
  relation.preload!(*args)
end
def preload!(*args) # :nodoc:
  self.preload_values = preload_values | args
  self
end
# Extracts a named +association+ from the relation. The named association is first preloaded,
# then the individual association records are collected from the relation. Like so:
#
# account.memberships.extract_associated(:user)
# # => Returns collection of User records
#
# This is short-hand for:
#
# account.memberships.preload(:user).collect(&:user)
# Preloads +association+ and returns the associated record(s) gathered
# from each row of the relation.
def extract_associated(association)
  preload(association).map(&association)
end
# Use to indicate that the given +table_names+ are referenced by an SQL string,
# and should therefore be JOINed in any query rather than loaded separately.
# This method only works in conjunction with #includes.
# See #includes for more details.
#
# User.includes(:posts).where("posts.name = 'foo'")
# # Doesn't JOIN the posts table, resulting in an error.
#
# User.includes(:posts).where("posts.name = 'foo'").references(:posts)
# # Query now knows the string references posts, so adds a JOIN
# Spawns a copy of the relation with the referenced table names merged in.
def references(*table_names)
  check_if_method_has_arguments!(:references, table_names)
  relation = spawn
  relation.references!(*table_names)
end
def references!(*table_names) # :nodoc:
  # Normalize to flat string names before unioning with existing references.
  names = table_names.flatten.map(&:to_s)
  self.references_values = references_values | names
  self
end
# Works in two unique ways.
#
# First: takes a block so it can be used just like <tt>Array#select</tt>.
#
# Model.all.select { |m| m.field == value }
#
# This will build an array of objects from the database for the scope,
# converting them into an array and iterating through them using
# <tt>Array#select</tt>.
#
# Second: Modifies the SELECT statement for the query so that only certain
# fields are retrieved:
#
# Model.select(:field)
# # => [#<Model id: nil, field: "value">]
#
# Although in the above example it looks as though this method returns an
# array, it actually returns a relation object and can have other query
# methods appended to it, such as the other methods in ActiveRecord::QueryMethods.
#
# The argument to the method can also be an array of fields.
#
# Model.select(:field, :other_field, :and_one_more)
# # => [#<Model id: nil, field: "value", other_field: "value", and_one_more: "value">]
#
# You can also use one or more strings, which will be used unchanged as SELECT fields.
#
# Model.select('field AS field_one', 'other_field AS field_two')
# # => [#<Model id: nil, field: "value", other_field: "value">]
#
# If an alias was specified, it will be accessible from the resulting objects:
#
# Model.select('field AS field_one').first.field_one
# # => "value"
#
# Accessing attributes of an object that do not have fields retrieved by a select
# except +id+ will throw ActiveModel::MissingAttributeError:
#
# Model.select(:field).first.other_field
# # => ActiveModel::MissingAttributeError: missing attribute: other_field
def select(*fields)
  if block_given?
    # Block form behaves like Array#select and takes no field arguments.
    raise ArgumentError, "`select' with block doesn't take arguments." if fields.any?

    super()
  else
    raise ArgumentError, "Call `select' with at least one field" if fields.empty?

    spawn._select!(*fields)
  end
end
def _select!(*fields) # :nodoc:
  # Blank entries are removed before flattening, matching the destructive
  # compact_blank!/flatten! order of the original.
  self.select_values += fields.compact_blank.flatten
  self
end
# Allows you to change a previously set select statement.
#
# Post.select(:title, :body)
# # SELECT `posts`.`title`, `posts`.`body` FROM `posts`
#
# Post.select(:title, :body).reselect(:created_at)
# # SELECT `posts`.`created_at` FROM `posts`
#
# This is short-hand for <tt>unscope(:select).select(fields)</tt>.
# Note that we're unscoping the entire select statement.
# Spawns a copy of the relation whose SELECT list is replaced by +args+.
def reselect(*args)
  check_if_method_has_arguments!(:reselect, args)
  relation = spawn
  relation.reselect!(*args)
end
# Same as #reselect but operates on relation in-place instead of copying.
# Same as #reselect but operates on relation in-place instead of copying.
def reselect!(*args) # :nodoc:
  # Replaces (rather than appends to) any previously set SELECT list.
  tap { self.select_values = args }
end
# Allows to specify a group attribute:
#
# User.group(:name)
# # SELECT "users".* FROM "users" GROUP BY name
#
# Returns an array with distinct records based on the +group+ attribute:
#
# User.select([:id, :name])
# # => [#<User id: 1, name: "Oscar">, #<User id: 2, name: "Oscar">, #<User id: 3, name: "Foo">]
#
# User.group(:name)
# # => [#<User id: 3, name: "Foo", ...>, #<User id: 2, name: "Oscar", ...>]
#
# User.group('name AS grouped_name, age')
# # => [#<User id: 3, name: "Foo", age: 21, ...>, #<User id: 2, name: "Oscar", age: 21, ...>, #<User id: 5, name: "Foo", age: 23, ...>]
#
# Passing in an array of attributes to group by is also supported.
#
# User.select([:id, :first_name]).group(:id, :first_name).first(3)
# # => [#<User id: 1, first_name: "Bill">, #<User id: 2, first_name: "Earl">, #<User id: 3, first_name: "Beto">]
# Spawns a copy of the relation with the given GROUP BY attributes merged in.
def group(*args)
  check_if_method_has_arguments!(:group, args)
  relation = spawn
  relation.group!(*args)
end
def group!(*args) # :nodoc:
  self.group_values = group_values | args.flatten
  self
end
# Allows to specify an order attribute:
#
# User.order(:name)
# # SELECT "users".* FROM "users" ORDER BY "users"."name" ASC
#
# User.order(email: :desc)
# # SELECT "users".* FROM "users" ORDER BY "users"."email" DESC
#
# User.order(:name, email: :desc)
# # SELECT "users".* FROM "users" ORDER BY "users"."name" ASC, "users"."email" DESC
#
# User.order('name')
# # SELECT "users".* FROM "users" ORDER BY name
#
# User.order('name DESC')
# # SELECT "users".* FROM "users" ORDER BY name DESC
#
# User.order('name DESC, email')
# # SELECT "users".* FROM "users" ORDER BY name DESC, email
# Spawns a copy of the relation with the given ORDER BY arguments appended.
def order(*args)
  check_if_method_has_arguments!(:order, args)
  relation = spawn
  relation.order!(*args)
end
# Same as #order but operates on relation in-place instead of copying.
# Same as #order but operates on relation in-place instead of copying.
def order!(*args) # :nodoc:
  # preprocess_order_args mutates +args+ in place before they are appended.
  preprocess_order_args(args)
  self.order_values = order_values + args
  self
end
# Replaces any existing order defined on the relation with the specified order.
#
# User.order('email DESC').reorder('id ASC') # generated SQL has 'ORDER BY id ASC'
#
# Subsequent calls to order on the same relation will be appended. For example:
#
# User.order('email DESC').reorder('id ASC').order('name ASC')
#
# generates a query with 'ORDER BY id ASC, name ASC'.
# Spawns a copy of the relation whose ORDER BY is replaced by +args+.
def reorder(*args)
  check_if_method_has_arguments!(:reorder, args)
  relation = spawn
  relation.reorder!(*args)
end
# Same as #reorder but operates on relation in-place instead of copying.
# Same as #reorder but operates on relation in-place instead of copying.
def reorder!(*args) # :nodoc:
  # Skip preprocessing only when every argument is blank (equivalent to the
  # original `unless args.all?(&:blank?)` guard).
  preprocess_order_args(args) if args.any?(&:present?)
  self.reordering_value = true
  self.order_values = args
  self
end
# The set of symbols accepted by #unscope / #unscope!.
VALID_UNSCOPING_VALUES = Set.new([:where, :select, :group, :order, :lock,
                                  :limit, :offset, :joins, :left_outer_joins, :annotate,
                                  :includes, :from, :readonly, :having, :optimizer_hints])
# Removes an unwanted relation that is already defined on a chain of relations.
# This is useful when passing around chains of relations and would like to
# modify the relations without reconstructing the entire chain.
#
# User.order('email DESC').unscope(:order) == User.all
#
# The method arguments are symbols which correspond to the names of the methods
# which should be unscoped. The valid arguments are given in VALID_UNSCOPING_VALUES.
# The method can also be called with multiple arguments. For example:
#
# User.order('email DESC').select('id').where(name: "John")
# .unscope(:order, :select, :where) == User.all
#
# One can additionally pass a hash as an argument to unscope specific +:where+ values.
# This is done by passing a hash with a single key-value pair. The key should be
# +:where+ and the value should be the where value to unscope. For example:
#
# User.where(name: "John", active: true).unscope(where: :name)
# == User.where(active: true)
#
# This method is similar to #except, but unlike
# #except, it persists across merges:
#
# User.order('email').merge(User.except(:order))
# == User.order('email')
#
# User.order('email').merge(User.unscope(:order))
# == User.all
#
# This means it can be used in association definitions:
#
# has_many :comments, -> { unscope(where: :trashed) }
#
# Spawns a copy of the relation with the given scoping values removed.
def unscope(*args)
  check_if_method_has_arguments!(:unscope, args)
  relation = spawn
  relation.unscope!(*args)
end
def unscope!(*args) # :nodoc:
  args.flatten!
  # Record what was unscoped so the removal survives later merges.
  self.unscope_values += args

  args.each do |scope|
    case scope
    when Symbol
      # :left_joins is an alias for :left_outer_joins.
      scope = :left_outer_joins if scope == :left_joins
      unless VALID_UNSCOPING_VALUES.include?(scope)
        raise ArgumentError, "Called unscope() with invalid unscoping argument ':#{scope}'. Valid arguments are :#{VALID_UNSCOPING_VALUES.to_a.join(", :")}."
      end
      assert_mutability!
      @values.delete(scope)
    when Hash
      # Hash form removes individual WHERE conditions by attribute name.
      scope.each do |key, target_value|
        raise ArgumentError, "Hash arguments in .unscope(*args) must have :where as the key." unless key == :where

        self.where_clause = where_clause.except(*Array(target_value).map(&:to_s))
      end
    else
      raise ArgumentError, "Unrecognized scoping: #{args.inspect}. Use .unscope(where: :attribute_name) or .unscope(:order), for example."
    end
  end

  self
end
# Performs an INNER JOIN on +args+. The given symbol(s) should match the name of
# the association(s).
#
# User.joins(:posts)
# # SELECT "users".*
# # FROM "users"
# # INNER JOIN "posts" ON "posts"."user_id" = "users"."id"
#
# Multiple joins:
#
# User.joins(:posts, :account)
# # SELECT "users".*
# # FROM "users"
# # INNER JOIN "posts" ON "posts"."user_id" = "users"."id"
# # INNER JOIN "accounts" ON "accounts"."id" = "users"."account_id"
#
# Nested joins:
#
# User.joins(posts: [:comments])
# # SELECT "users".*
# # FROM "users"
# # INNER JOIN "posts" ON "posts"."user_id" = "users"."id"
# # INNER JOIN "comments" "comments_posts"
# # ON "comments_posts"."post_id" = "posts"."id"
#
# You can use strings in order to customize your joins:
#
# User.joins("LEFT JOIN bookmarks ON bookmarks.bookmarkable_type = 'Post' AND bookmarks.user_id = users.id")
# # SELECT "users".* FROM "users" LEFT JOIN bookmarks ON bookmarks.bookmarkable_type = 'Post' AND bookmarks.user_id = users.id
# Spawns a copy of the relation with the given join arguments merged in.
def joins(*args)
  check_if_method_has_arguments!(:joins, args)
  relation = spawn
  relation.joins!(*args)
end
def joins!(*args) # :nodoc:
  # Remove nils, then flatten (same order as the destructive original).
  cleaned = args.compact.flatten
  self.joins_values = joins_values | cleaned
  self
end
# Performs a LEFT OUTER JOIN on +args+:
#
# User.left_outer_joins(:posts)
# => SELECT "users".* FROM "users" LEFT OUTER JOIN "posts" ON "posts"."user_id" = "users"."id"
#
def left_outer_joins(*args)
  # __callee__ keeps the error message accurate for the :left_joins alias.
  check_if_method_has_arguments!(__callee__, args)
  relation = spawn
  relation.left_outer_joins!(*args)
end
alias left_joins left_outer_joins
def left_outer_joins!(*args) # :nodoc:
  cleaned = args.compact.flatten
  self.left_outer_joins_values = left_outer_joins_values | cleaned
  self
end
# Returns a new relation, which is the result of filtering the current relation
# according to the conditions in the arguments.
#
# #where accepts conditions in one of several formats. In the examples below, the resulting
# SQL is given as an illustration; the actual query generated may be different depending
# on the database adapter.
#
# === string
#
# A single string, without additional arguments, is passed to the query
# constructor as an SQL fragment, and used in the where clause of the query.
#
# Client.where("orders_count = '2'")
# # SELECT * from clients where orders_count = '2';
#
# Note that building your own string from user input may expose your application
# to injection attacks if not done properly. As an alternative, it is recommended
# to use one of the following methods.
#
# === array
#
# If an array is passed, then the first element of the array is treated as a template, and
# the remaining elements are inserted into the template to generate the condition.
# Active Record takes care of building the query to avoid injection attacks, and will
# convert from the ruby type to the database type where needed. Elements are inserted
# into the string in the order in which they appear.
#
# User.where(["name = ? and email = ?", "Joe", "[email protected]"])
# # SELECT * FROM users WHERE name = 'Joe' AND email = '[email protected]';
#
# Alternatively, you can use named placeholders in the template, and pass a hash as the
# second element of the array. The names in the template are replaced with the corresponding
# values from the hash.
#
# User.where(["name = :name and email = :email", { name: "Joe", email: "[email protected]" }])
# # SELECT * FROM users WHERE name = 'Joe' AND email = '[email protected]';
#
# This can make for more readable code in complex queries.
#
# Lastly, you can use sprintf-style % escapes in the template. This works slightly differently
# than the previous methods; you are responsible for ensuring that the values in the template
# are properly quoted. The values are passed to the connector for quoting, but the caller
# is responsible for ensuring they are enclosed in quotes in the resulting SQL. After quoting,
# the values are inserted using the same escapes as the Ruby core method +Kernel::sprintf+.
#
# User.where(["name = '%s' and email = '%s'", "Joe", "[email protected]"])
# # SELECT * FROM users WHERE name = 'Joe' AND email = '[email protected]';
#
# If #where is called with multiple arguments, these are treated as if they were passed as
# the elements of a single array.
#
# User.where("name = :name and email = :email", { name: "Joe", email: "[email protected]" })
# # SELECT * FROM users WHERE name = 'Joe' AND email = '[email protected]';
#
# When using strings to specify conditions, you can use any operator available from
# the database. While this provides the most flexibility, you can also unintentionally introduce
# dependencies on the underlying database. If your code is intended for general consumption,
# test with multiple database backends.
#
# === hash
#
# #where will also accept a hash condition, in which the keys are fields and the values
# are values to be searched for.
#
# Fields can be symbols or strings. Values can be single values, arrays, or ranges.
#
# User.where({ name: "Joe", email: "[email protected]" })
# # SELECT * FROM users WHERE name = 'Joe' AND email = '[email protected]'
#
# User.where({ name: ["Alice", "Bob"]})
# # SELECT * FROM users WHERE name IN ('Alice', 'Bob')
#
# User.where({ created_at: (Time.now.midnight - 1.day)..Time.now.midnight })
# # SELECT * FROM users WHERE (created_at BETWEEN '2012-06-09 07:00:00.000000' AND '2012-06-10 07:00:00.000000')
#
# In the case of a belongs_to relationship, an association key can be used
# to specify the model if an ActiveRecord object is used as the value.
#
# author = Author.find(1)
#
# # The following queries will be equivalent:
# Post.where(author: author)
# Post.where(author_id: author)
#
# This also works with polymorphic belongs_to relationships:
#
# treasure = Treasure.create(name: 'gold coins')
# treasure.price_estimates << PriceEstimate.create(price: 125)
#
# # The following queries will be equivalent:
# PriceEstimate.where(estimate_of: treasure)
# PriceEstimate.where(estimate_of_type: 'Treasure', estimate_of_id: treasure)
#
# === Joins
#
# If the relation is the result of a join, you may create a condition which uses any of the
# tables in the join. For string and array conditions, use the table name in the condition.
#
# User.joins(:posts).where("posts.created_at < ?", Time.now)
#
# For hash conditions, you can either use the table name in the key, or use a sub-hash.
#
# User.joins(:posts).where({ "posts.published" => true })
# User.joins(:posts).where({ posts: { published: true } })
#
# === no argument
#
# If no argument is passed, #where returns a new instance of WhereChain, that
# can be chained with #not to return a new relation that negates the where clause.
#
# User.where.not(name: "Jon")
# # SELECT * FROM users WHERE name != 'Jon'
#
# See WhereChain for more details on #not.
#
# === blank condition
#
# If the condition is any blank-ish object, then #where is a no-op and returns
# the current relation.
def where(opts = :chain, *rest)
  case
  when :chain == opts
    # No argument: return a WhereChain so the caller can use #not / #missing.
    WhereChain.new(spawn)
  when opts.blank?
    # Blank condition: no-op.
    self
  else
    spawn.where!(opts, *rest)
  end
end
def where!(opts, *rest) # :nodoc:
  opts = sanitize_forbidden_attributes(opts)
  # Hash conditions implicitly reference the tables named by their keys.
  references!(PredicateBuilder.references(opts)) if opts.is_a?(Hash)
  self.where_clause = where_clause + where_clause_factory.build(opts, rest)
  self
end
# Allows you to change a previously set where condition for a given attribute, instead of appending to that condition.
#
# Post.where(trashed: true).where(trashed: false)
# # WHERE `trashed` = 1 AND `trashed` = 0
#
# Post.where(trashed: true).rewhere(trashed: false)
# # WHERE `trashed` = 0
#
# Post.where(active: true).where(trashed: true).rewhere(trashed: false)
# # WHERE `active` = 1 AND `trashed` = 0
#
# This is short-hand for <tt>unscope(where: conditions.keys).where(conditions)</tt>.
# Note that unlike reorder, we're only unscoping the named conditions -- not the entire where statement.
# Unscopes only the named conditions, then re-applies +conditions+.
def rewhere(conditions)
  attrs = conditions.keys
  unscope(where: attrs).where(conditions)
end
# Returns a new relation, which is the logical union of this relation and the one passed as an
# argument.
#
# The two relations must be structurally compatible: they must be scoping the same model, and
# they must differ only by #where (if no #group has been defined) or #having (if a #group is
# present). Neither relation may have a #limit, #offset, or #distinct set.
#
# Post.where("id = 1").or(Post.where("author_id = 3"))
# # SELECT `posts`.* FROM `posts` WHERE ((id = 1) OR (author_id = 3))
#
def or(other)
  unless other.is_a?(Relation)
    raise ArgumentError, "You have passed #{other.class.name} object to #or. Pass an ActiveRecord::Relation object instead."
  end

  relation = spawn
  relation.or!(other)
end
def or!(other) # :nodoc:
  incompatible = structurally_incompatible_values_for_or(other)
  unless incompatible.empty?
    raise ArgumentError, "Relation passed to #or must be structurally compatible. Incompatible values: #{incompatible}"
  end

  # OR both the WHERE and HAVING clauses, and merge referenced tables.
  self.where_clause = where_clause.or(other.where_clause)
  self.having_clause = having_clause.or(other.having_clause)
  self.references_values = references_values + other.references_values

  self
end
# Allows to specify a HAVING clause. Note that you can't use HAVING
# without also specifying a GROUP clause.
#
# Order.having('SUM(price) > 30').group('user_id')
def having(opts, *rest)
  return self if opts.blank?

  spawn.having!(opts, *rest)
end
def having!(opts, *rest) # :nodoc:
  opts = sanitize_forbidden_attributes(opts)
  references!(PredicateBuilder.references(opts)) if opts.is_a?(Hash)
  self.having_clause = having_clause + having_clause_factory.build(opts, rest)
  self
end
# Specifies a limit for the number of records to retrieve.
#
# User.limit(10) # generated SQL has 'LIMIT 10'
#
# User.limit(10).limit(20) # generated SQL has 'LIMIT 20'
# Spawns a copy of the relation with the given LIMIT value set.
def limit(value)
  relation = spawn
  relation.limit!(value)
end
def limit!(value) # :nodoc:
  tap { self.limit_value = value }
end
# Specifies the number of rows to skip before returning rows.
#
# User.offset(10) # generated SQL has "OFFSET 10"
#
# Should be used with order.
#
# User.offset(10).order("name ASC")
# Spawns a copy of the relation with the given OFFSET value set.
def offset(value)
  relation = spawn
  relation.offset!(value)
end
def offset!(value) # :nodoc:
  tap { self.offset_value = value }
end
# Specifies locking settings (default to +true+). For more information
# on locking, please see ActiveRecord::Locking.
# Spawns a copy of the relation with the given locking setting applied.
def lock(locks = true)
  relation = spawn
  relation.lock!(locks)
end
def lock!(locks = true) # :nodoc:
  self.lock_value =
    case locks
    when String, TrueClass, NilClass
      locks || true # nil is treated the same as true
    else
      false
    end
  self
end
# Returns a chainable relation with zero records.
#
# The returned relation implements the Null Object pattern. It is an
# object with defined null behavior and always returns an empty array of
# records without querying the database.
#
# Any subsequent condition chained to the returned relation will continue
# generating an empty relation and will not fire any query to the database.
#
# Used in cases where a method or scope could return zero records but the
# result needs to be chainable.
#
# For example:
#
# @posts = current_user.visible_posts.where(name: params[:name])
# # the visible_posts method is expected to return a chainable Relation
#
# def visible_posts
# case role
# when 'Country Manager'
# Post.where(country: country)
# when 'Reviewer'
# Post.published
# when 'Bad User'
# Post.none # It can't be chained if [] is returned.
# end
# end
#
def none
  relation = spawn
  relation.none!
end
def none! # :nodoc:
  # An always-false WHERE plus NullRelation short-circuits all queries.
  relation = where!("1=0")
  relation.extending!(NullRelation)
end
# Sets readonly attributes for the returned relation. If value is
# true (default), attempting to update a record will result in an error.
#
# users = User.readonly
# users.first.save
# => ActiveRecord::ReadOnlyRecord: User is marked as readonly
# Spawns a copy of the relation with the readonly flag set to +value+.
def readonly(value = true)
  relation = spawn
  relation.readonly!(value)
end
def readonly!(value = true) # :nodoc:
  tap { self.readonly_value = value }
end
# Sets attributes to be used when creating new records from a
# relation object.
#
# users = User.where(name: 'Oscar')
# users.new.name # => 'Oscar'
#
# users = users.create_with(name: 'DHH')
# users.new.name # => 'DHH'
#
# You can pass +nil+ to #create_with to reset attributes:
#
# users = users.create_with(nil)
# users.new.name # => 'Oscar'
# Spawns a copy of the relation with the given create-time attributes merged in.
def create_with(value)
  relation = spawn
  relation.create_with!(value)
end
def create_with!(value) # :nodoc:
  self.create_with_value =
    if value
      # Merge sanitized attributes over any previously set ones.
      create_with_value.merge(sanitize_forbidden_attributes(value))
    else
      # nil resets the create-with attributes entirely.
      FROZEN_EMPTY_HASH
    end
  self
end
# Specifies table from which the records will be fetched. For example:
#
# Topic.select('title').from('posts')
# # SELECT title FROM posts
#
# Can accept other relation objects. For example:
#
# Topic.select('title').from(Topic.approved)
# # SELECT title FROM (SELECT * FROM topics WHERE approved = 't') subquery
#
# Topic.select('a.title').from(Topic.approved, :a)
# # SELECT a.title FROM (SELECT * FROM topics WHERE approved = 't') a
#
# Spawns a copy of the relation with the given FROM source set.
def from(value, subquery_name = nil)
  relation = spawn
  relation.from!(value, subquery_name)
end
def from!(value, subquery_name = nil) # :nodoc:
  tap { self.from_clause = Relation::FromClause.new(value, subquery_name) }
end
# Specifies whether the records should be unique or not. For example:
#
# User.select(:name)
# # Might return two records with the same name
#
# User.select(:name).distinct
# # Returns 1 record per distinct name
#
# User.select(:name).distinct.distinct(false)
# # You can also remove the uniqueness
def distinct(value = true)
  relation = spawn
  relation.distinct!(value)
end
# Like #distinct, but modifies relation in place.
# Like #distinct, but modifies relation in place.
def distinct!(value = true) # :nodoc:
  tap { self.distinct_value = value }
end
# Used to extend a scope with additional methods, either through
# a module or through a block provided.
#
# The object returned is a relation, which can be further extended.
#
# === Using a module
#
# module Pagination
# def page(number)
# # pagination code goes here
# end
# end
#
# scope = Model.all.extending(Pagination)
# scope.page(params[:page])
#
# You can also pass a list of modules:
#
# scope = Model.all.extending(Pagination, SomethingElse)
#
# === Using a block
#
# scope = Model.all.extending do
# def page(number)
# # pagination code goes here
# end
# end
# scope.page(params[:page])
#
# You can also use a block and a module list:
#
# scope = Model.all.extending(Pagination) do
# def per_page(number)
# # pagination code goes here
# end
# end
def extending(*modules, &block)
  # Nothing to extend with: return self unchanged.
  return self if modules.empty? && block.nil?

  spawn.extending!(*modules, &block)
end
def extending!(*modules, &block) # :nodoc:
  # A block becomes an anonymous module appended after any named modules.
  mods = modules
  mods += [Module.new(&block)] if block
  self.extending_values = extending_values + mods.flatten
  extend(*extending_values) if extending_values.any?
  self
end
# Specify optimizer hints to be used in the SELECT statement.
#
# Example (for MySQL):
#
# Topic.optimizer_hints("MAX_EXECUTION_TIME(50000)", "NO_INDEX_MERGE(topics)")
# # SELECT /*+ MAX_EXECUTION_TIME(50000) NO_INDEX_MERGE(topics) */ `topics`.* FROM `topics`
#
# Example (for PostgreSQL with pg_hint_plan):
#
# Topic.optimizer_hints("SeqScan(topics)", "Parallel(topics 8)")
# # SELECT /*+ SeqScan(topics) Parallel(topics 8) */ "topics".* FROM "topics"
# Spawns a copy of the relation with the given optimizer hints merged in.
def optimizer_hints(*args)
  check_if_method_has_arguments!(:optimizer_hints, args)
  relation = spawn
  relation.optimizer_hints!(*args)
end
def optimizer_hints!(*args) # :nodoc:
  self.optimizer_hints_values = optimizer_hints_values | args.flatten
  self
end
# Reverse the existing order clause on the relation.
#
# User.order('name ASC').reverse_order # generated SQL has 'ORDER BY name DESC'
def reverse_order
  relation = spawn
  relation.reverse_order!
end
def reverse_order! # :nodoc:
  # De-duplicate and drop blank entries before reversing the SQL order.
  self.order_values = reverse_sql_order(order_values.uniq.compact_blank)
  self
end
def skip_query_cache!(value = true) # :nodoc:
  tap { self.skip_query_cache_value = value }
end
def skip_preloading! # :nodoc:
  tap { self.skip_preloading_value = true }
end
# Adds an SQL comment to queries generated from this relation. For example:
#
# User.annotate("selecting user names").select(:name)
# # SELECT "users"."name" FROM "users" /* selecting user names */
#
# User.annotate("selecting", "user", "names").select(:name)
# # SELECT "users"."name" FROM "users" /* selecting */ /* user */ /* names */
#
# The SQL block comment delimiters, "/*" and "*/", will be added automatically.
# Spawns a copy of the relation with the given SQL comment annotations appended.
def annotate(*args)
  check_if_method_has_arguments!(:annotate, args)
  relation = spawn
  relation.annotate!(*args)
end
# Like #annotate, but modifies relation in place.
# Like #annotate, but modifies relation in place.
def annotate!(*args) # :nodoc:
  self.annotate_values = annotate_values + args
  self
end
# Returns the Arel object associated with the relation.
# Returns the Arel object associated with the relation, building and
# memoizing it on first access.
def arel(aliases = nil) # :nodoc:
  @arel = build_arel(aliases) unless @arel
  @arel
end
# Builds a JoinDependency for +associations+ using the given +join_type+.
def construct_join_dependency(associations, join_type) # :nodoc:
  ActiveRecord::Associations::JoinDependency.new(klass, table, associations, join_type)
end
protected
# Wraps this relation (minus its optimizer hints) as an aliased subquery
# projecting +select_value+; hints are re-applied on the outer query.
def build_subquery(subquery_alias, select_value) # :nodoc:
  inner = except(:optimizer_hints).arel.as(subquery_alias)
  manager = Arel::SelectManager.new(inner).project(select_value)
  manager.optimizer_hints(*optimizer_hints_values) unless optimizer_hints_values.empty?
  manager
end
private
def assert_mutability!
  # A relation becomes immutable once its records are loaded or its Arel
  # AST has been built.
  immutable = @loaded || (defined?(@arel) && @arel)
  raise ImmutableRelation if immutable
end
# Assembles the Arel::SelectManager for this relation from all collected
# query values: joins, where/having clauses, limit/offset, grouping,
# ordering, projection, optimizer hints, distinct, from, lock and SQL
# comment annotations.
def build_arel(aliases)
arel = Arel::SelectManager.new(table)
# INNER joins take precedence; any left outer joins are folded in by
# build_joins when both kinds are present.
if !joins_values.empty?
build_joins(arel, joins_values.flatten, aliases)
elsif !left_outer_joins_values.empty?
build_left_outer_joins(arel, left_outer_joins_values.flatten, aliases)
end
arel.where(where_clause.ast) unless where_clause.empty?
arel.having(having_clause.ast) unless having_clause.empty?
# LIMIT and OFFSET are passed as bind parameters rather than inlined
# SQL; the limit is sanitized by the connection adapter.
if limit_value
limit_attribute = ActiveModel::Attribute.with_cast_value(
"LIMIT",
connection.sanitize_limit(limit_value),
Type.default_value,
)
arel.take(Arel::Nodes::BindParam.new(limit_attribute))
end
if offset_value
offset_attribute = ActiveModel::Attribute.with_cast_value(
"OFFSET",
offset_value.to_i,
Type.default_value,
)
arel.skip(Arel::Nodes::BindParam.new(offset_attribute))
end
arel.group(*arel_columns(group_values.uniq.compact_blank)) unless group_values.empty?
build_order(arel)
build_select(arel)
arel.optimizer_hints(*optimizer_hints_values) unless optimizer_hints_values.empty?
arel.distinct(distinct_value)
arel.from(build_from) unless from_clause.empty?
arel.lock(lock_value) if lock_value
arel.comment(*annotate_values) unless annotate_values.empty?
arel
end
# Resolves the FROM clause value. A Relation becomes an aliased subquery
# (alias defaults to "subquery"), with eager loading applied first when
# required; any other value (e.g. a SQL string or Arel node) passes
# through unchanged.
def build_from
opts = from_clause.value
name = from_clause.name
case opts
when Relation
if opts.eager_loading?
opts = opts.send(:apply_join_dependency)
end
name ||= "subquery"
opts.arel.as(name.to_s)
else
opts
end
end
# Keeps only the entries of +associations+ that can describe an
# association join (Hash, Symbol or Array); for every other entry the
# given block, if any, is invoked (callers use it to raise or warn).
def select_association_list(associations)
  associations.each_with_object([]) do |candidate, collected|
    case candidate
    when Hash, Symbol, Array
      collected << candidate
    else
      yield if block_given?
    end
  end
end
# Filters +associations+ like select_association_list, raising
# ArgumentError for any entry that is not a Hash, Symbol or Array.
def valid_association_list(associations)
select_association_list(associations) do
raise ArgumentError, "only Hash, Symbol and Array are allowed"
end
end
# Adds OUTER JOIN nodes for the given association list to +manager+,
# reusing the generic build_join_query machinery.
def build_left_outer_joins(manager, outer_joins, aliases)
buckets = Hash.new { |h, k| h[k] = [] }
buckets[:association_join] = valid_association_list(outer_joins)
build_join_query(manager, buckets, Arel::Nodes::OuterJoin, aliases)
end
# Partitions the heterogeneous +joins+ array into buckets -- association
# joins (Hash/Symbol/Array), stashed JoinDependency objects, raw Arel
# join nodes and leading join nodes -- and hands them to build_join_query
# as INNER joins.
def build_joins(manager, joins, aliases)
buckets = Hash.new { |h, k| h[k] = [] }
# Left outer joins declared on the relation are stashed as a
# JoinDependency so they keep their OUTER join type.
unless left_outer_joins_values.empty?
left_joins = valid_association_list(left_outer_joins_values.flatten)
buckets[:stashed_join] << construct_join_dependency(left_joins, Arel::Nodes::OuterJoin)
end
# A trailing JoinDependency built for this klass is stashed rather than
# treated as a user-supplied join.
if joins.last.is_a?(ActiveRecord::Associations::JoinDependency)
buckets[:stashed_join] << joins.pop if joins.last.base_klass == klass
end
# Raw SQL strings become Arel string joins; blanks and duplicates drop.
joins.map! do |join|
if join.is_a?(String)
table.create_string_join(Arel.sql(join.strip)) unless join.blank?
else
join
end
end.compact_blank!.uniq!
# Join nodes at the front are emitted before association joins, except
# string joins, which are deferred while stashed joins remain pending.
while joins.first.is_a?(Arel::Nodes::Join)
join_node = joins.shift
if join_node.is_a?(Arel::Nodes::StringJoin) && !buckets[:stashed_join].empty?
buckets[:join_node] << join_node
else
buckets[:leading_join] << join_node
end
end
joins.each do |join|
case join
when Hash, Symbol, Array
buckets[:association_join] << join
when ActiveRecord::Associations::JoinDependency
buckets[:stashed_join] << join
when Arel::Nodes::Join
buckets[:join_node] << join
else
raise "unknown class: %s" % join.class.name
end
end
build_join_query(manager, buckets, Arel::Nodes::InnerJoin, aliases)
end
# Emits join sources onto +manager+ in order: leading raw join nodes,
# then association joins resolved through a JoinDependency (together with
# any stashed dependencies), then the remaining raw join nodes.
def build_join_query(manager, buckets, join_type, aliases)
association_joins = buckets[:association_join]
stashed_joins = buckets[:stashed_join]
leading_joins = buckets[:leading_join]
join_nodes = buckets[:join_node]
join_sources = manager.join_sources
join_sources.concat(leading_joins) unless leading_joins.empty?
unless association_joins.empty? && stashed_joins.empty?
# Alias tracking accounts for table names already taken by raw joins.
alias_tracker = alias_tracker(leading_joins + join_nodes, aliases)
join_dependency = construct_join_dependency(association_joins, join_type)
join_sources.concat(join_dependency.join_constraints(stashed_joins, alias_tracker))
end
join_sources.concat(join_nodes) unless join_nodes.empty?
end
# Applies the SELECT projection: explicit select values when present,
# otherwise every non-ignored column, otherwise table.*.
def build_select(arel)
if select_values.any?
arel.project(*arel_columns(select_values.uniq))
elsif klass.ignored_columns.any?
arel.project(*klass.column_names.map { |field| arel_attribute(field) })
else
arel.project(table[Arel.star])
end
end
# Maps select/group entries to Arel columns: Symbols and Strings are
# resolved via arel_column (an unresolvable Symbol gets its name quoted
# as a table name), Procs are called, anything else passes through.
def arel_columns(columns)
columns.flat_map do |field|
case field
when Symbol
arel_column(field.to_s) do |attr_name|
connection.quote_table_name(attr_name)
end
when String
arel_column(field, &:itself)
when Proc
field.call
else
field
end
end
end
# Resolves +field+ (after attribute-alias expansion) to an Arel attribute
# when it is a column of this klass and the FROM clause still targets
# this table; otherwise yields the resolved name for the caller to handle.
def arel_column(field)
field = klass.attribute_aliases[field] || field
from = from_clause.name || from_clause.value
if klass.columns_hash.key?(field) && (!from || table_name_matches?(from))
arel_attribute(field)
else
yield field
end
end
# Heuristic check that a custom FROM value still references this
# relation's table: matches the bare or quoted table name at the start of
# the string, or after whitespace whose preceding text is not "FROM",
# provided the name is not qualified further with a dot.
def table_name_matches?(from)
table_name = Regexp.escape(table.name)
quoted_table_name = Regexp.escape(connection.quote_table_name(table.name))
/(?:\A|(?<!FROM)\s)(?:\b#{table_name}\b|#{quoted_table_name})(?!\.)/i.match?(from.to_s)
end
# Produces the reversed ordering for +order_query+. With no explicit
# order, falls back to primary key DESC (or raises when the table has no
# primary key). Arel attributes and orderings are reversed structurally;
# plain SQL strings are rewritten clause by clause, flipping a trailing
# ASC/DESC or appending " DESC", and raise IrreversibleOrderError when
# they cannot be parsed safely (see does_not_support_reverse?).
def reverse_sql_order(order_query)
if order_query.empty?
return [arel_attribute(primary_key).desc] if primary_key
raise IrreversibleOrderError,
"Relation has no current order and table has no primary key to be used as default order"
end
order_query.flat_map do |o|
case o
when Arel::Attribute
o.desc
when Arel::Nodes::Ordering
o.reverse
when String
if does_not_support_reverse?(o)
raise IrreversibleOrderError, "Order #{o.inspect} cannot be reversed automatically"
end
o.split(",").map! do |s|
s.strip!
s.gsub!(/\sasc\Z/i, " DESC") || s.gsub!(/\sdesc\Z/i, " ASC") || (s << " DESC")
end
else
o
end
end
end
# True when a raw SQL order clause cannot be mechanically reversed:
# either it invokes a multi-argument SQL function (a comma-separated
# section has unbalanced parentheses) or it uses NULLS FIRST/LAST.
def does_not_support_reverse?(order)
  # Account for String subclasses like Arel::Nodes::SqlLiteral that
  # override methods like #count.
  order = String.new(order) unless order.instance_of?(String)
  multiarg_function = order.include?(",") &&
    order.split(",").any? { |section| section.count("(") != section.count(")") }
  multiarg_function || /\bnulls\s+(?:first|last)\b/i.match?(order)
end
# Applies the relation's ORDER BY values (deduplicated, blanks removed)
# to the Arel select manager; no-op when nothing remains.
def build_order(arel)
  remaining = order_values.uniq
  remaining.compact_blank!
  arel.order(*remaining) unless remaining.empty?
end
VALID_DIRECTIONS = [:asc, :desc, :ASC, :DESC,
"asc", "desc", "ASC", "DESC"].to_set # :nodoc:
# Validates Hash-style order arguments: every direction value must be one
# of VALID_DIRECTIONS, otherwise an ArgumentError is raised.
def validate_order_args(args)
  args.grep(Hash).each do |hash|
    hash.each_value do |dir|
      next if VALID_DIRECTIONS.include?(dir)
      raise ArgumentError,
        "Direction \"#{dir}\" is invalid. Valid directions are: #{VALID_DIRECTIONS.to_a.inspect}"
    end
  end
end
# Normalizes ORDER BY arguments in place: drops blanks, sanitizes raw
# SQL, rejects disallowed raw SQL, validates hash directions, registers
# tables referenced as "table.column", and converts Symbols/Hashes into
# Arel orderings.
def preprocess_order_args(order_args)
order_args.reject!(&:blank?)
order_args.map! do |arg|
klass.sanitize_sql_for_order(arg)
end
order_args.flatten!
@klass.disallow_raw_sql!(
order_args.flat_map { |a| a.is_a?(Hash) ? a.keys : a },
permit: connection.column_name_with_order_matcher
)
validate_order_args(order_args)
# String clauses like "users.name" imply a table that must be joined.
references = order_args.grep(String)
references.map! { |arg| arg =~ /^\W?(\w+)\W?\./ && $1 }.compact!
references!(references) if references.any?
# if a symbol is given we prepend the quoted table name
order_args.map! do |arg|
case arg
when Symbol
order_column(arg.to_s).asc
when Hash
arg.map { |field, dir|
case field
when Arel::Nodes::SqlLiteral
field.send(dir.downcase)
else
order_column(field.to_s).send(dir.downcase)
end
}
else
arg
end
end.flatten!
end
# Resolves an ORDER BY column name. Known columns become Arel attributes.
# For names that are not columns of this table: "count" combined with a
# GROUP BY stays an attribute reference (so it can target the aggregate),
# anything else is quoted as a SQL literal.
def order_column(field)
arel_column(field) do |attr_name|
if attr_name == "count" && !group_values.empty?
arel_attribute(attr_name)
else
Arel.sql(connection.quote_table_name(attr_name))
end
end
end
# Checks to make sure that the arguments are not blank. Note that if some
# blank-like object were initially passed into the query method, then this
# method will not raise an error.
#
# Example:
#
# Post.references() # raises an error
# Post.references([]) # does not raise an error
#
# This particular method should be called with a method_name and the args
# passed into that method as an input. For example:
#
# def references(*args)
# check_if_method_has_arguments!("references", args)
# ...
# end
def check_if_method_has_arguments!(method_name, args)
  return unless args.blank?
  raise ArgumentError, "The method .#{method_name}() must contain arguments."
end
STRUCTURAL_OR_METHODS = Relation::VALUE_METHODS - [:extending, :where, :having, :unscope, :references]
# Returns the relation value methods on which this relation and +other+
# differ structurally. Two values are considered equal when identical or
# when both are unset/empty; a non-empty result means the relations
# cannot be combined with OR.
def structurally_incompatible_values_for_or(other)
values = other.values
STRUCTURAL_OR_METHODS.reject do |method|
v1, v2 = @values[method], values[method]
v1 == v2 || (!v1 || v1.empty?) && (!v2 || v2.empty?)
end
end
# Memoized factory used to build WHERE (and, via the alias in this file,
# HAVING) clauses for this relation's klass.
def where_clause_factory
@where_clause_factory ||= Relation::WhereClauseFactory.new(klass, predicate_builder)
end
alias having_clause_factory where_clause_factory
end
end
| 34.301565 | 161 | 0.614166 |
e81b3b0583da71f125493d24cbfcdfd05fa7a594 | 1,161 | require 'json'
# Generated Swagger-style Sinatra route stub: list berry flavors, with
# optional limit/offset query paging. The handler body is a placeholder.
MyApp.add_route('GET', '/api/v2/berry-flavor/', {
"resourcePath" => "/BerryFlavor",
"summary" => "",
"nickname" => "berry_flavor_list",
"responseClass" => "String",
"endpoint" => "/api/v2/berry-flavor/",
"notes" => "",
"parameters" => [
{
"name" => "limit",
"description" => "",
"dataType" => "Integer",
"allowableValues" => "",
"paramType" => "query",
},
{
"name" => "offset",
"description" => "",
"dataType" => "Integer",
"allowableValues" => "",
"paramType" => "query",
},
]}) do
cross_origin
# the guts live here
{"message" => "yes, it worked"}.to_json
end
# Route stub: read a single berry flavor by its path :id. Placeholder body.
MyApp.add_route('GET', '/api/v2/berry-flavor/{id}/', {
"resourcePath" => "/BerryFlavor",
"summary" => "",
"nickname" => "berry_flavor_read",
"responseClass" => "String",
"endpoint" => "/api/v2/berry-flavor/{id}/",
"notes" => "",
"parameters" => [
{
"name" => "id",
"description" => "",
"dataType" => "Integer",
"paramType" => "path",
},
]}) do
cross_origin
# the guts live here
{"message" => "yes, it worked"}.to_json
end
| 21.109091 | 54 | 0.512489 |
21cedca89d931e30bb6643812dfcf4743482f8b0 | 742 | class Repo < Formula
include Language::Python::Shebang
desc "Repository tool for Android development"
homepage "https://source.android.com/source/developing.html"
# Checkout is pinned to both a tag and an exact revision for reproducibility.
url "https://gerrit.googlesource.com/git-repo.git",
tag: "v2.16.4",
revision: "9122bfc3a80367ed303e8e2d3b3b3d7a8851c904"
license "Apache-2.0"
version_scheme 1
bottle do
sha256 cellar: :any_skip_relocation, all: "3f4851a4709c89b3f800aeb8f8e111b27c453f111262cccb4f166736d4409c52"
end
depends_on "[email protected]"
def install
bin.install "repo"
# Point the script's shebang at the brewed Python.
rewrite_shebang detected_python_shebang, bin/"repo"
doc.install (buildpath/"docs").children
end
test do
assert_match "usage:", shell_output("#{bin}/repo help 2>&1")
end
end
| 25.586207 | 112 | 0.729111 |
ffda6f2a183261f536dc105ffb19c49d4997be76 | 143 | # Be sure to restart your server when you modify this file.
# Store session data in a signed cookie under this key.
Rails.application.config.session_store :cookie_store, key: '_TestWebsite_session'
| 35.75 | 81 | 0.811189 |
1ac02888274eb5814dfd47b65f6f18f04197abe5 | 168 | require 'active_support/concern'
require "patterns/version"
require "patterns/service"
require "patterns/api_request"
require "patterns/notifier"
# Top-level namespace for the gem; behavior lives in the files required above.
module Patterns
end
| 16.8 | 32 | 0.821429 |
1ab7d6be2150bb96f342fe95266f1a083a1c8b66 | 893 | require 'operation/rabbit_mq'
# Operation that deletes an account's RabbitMQ messaging user via a
# single 'delete_user' step appended to the inherited step list.
class Operation::RabbitMq::DelNodeAccount < Operation::RabbitMq
def steps
steps = super
steps << Operation::Step.new('delete_user') do
timeout_in(5.minutes)
raise ArgumentError, "Missing required :username argument!" unless self[:args][:username]
send_rabbitmq_command :delete_user, [ self[:args][:username] ]
# NOTE(review): success is hard-coded to true, so the fail! branch
# below is unreachable; presumably it should reflect the outcome of
# send_rabbitmq_command -- confirm against that helper's contract.
success = true
self[:result_code] = 'Success'
self[:result_message] = "Request to delete messaging user for account '#{account.name}'"
operation_logs << OperationLog.new( {
:step_name => 'delete_user',
:is_success => success,
:result_code => self[:result_code],
:result_message => self[:result_message],
} )
unless success
fail! && next
else
succeed!
end
end
return steps
end
end
e22d46cc824570f60b37993bced613f52e68cd56 | 565 | require 'open-uri'
require 'nokogiri'
# Cinch IRC plugin: watches channel messages for video-site links and
# replies with the page's Open Graph title.
class Videos
include Cinch::Plugin
listen_to :channel, method: :query
def query(m)
# NOTE(review): URI.regexp is deprecated on newer Rubies; consider
# URI::DEFAULT_PARSER.make_regexp instead.
urls = m.message.split.grep URI.regexp
if urls.any?
urls.each do |url|
# Inline rescue silently skips tokens URI.parse rejects.
url = URI.parse(url) rescue next
next unless ['http', 'https'].include?(url.scheme)
if /(www\.)?(youtube|vimeo|dailymotion|youtu).*/.match url.host
# open() comes from open-uri (required above) and fetches the page.
doc = Nokogiri::HTML(open(url), nil, 'utf-8')
m.reply "Video: #{doc.at('meta[@property="og:title"]')['content']}"
end
end
end
end
end
216aaf1b042899abb4d85e95ddcaea2d74cd775d | 118 | class AddParentTask < ActiveRecord::Migration[4.2]
# Adds the parent-task reference column to tasks.
def change
  # Fixed typo: the class name says "parent task", so the column is
  # :parent_id (was misspelled :parend_id).
  add_column :tasks, :parent_id, :integer
end
end
| 19.666667 | 50 | 0.737288 |
61713e79ced44cde102e6517a5e1543f093c2bda | 1,605 | # -*- coding: UTF-8 -*-
module RRD
# Accumulates RRDtool data-source ("DS:...") and archive ("RRA:...")
# definition strings plus creation parameters, and hands them to
# Wrapper.create when #save is called.
class Builder
attr_accessor :output, :parameters, :datasources, :archives
DATASOURCE_TYPES = [:gauge, :counter, :derive, :absolute]
ARCHIVE_TYPES = [:average, :min, :max, :last]
# output: path of the RRD file to create. parameters default to a
# 5-minute step starting 10 seconds in the past; :start is coerced to a
# Unix epoch integer.
def initialize(output, parameters = {})
@output = output
@parameters = {:step => 5.minutes, :start => Time.now - 10.seconds }.merge parameters
@parameters[:start] = @parameters[:start].to_i
@datasources = []
@archives = []
end
# Registers and returns a "DS:..." definition string. :max defaults to
# :unlimited, which RRDtool spells "U".
def datasource(name, options = {})
options = {:type => :gauge, :heartbeat => 10.minutes, :min => 0, :max => :unlimited}.merge options
options[:max] = "U" if options[:max] == :unlimited
datasource = "DS:#{name}:#{options[:type].to_s.upcase}:#{options[:heartbeat]}:#{options[:min]}:#{options[:max]}"
datasources << datasource
datasource
end
# Registers and returns an "RRA:..." definition string, converting the
# :every/:during durations into step and row counts.
def archive(consolidation_function, options = {})
options = {:every => 5.minutes, :during => 1.day}.merge options
# steps and rows must be integer, so we need to convert float values
archive_steps = (options[:every]/parameters[:step]).to_i
archive_rows = (options[:during]/options[:every]).to_i
archive = "RRA:#{consolidation_function.to_s.upcase}:0.5:#{archive_steps}:#{archive_rows}"
archives << archive
archive
end
# Creates the RRD file from the accumulated parameters and definitions.
def save
args = [output]
line_parameters = RRD.to_line_parameters(parameters)
args += line_parameters
args += datasources
args += archives
Wrapper.create(*args)
end
end
end | 34.148936 | 118 | 0.609346 |
d5ab4001187c6e1a075e7aad57f7aba2a792d5d4 | 316 | class CreateTravelers < ActiveRecord::Migration[5.2]
def change
create_table :travelers do |t|
t.string :name
t.string :email
t.string :password
t.integer :budget
t.string :nationality
t.string :interests
t.string :password_digest
t.timestamps
end
end
end
| 21.066667 | 52 | 0.64557 |
26fd084fc137c45ce549675d7427eff04cae7b93 | 223 | require 'mollie-api-ruby'
# mollie-api-ruby example: fetch one payment by ID using a test API key.
begin
payment = Mollie::Payment.get(
"tr_7UhSN1zuXS",
api_key: 'test_dHar4XY7LxsDOtmnkVtjNVWXLSlXsM'
)
rescue Mollie::Exception => e
# << appends in place; the receiver here is a fresh string literal.
puts 'An error has occurred: ' << e.message
end
| 20.272727 | 51 | 0.70852 |
6a965bd0ed1d051178eaf28c2ac742e1231d2891 | 245 | if platform?("centos")
# Install the canned iptables ruleset and immediately restart the service
# so the new rules take effect.
cookbook_file "/etc/sysconfig/iptables" do
source "iptables"
notifies :run, "execute[restart iptables]", :immediately
end
execute "restart iptables" do
command "/etc/init.d/iptables restart"
end
| 18.846154 | 60 | 0.702041 |
fff428c917b1633a22df4120c0669cc1db02b48d | 9,329 | # frozen_string_literal: true
namespace :benchmarks do
# https://github.com/evanphx/benchmark-ips
# Enable and start GC before each job run. Disable GC afterwards.
#
# Inspired by https://www.omniref.com/ruby/2.2.1/symbols/Benchmark/bm?#annotation=4095926&line=182
class GCSuite
def warming(*)
run_gc
end
def running(*)
run_gc
end
# Required no-op hooks of the benchmark-ips suite interface.
def warmup_stats(*)
end
def add_report(*)
end
private
# Enable GC, collect once, then disable so timed runs are GC-free.
def run_gc
GC.enable
GC.start
GC.disable
end
end
# Path of the classifier-reborn checkout that lives beside this file.
def classifier_dir
  here = File.dirname(__FILE__)
  File.join(here, 'classifier-reborn')
end
# Clones classifier-reborn from GitHub into classifier_dir unless a
# checkout already exists (network and git side effects).
def clone_classifier
# rubocop:disable Style/IfUnlessModifier
unless Dir.exist? classifier_dir
system "git clone https://github.com/jekyll/classifier-reborn.git #{classifier_dir}"
end
# rubocop:enable Style/IfUnlessModifier
end
# desc 'setup standard benchmark'
# Loads the benchmark dependencies; note the require order matters for
# the measured Coverage overhead (see the TODO below).
task :setup do
clone_classifier
$LOAD_PATH.unshift(File.join(classifier_dir, 'lib'))
require 'benchmark'
require 'benchmark/ips'
# TODO: ok this is interesting and weird
# basically the earlier I require coverage and
# then require files the larger perf impact
# this is somewhat expected but can lead to significant perf diffs
# for example moving `require 'classifier-reborn'` below the coverage.start
# results in 1.5x slower vs "difference falls within error"
# moving from 5 second of time to 12 still shows slower based on when classifier is required
# make sure to be plugged in while benchmarking ;) Otherwise you get very unreliable results
require 'classifier-reborn'
if ENV['COVERAGE']
require 'coverage'
::Coverage.start
end
require 'redis'
require 'coverband'
require File.join(File.dirname(__FILE__), 'dog')
end
# Builds the Redis-backed Coverband store used by the benchmarks; honors
# REDIS_TEST_URL and isolates keys under the "coverband_bench" namespace.
def benchmark_redis_store
redis = if ENV['REDIS_TEST_URL']
Redis.new(url: ENV['REDIS_TEST_URL'])
else
Redis.new
end
Coverband::Adapters::RedisStore.new(redis,
redis_namespace: 'coverband_bench')
end
# desc 'set up coverband with Redis'
# Configures Coverband against the benchmark Redis store.
task :setup_redis do
Coverband.configure do |config|
config.root = Dir.pwd
config.reporting_frequency = 100.0
config.logger = $stdout
config.store = benchmark_redis_store
end
end
# desc 'set up coverband with filestore'
# Configures Coverband against a throwaway JSON file store in /tmp.
task :setup_file do
Coverband.configure do |config|
config.root = Dir.pwd
config.reporting_frequency = 100.0
config.logger = $stdout
file_path = '/tmp/benchmark_store.json'
config.store = Coverband::Adapters::FileStore.new(file_path)
end
end
# Trains and runs a tiny Bayes classifier (classifier-reborn workload).
def bayes_classification
b = ClassifierReborn::Bayes.new 'Interesting', 'Uninteresting'
b.train_interesting 'here are some good words. I hope you love them'
b.train_uninteresting 'here are some bad words, I hate you'
b.classify 'I hate bad words and you' # returns 'Uninteresting'
end
# Builds a small LSI index and exercises search / find_related / classify.
def lsi_classification
lsi = ClassifierReborn::LSI.new
strings = [['This text deals with dogs. Dogs.', :dog],
['This text involves dogs too. Dogs! ', :dog],
['This text revolves around cats. Cats.', :cat],
['This text also involves cats. Cats!', :cat],
['This text involves birds. Birds.', :bird]]
strings.each { |x| lsi.add_item x.first, x.last }
lsi.search('dog', 3)
lsi.find_related(strings[2], 2)
lsi.classify 'This text is also about dogs!'
end
# One benchmark iteration's workload: classification runs plus many
# repeated calls to the same line (Dog#bark) to stress line-hit tracking.
def work
5.times do
bayes_classification
lsi_classification
end
# simulate many calls to the same line
10_000.times { Dog.new.bark }
end
# puts "benchmark for: #{Coverband.configuration.inspect}"
# puts "store: #{Coverband.configuration.store.inspect}"
# Benchmarks the workload with and without Coverband reporting; when
# hold_work is true, results are held on disk for cross-process compare.
def run_work(hold_work = false)
suite = GCSuite.new
Benchmark.ips do |x|
x.config(time: 12, warmup: 5, suite: suite)
x.report 'coverband' do
work
Coverband.report_coverage(true)
end
x.report 'no coverband' do
work
end
x.hold! 'temp_results' if hold_work
x.compare!
end
Coverband::Collectors::Coverage.instance.reset_instance
end
# Builds a coverage-style line hash for 24 lines: keys "1".."24" mapped
# to a random hit count between 0 and 4.
def fake_line_numbers
  (1..24).to_h { |line| [line.to_s, rand(5)] }
end
# Synthesizes a coverage report spanning 2934 files ("file1.rb"..).
def fake_report
2934.times.each_with_object({}) do |file_number, hash|
hash["file#{file_number + 1}.rb"] = fake_line_numbers
end
end
# Mutates roughly 15% of the report's files with fresh line data,
# simulating churn between reports; returns the same report object.
def adjust_report(report)
report.keys.each do |file|
next unless rand < 0.15
report[file] = fake_line_numbers
end
report
end
# Stubs file-related behavior on +store+ via singleton methods: file
# hashing returns this file's (memoized) MD5 and path relativization
# becomes a no-op, since the fake report references files that don't exist.
def mock_files(store)
###
# this is a hack because in the benchmark we don't have real files
###
def store.file_hash(file)
if @file_hash_cache[file]
@file_hash_cache[file]
else
@file_hash_cache[file] = Digest::MD5.file(__FILE__).hexdigest
end
end
def store.full_path_to_relative(file)
file
end
end
# Benchmarks save_report throughput against the Redis store using a
# large synthetic report.
def reporting_speed
report = fake_report
store = benchmark_redis_store
store.clear!
mock_files(store)
5.times { store.save_report(report) }
Benchmark.ips do |x|
x.config(time: 15, warmup: 5)
x.report('store_reports') { store.save_report(report) }
end
end
# Profiles repeated save_report calls and fails unless MemoryProfiler
# reports zero retained bytes. Profiler output is captured by swapping
# $stdout; the ensure block restores it on any exit path.
def measure_memory
require 'memory_profiler'
report = fake_report
store = benchmark_redis_store
store.clear!
mock_files(store)
# warmup
3.times { store.save_report(report) }
previous_out = $stdout
capture = StringIO.new
$stdout = capture
MemoryProfiler.report do
10.times { store.save_report(report) }
end.pretty_print
data = $stdout.string
$stdout = previous_out
raise 'leaking memory!!!' unless data.match('Total retained: 0 bytes')
ensure
$stdout = previous_out
end
# Same retained-memory check, but for repeated Coverband.configure calls.
def measure_configure_memory
require 'memory_profiler'
# warmup
3.times { Coverband.configure }
previous_out = $stdout
capture = StringIO.new
$stdout = capture
MemoryProfiler.report do
10.times do
Coverband.configure do |config|
redis_url = ENV['CACHE_REDIS_URL'] || ENV['REDIS_URL']
config.store = Coverband::Adapters::RedisStore.new(Redis.new(url: redis_url), redis_namespace: 'coverband_data')
end
end
end.pretty_print
data = $stdout.string
$stdout = previous_out
puts data
raise 'leaking memory!!!' unless data.match('Total retained: 0 bytes')
ensure
$stdout = previous_out
end
# Task registrations wiring the helpers above into the rake CLI.
desc 'runs memory reporting on Redis store'
task memory_reporting: [:setup] do
puts 'runs memory benchmarking to ensure we dont leak'
measure_memory
end
desc 'runs memory reporting on configure'
task memory_configure_reporting: [:setup] do
puts 'runs memory benchmarking on configure to ensure we dont leak'
measure_configure_memory
end
desc 'runs memory leak check via Rails tests'
task memory_rails: [:setup] do
puts 'runs memory rails test to ensure we dont leak'
puts `COVERBAND_MEMORY_TEST=true bundle exec m test/unit/rails_full_stack_test.rb:22`
end
desc 'runs benchmarks on reporting large sets of files to redis'
task redis_reporting: [:setup] do
puts 'runs benchmarks on reporting large sets of files to redis'
reporting_speed
end
# desc 'runs benchmarks on default redis setup'
task run_redis: [:setup, :setup_redis] do
puts 'Coverband configured with default Redis store'
run_work(true)
end
# desc 'runs benchmarks file store'
task run_file: [:setup, :setup_file] do
puts 'Coverband configured with file store'
run_work(true)
end
# The two coverband_demo tasks shell out to ApacheBench (ab).
desc 'benchmarks external requests to coverband_demo site'
task :coverband_demo do
# for local testing
# puts `ab -n 500 -c 5 "http://127.0.0.1:3000/posts"`
puts `ab -n 2000 -c 10 "https://coverband-demo.herokuapp.com/posts"`
end
desc 'benchmarks external requests to coverband_demo site'
task :coverband_demo_graph do
# for local testing
# puts `ab -n 200 -c 5 "http://127.0.0.1:3000/posts"`
# puts `ab -n 500 -c 10 -g tmp/ab_brench.tsv "http://127.0.0.1:3000/posts"`
puts `ab -n 2000 -c 10 -g tmp/ab_brench.tsv "https://coverband-demo.herokuapp.com/posts"`
puts `test/benchmarks/graph_bench.sh`
`open tmp/timeseries.jpg`
end
desc 'compare Coverband Ruby Coverage with Filestore with normal Ruby'
task :compare_file do
puts 'comparing Coverage loaded/not, this takes some time for output...'
puts 'coverage loaded'
puts `COVERAGE=true rake benchmarks:run_file`
puts 'just the work'
puts `rake benchmarks:run_file`
end
desc 'compare Coverband Ruby Coverage with Redis and normal Ruby'
task :compare_redis do
puts 'comparing Coverage loaded/not, this takes some time for output...'
puts 'coverage loaded'
puts `COVERAGE=true rake benchmarks:run_redis`
puts 'just the work'
puts `rake benchmarks:run_redis`
end
end
desc 'runs benchmarks'
task benchmarks: ['benchmarks:redis_reporting',
'benchmarks:compare_file',
'benchmarks:compare_redis']
| 28.882353 | 122 | 0.665452 |
62840335c86045ef254bac5b1103fbd3b3e84967 | 98 | class DropRolesTable < ActiveRecord::Migration[5.2]
def change
drop_table :roles
end
end
| 16.333333 | 51 | 0.734694 |
e95192397c700c9a45b4d40bb08b230c49c70ac0 | 91,513 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::XRay
module Types
# An alias for an edge.
#
# @!attribute [rw] name
# The canonical name of the alias.
# @return [String]
#
# @!attribute [rw] names
# A list of names for the alias, including the canonical name.
# @return [Array<String>]
#
# @!attribute [rw] type
# The type of the alias.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/Alias AWS API Documentation
#
class Alias < Struct.new(
:name,
:names,
:type)
SENSITIVE = []
include Aws::Structure
end
# Value of a segment annotation. Has one of three value types: Number,
# Boolean, or String.
#
# @!attribute [rw] number_value
# Value for a Number annotation.
# @return [Float]
#
# @!attribute [rw] boolean_value
# Value for a Boolean annotation.
# @return [Boolean]
#
# @!attribute [rw] string_value
# Value for a String annotation.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/AnnotationValue AWS API Documentation
#
class AnnotationValue < Struct.new(
:number_value,
:boolean_value,
:string_value)
SENSITIVE = []
include Aws::Structure
end
# A list of Availability Zones corresponding to the segments in a trace.
#
# @!attribute [rw] name
# The name of a corresponding Availability Zone.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/AvailabilityZoneDetail AWS API Documentation
#
class AvailabilityZoneDetail < Struct.new(
:name)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass BackendConnectionErrors
# data as a hash:
#
# {
# timeout_count: 1,
# connection_refused_count: 1,
# http_code_4_xx_count: 1,
# http_code_5_xx_count: 1,
# unknown_host_count: 1,
# other_count: 1,
# }
#
# @!attribute [rw] timeout_count
# @return [Integer]
#
# @!attribute [rw] connection_refused_count
# @return [Integer]
#
# @!attribute [rw] http_code_4_xx_count
# @return [Integer]
#
# @!attribute [rw] http_code_5_xx_count
# @return [Integer]
#
# @!attribute [rw] unknown_host_count
# @return [Integer]
#
# @!attribute [rw] other_count
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/BackendConnectionErrors AWS API Documentation
#
class BackendConnectionErrors < Struct.new(
:timeout_count,
:connection_refused_count,
:http_code_4_xx_count,
:http_code_5_xx_count,
:unknown_host_count,
:other_count)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass BatchGetTracesRequest
# data as a hash:
#
# {
# trace_ids: ["TraceId"], # required
# next_token: "String",
# }
#
# @!attribute [rw] trace_ids
# Specify the trace IDs of requests for which to retrieve segments.
# @return [Array<String>]
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/BatchGetTracesRequest AWS API Documentation
#
class BatchGetTracesRequest < Struct.new(
:trace_ids,
:next_token)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] traces
# Full traces for the specified requests.
# @return [Array<Types::Trace>]
#
# @!attribute [rw] unprocessed_trace_ids
# Trace IDs of requests that haven't been processed.
# @return [Array<String>]
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/BatchGetTracesResult AWS API Documentation
#
class BatchGetTracesResult < Struct.new(
:traces,
:unprocessed_trace_ids,
:next_token)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass CreateGroupRequest
# data as a hash:
#
# {
# group_name: "GroupName", # required
# filter_expression: "FilterExpression",
# insights_configuration: {
# insights_enabled: false,
# notifications_enabled: false,
# },
# tags: [
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# }
#
# @!attribute [rw] group_name
# The case-sensitive name of the new group. Default is a reserved name
# and names must be unique.
# @return [String]
#
# @!attribute [rw] filter_expression
# The filter expression defining criteria by which to group traces.
# @return [String]
#
# @!attribute [rw] insights_configuration
# The structure containing configurations related to insights.
#
# * The InsightsEnabled boolean can be set to true to enable insights
# for the new group or false to disable insights for the new group.
#
# * The NotifcationsEnabled boolean can be set to true to enable
# insights notifications for the new group. Notifications may only
# be enabled on a group with InsightsEnabled set to true.
# @return [Types::InsightsConfiguration]
#
# @!attribute [rw] tags
# A map that contains one or more tag keys and tag values to attach to
# an X-Ray group. For more information about ways to use tags, see
# [Tagging AWS resources][1] in the *AWS General Reference*.
#
# The following restrictions apply to tags:
#
# * Maximum number of user-applied tags per resource: 50
#
# * Maximum tag key length: 128 Unicode characters
#
# * Maximum tag value length: 256 Unicode characters
#
# * Valid values for key and value: a-z, A-Z, 0-9, space, and the
# following characters: \_ . : / = + - and @
#
# * Tag keys and values are case sensitive.
#
# * Don't use `aws:` as a prefix for keys; it's reserved for AWS
# use.
#
#
#
# [1]: https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html
# @return [Array<Types::Tag>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/CreateGroupRequest AWS API Documentation
#
class CreateGroupRequest < Struct.new(
:group_name,
:filter_expression,
:insights_configuration,
:tags)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] group
# The group that was created. Contains the name of the group that was
# created, the Amazon Resource Name (ARN) of the group that was
# generated based on the group name, the filter expression, and the
# insight configuration that was assigned to the group.
# @return [Types::Group]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/CreateGroupResult AWS API Documentation
#
class CreateGroupResult < Struct.new(
:group)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass CreateSamplingRuleRequest
# data as a hash:
#
# {
# sampling_rule: { # required
# rule_name: "RuleName",
# rule_arn: "String",
# resource_arn: "ResourceARN", # required
# priority: 1, # required
# fixed_rate: 1.0, # required
# reservoir_size: 1, # required
# service_name: "ServiceName", # required
# service_type: "ServiceType", # required
# host: "Host", # required
# http_method: "HTTPMethod", # required
# url_path: "URLPath", # required
# version: 1, # required
# attributes: {
# "AttributeKey" => "AttributeValue",
# },
# },
# tags: [
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# }
#
# @!attribute [rw] sampling_rule
# The rule definition.
# @return [Types::SamplingRule]
#
# @!attribute [rw] tags
# A map that contains one or more tag keys and tag values to attach to
# an X-Ray sampling rule. For more information about ways to use tags,
# see [Tagging AWS resources][1] in the *AWS General Reference*.
#
# The following restrictions apply to tags:
#
# * Maximum number of user-applied tags per resource: 50
#
# * Maximum tag key length: 128 Unicode characters
#
# * Maximum tag value length: 256 Unicode characters
#
# * Valid values for key and value: a-z, A-Z, 0-9, space, and the
# following characters: \_ . : / = + - and @
#
# * Tag keys and values are case sensitive.
#
# * Don't use `aws:` as a prefix for keys; it's reserved for AWS
# use.
#
#
#
# [1]: https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html
# @return [Array<Types::Tag>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/CreateSamplingRuleRequest AWS API Documentation
#
class CreateSamplingRuleRequest < Struct.new(
:sampling_rule,
:tags)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] sampling_rule_record
# The saved rule definition and metadata.
# @return [Types::SamplingRuleRecord]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/CreateSamplingRuleResult AWS API Documentation
#
class CreateSamplingRuleResult < Struct.new(
:sampling_rule_record)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass DeleteGroupRequest
# data as a hash:
#
# {
# group_name: "GroupName",
# group_arn: "GroupARN",
# }
#
# @!attribute [rw] group_name
# The case-sensitive name of the group.
# @return [String]
#
# @!attribute [rw] group_arn
# The ARN of the group that was generated on creation.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/DeleteGroupRequest AWS API Documentation
#
class DeleteGroupRequest < Struct.new(
:group_name,
:group_arn)
SENSITIVE = []
include Aws::Structure
end
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/DeleteGroupResult AWS API Documentation
#
class DeleteGroupResult < Aws::EmptyStructure; end
# @note When making an API call, you may pass DeleteSamplingRuleRequest
# data as a hash:
#
# {
# rule_name: "String",
# rule_arn: "String",
# }
#
# @!attribute [rw] rule_name
# The name of the sampling rule. Specify a rule by either name or ARN,
# but not both.
# @return [String]
#
# @!attribute [rw] rule_arn
# The ARN of the sampling rule. Specify a rule by either name or ARN,
# but not both.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/DeleteSamplingRuleRequest AWS API Documentation
#
class DeleteSamplingRuleRequest < Struct.new(:rule_name, :rule_arn)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] sampling_rule_record
# The deleted rule definition and metadata.
# @return [Types::SamplingRuleRecord]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/DeleteSamplingRuleResult AWS API Documentation
#
class DeleteSamplingRuleResult < Struct.new(:sampling_rule_record)
  SENSITIVE = []
  include Aws::Structure
end
# Information about a connection between two services.
#
# @!attribute [rw] reference_id
# Identifier of the edge. Unique within a service map.
# @return [Integer]
#
# @!attribute [rw] start_time
# The start time of the first segment on the edge.
# @return [Time]
#
# @!attribute [rw] end_time
# The end time of the last segment on the edge.
# @return [Time]
#
# @!attribute [rw] summary_statistics
# Response statistics for segments on the edge.
# @return [Types::EdgeStatistics]
#
# @!attribute [rw] response_time_histogram
# A histogram that maps the spread of client response times on an
# edge.
# @return [Array<Types::HistogramEntry>]
#
# @!attribute [rw] aliases
# Aliases for the edge.
# @return [Array<Types::Alias>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/Edge AWS API Documentation
#
class Edge < Struct.new(
  *%i[reference_id start_time end_time summary_statistics
      response_time_histogram aliases]
)
  SENSITIVE = []
  include Aws::Structure
end
# Response statistics for an edge.
#
# @!attribute [rw] ok_count
# The number of requests that completed with a 2xx Success status
# code.
# @return [Integer]
#
# @!attribute [rw] error_statistics
# Information about requests that failed with a 4xx Client Error
# status code.
# @return [Types::ErrorStatistics]
#
# @!attribute [rw] fault_statistics
# Information about requests that failed with a 5xx Server Error
# status code.
# @return [Types::FaultStatistics]
#
# @!attribute [rw] total_count
# The total number of completed requests.
# @return [Integer]
#
# @!attribute [rw] total_response_time
# The aggregate response time of completed requests.
# @return [Float]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/EdgeStatistics AWS API Documentation
#
class EdgeStatistics < Struct.new(
  *%i[ok_count error_statistics fault_statistics total_count
      total_response_time]
)
  SENSITIVE = []
  include Aws::Structure
end
# A configuration document that specifies encryption configuration
# settings.
#
# @!attribute [rw] key_id
# The ID of the customer master key (CMK) used for encryption, if
# applicable.
# @return [String]
#
# @!attribute [rw] status
# The encryption status. While the status is `UPDATING`, X-Ray may
# encrypt data with a combination of the new and old settings.
# @return [String]
#
# @!attribute [rw] type
# The type of encryption. Set to `KMS` for encryption with CMKs. Set
# to `NONE` for default encryption.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/EncryptionConfig AWS API Documentation
#
class EncryptionConfig < Struct.new(:key_id, :status, :type)
  SENSITIVE = []
  include Aws::Structure
end
# The root cause of a trace summary error.
#
# @!attribute [rw] services
# A list of services corresponding to an error. A service identifies a
# segment and it contains a name, account ID, type, and inferred flag.
# @return [Array<Types::ErrorRootCauseService>]
#
# @!attribute [rw] client_impacting
# A flag that denotes that the root cause impacts the trace client.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ErrorRootCause AWS API Documentation
#
class ErrorRootCause < Struct.new(:services, :client_impacting)
  SENSITIVE = []
  include Aws::Structure
end
# A collection of segments and corresponding subsegments associated to a
# trace summary error.
#
# @!attribute [rw] name
# The name of the entity.
# @return [String]
#
# @!attribute [rw] exceptions
# The types and messages of the exceptions.
# @return [Array<Types::RootCauseException>]
#
# @!attribute [rw] remote
# A flag that denotes a remote subsegment.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ErrorRootCauseEntity AWS API Documentation
#
class ErrorRootCauseEntity < Struct.new(:name, :exceptions, :remote)
  SENSITIVE = []
  include Aws::Structure
end
# A collection of fields identifying the services in a trace summary
# error.
#
# @!attribute [rw] name
# The service name.
# @return [String]
#
# @!attribute [rw] names
# A collection of associated service names.
# @return [Array<String>]
#
# @!attribute [rw] type
# The type associated to the service.
# @return [String]
#
# @!attribute [rw] account_id
# The account ID associated to the service.
# @return [String]
#
# @!attribute [rw] entity_path
# The path of root cause entities found on the service.
# @return [Array<Types::ErrorRootCauseEntity>]
#
# @!attribute [rw] inferred
# A Boolean value indicating if the service is inferred from the
# trace.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ErrorRootCauseService AWS API Documentation
#
class ErrorRootCauseService < Struct.new(
  *%i[name names type account_id entity_path inferred]
)
  SENSITIVE = []
  include Aws::Structure
end
# Information about requests that failed with a 4xx Client Error status
# code.
#
# @!attribute [rw] throttle_count
# The number of requests that failed with a 419 throttling status
# code.
# @return [Integer]
#
# @!attribute [rw] other_count
# The number of requests that failed with untracked 4xx Client Error
# status codes.
# @return [Integer]
#
# @!attribute [rw] total_count
# The total number of requests that failed with a 4xx Client Error
# status code.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ErrorStatistics AWS API Documentation
#
class ErrorStatistics < Struct.new(:throttle_count, :other_count, :total_count)
  SENSITIVE = []
  include Aws::Structure
end
# The root cause information for a trace summary fault.
#
# @!attribute [rw] services
# A list of corresponding services. A service identifies a segment and
# it contains a name, account ID, type, and inferred flag.
# @return [Array<Types::FaultRootCauseService>]
#
# @!attribute [rw] client_impacting
# A flag that denotes that the root cause impacts the trace client.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/FaultRootCause AWS API Documentation
#
class FaultRootCause < Struct.new(:services, :client_impacting)
  SENSITIVE = []
  include Aws::Structure
end
# A collection of segments and corresponding subsegments associated to a
# trace summary fault error.
#
# @!attribute [rw] name
# The name of the entity.
# @return [String]
#
# @!attribute [rw] exceptions
# The types and messages of the exceptions.
# @return [Array<Types::RootCauseException>]
#
# @!attribute [rw] remote
# A flag that denotes a remote subsegment.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/FaultRootCauseEntity AWS API Documentation
#
class FaultRootCauseEntity < Struct.new(:name, :exceptions, :remote)
  SENSITIVE = []
  include Aws::Structure
end
# A collection of fields identifying the services in a trace summary
# fault.
#
# @!attribute [rw] name
# The service name.
# @return [String]
#
# @!attribute [rw] names
# A collection of associated service names.
# @return [Array<String>]
#
# @!attribute [rw] type
# The type associated to the service.
# @return [String]
#
# @!attribute [rw] account_id
# The account ID associated to the service.
# @return [String]
#
# @!attribute [rw] entity_path
# The path of root cause entities found on the service.
# @return [Array<Types::FaultRootCauseEntity>]
#
# @!attribute [rw] inferred
# A Boolean value indicating if the service is inferred from the
# trace.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/FaultRootCauseService AWS API Documentation
#
class FaultRootCauseService < Struct.new(
  *%i[name names type account_id entity_path inferred]
)
  SENSITIVE = []
  include Aws::Structure
end
# Information about requests that failed with a 5xx Server Error status
# code.
#
# @!attribute [rw] other_count
# The number of requests that failed with untracked 5xx Server Error
# status codes.
# @return [Integer]
#
# @!attribute [rw] total_count
# The total number of requests that failed with a 5xx Server Error
# status code.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/FaultStatistics AWS API Documentation
#
class FaultStatistics < Struct.new(:other_count, :total_count)
  SENSITIVE = []
  include Aws::Structure
end
# @api private
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetEncryptionConfigRequest AWS API Documentation
#
class GetEncryptionConfigRequest < Aws::EmptyStructure; end
# @!attribute [rw] encryption_config
# The encryption configuration document.
# @return [Types::EncryptionConfig]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetEncryptionConfigResult AWS API Documentation
#
class GetEncryptionConfigResult < Struct.new(:encryption_config)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass GetGroupRequest
# data as a hash:
#
# {
# group_name: "GroupName",
# group_arn: "GroupARN",
# }
#
# @!attribute [rw] group_name
# The case-sensitive name of the group.
# @return [String]
#
# @!attribute [rw] group_arn
# The ARN of the group that was generated on creation.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetGroupRequest AWS API Documentation
#
class GetGroupRequest < Struct.new(:group_name, :group_arn)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] group
# The group that was requested. Contains the name of the group, the
# ARN of the group, the filter expression, and the insight
# configuration assigned to the group.
# @return [Types::Group]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetGroupResult AWS API Documentation
#
class GetGroupResult < Struct.new(:group)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass GetGroupsRequest
# data as a hash:
#
# {
# next_token: "GetGroupsNextToken",
# }
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetGroupsRequest AWS API Documentation
#
class GetGroupsRequest < Struct.new(:next_token)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] groups
# The collection of all active groups.
# @return [Array<Types::GroupSummary>]
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetGroupsResult AWS API Documentation
#
class GetGroupsResult < Struct.new(:groups, :next_token)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass GetSamplingRulesRequest
# data as a hash:
#
# {
# next_token: "String",
# }
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetSamplingRulesRequest AWS API Documentation
#
class GetSamplingRulesRequest < Struct.new(:next_token)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] sampling_rule_records
# Rule definitions and metadata.
# @return [Array<Types::SamplingRuleRecord>]
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetSamplingRulesResult AWS API Documentation
#
class GetSamplingRulesResult < Struct.new(:sampling_rule_records, :next_token)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass GetSamplingStatisticSummariesRequest
# data as a hash:
#
# {
# next_token: "String",
# }
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetSamplingStatisticSummariesRequest AWS API Documentation
#
class GetSamplingStatisticSummariesRequest < Struct.new(:next_token)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] sampling_statistic_summaries
# Information about the number of requests instrumented for each
# sampling rule.
# @return [Array<Types::SamplingStatisticSummary>]
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetSamplingStatisticSummariesResult AWS API Documentation
#
class GetSamplingStatisticSummariesResult < Struct.new(
  :sampling_statistic_summaries, :next_token
)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass GetSamplingTargetsRequest
# data as a hash:
#
# {
# sampling_statistics_documents: [ # required
# {
# rule_name: "RuleName", # required
# client_id: "ClientID", # required
# timestamp: Time.now, # required
# request_count: 1, # required
# sampled_count: 1, # required
# borrow_count: 1,
# },
# ],
# }
#
# @!attribute [rw] sampling_statistics_documents
# Information about rules that the service is using to sample
# requests.
# @return [Array<Types::SamplingStatisticsDocument>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetSamplingTargetsRequest AWS API Documentation
#
class GetSamplingTargetsRequest < Struct.new(:sampling_statistics_documents)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] sampling_target_documents
# Updated rules that the service should use to sample requests.
# @return [Array<Types::SamplingTargetDocument>]
#
# @!attribute [rw] last_rule_modification
# The last time a user changed the sampling rule configuration. If the
# sampling rule configuration changed since the service last retrieved
# it, the service should call GetSamplingRules to get the latest
# version.
# @return [Time]
#
# @!attribute [rw] unprocessed_statistics
# Information about SamplingStatisticsDocument that X-Ray could not
# process.
# @return [Array<Types::UnprocessedStatistics>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetSamplingTargetsResult AWS API Documentation
#
class GetSamplingTargetsResult < Struct.new(
  *%i[sampling_target_documents last_rule_modification unprocessed_statistics]
)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass GetServiceGraphRequest
# data as a hash:
#
# {
# start_time: Time.now, # required
# end_time: Time.now, # required
# group_name: "GroupName",
# group_arn: "GroupARN",
# next_token: "String",
# }
#
# @!attribute [rw] start_time
# The start of the time frame for which to generate a graph.
# @return [Time]
#
# @!attribute [rw] end_time
#   The end of the time frame for which to generate a graph.
# @return [Time]
#
# @!attribute [rw] group_name
# The name of a group based on which you want to generate a graph.
# @return [String]
#
# @!attribute [rw] group_arn
# The Amazon Resource Name (ARN) of a group based on which you want to
# generate a graph.
# @return [String]
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetServiceGraphRequest AWS API Documentation
#
class GetServiceGraphRequest < Struct.new(
  *%i[start_time end_time group_name group_arn next_token]
)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] start_time
# The start of the time frame for which the graph was generated.
# @return [Time]
#
# @!attribute [rw] end_time
# The end of the time frame for which the graph was generated.
# @return [Time]
#
# @!attribute [rw] services
# The services that have processed a traced request during the
# specified time frame.
# @return [Array<Types::Service>]
#
# @!attribute [rw] contains_old_group_versions
# A flag indicating whether the group's filter expression has been
# consistent, or if the returned service graph may show traces from an
# older version of the group's filter expression.
# @return [Boolean]
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetServiceGraphResult AWS API Documentation
#
class GetServiceGraphResult < Struct.new(
  *%i[start_time end_time services contains_old_group_versions next_token]
)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass GetTimeSeriesServiceStatisticsRequest
# data as a hash:
#
# {
# start_time: Time.now, # required
# end_time: Time.now, # required
# group_name: "GroupName",
# group_arn: "GroupARN",
# entity_selector_expression: "EntitySelectorExpression",
# period: 1,
# next_token: "String",
# }
#
# @!attribute [rw] start_time
# The start of the time frame for which to aggregate statistics.
# @return [Time]
#
# @!attribute [rw] end_time
# The end of the time frame for which to aggregate statistics.
# @return [Time]
#
# @!attribute [rw] group_name
# The case-sensitive name of the group for which to pull statistics
# from.
# @return [String]
#
# @!attribute [rw] group_arn
# The Amazon Resource Name (ARN) of the group for which to pull
# statistics from.
# @return [String]
#
# @!attribute [rw] entity_selector_expression
# A filter expression defining entities that will be aggregated for
# statistics. Supports ID, service, and edge functions. If no selector
# expression is specified, edge statistics are returned.
# @return [String]
#
# @!attribute [rw] period
# Aggregation period in seconds.
# @return [Integer]
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetTimeSeriesServiceStatisticsRequest AWS API Documentation
#
class GetTimeSeriesServiceStatisticsRequest < Struct.new(
  *%i[start_time end_time group_name group_arn entity_selector_expression
      period next_token]
)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] time_series_service_statistics
# The collection of statistics.
# @return [Array<Types::TimeSeriesServiceStatistics>]
#
# @!attribute [rw] contains_old_group_versions
# A flag indicating whether or not a group's filter expression has
# been consistent, or if a returned aggregation might show statistics
# from an older version of the group's filter expression.
# @return [Boolean]
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetTimeSeriesServiceStatisticsResult AWS API Documentation
#
class GetTimeSeriesServiceStatisticsResult < Struct.new(
  *%i[time_series_service_statistics contains_old_group_versions next_token]
)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass GetTraceGraphRequest
# data as a hash:
#
# {
# trace_ids: ["TraceId"], # required
# next_token: "String",
# }
#
# @!attribute [rw] trace_ids
# Trace IDs of requests for which to generate a service graph.
# @return [Array<String>]
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetTraceGraphRequest AWS API Documentation
#
class GetTraceGraphRequest < Struct.new(:trace_ids, :next_token)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] services
# The services that have processed one of the specified requests.
# @return [Array<Types::Service>]
#
# @!attribute [rw] next_token
# Pagination token.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetTraceGraphResult AWS API Documentation
#
class GetTraceGraphResult < Struct.new(:services, :next_token)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass GetTraceSummariesRequest
# data as a hash:
#
# {
# start_time: Time.now, # required
# end_time: Time.now, # required
# time_range_type: "TraceId", # accepts TraceId, Event
# sampling: false,
# sampling_strategy: {
# name: "PartialScan", # accepts PartialScan, FixedRate
# value: 1.0,
# },
# filter_expression: "FilterExpression",
# next_token: "String",
# }
#
# @!attribute [rw] start_time
# The start of the time frame for which to retrieve traces.
# @return [Time]
#
# @!attribute [rw] end_time
# The end of the time frame for which to retrieve traces.
# @return [Time]
#
# @!attribute [rw] time_range_type
# A parameter to indicate whether to query trace summaries by TraceId
# or Event time.
# @return [String]
#
# @!attribute [rw] sampling
# Set to `true` to get summaries for only a subset of available
# traces.
# @return [Boolean]
#
# @!attribute [rw] sampling_strategy
# A parameter to indicate whether to enable sampling on trace
# summaries. Input parameters are Name and Value.
# @return [Types::SamplingStrategy]
#
# @!attribute [rw] filter_expression
# Specify a filter expression to retrieve trace summaries for services
# or requests that meet certain requirements.
# @return [String]
#
# @!attribute [rw] next_token
# Specify the pagination token returned by a previous request to
# retrieve the next page of results.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetTraceSummariesRequest AWS API Documentation
#
class GetTraceSummariesRequest < Struct.new(
  *%i[start_time end_time time_range_type sampling sampling_strategy
      filter_expression next_token]
)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] trace_summaries
# Trace IDs and annotations for traces that were found in the
# specified time frame.
# @return [Array<Types::TraceSummary>]
#
# @!attribute [rw] approximate_time
# The start time of this page of results.
# @return [Time]
#
# @!attribute [rw] traces_processed_count
# The total number of traces processed, including traces that did not
# match the specified filter expression.
# @return [Integer]
#
# @!attribute [rw] next_token
# If the requested time frame contained more than one page of results,
# you can use this token to retrieve the next page. The first page
# contains the most recent results, closest to the end of the time
# frame.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GetTraceSummariesResult AWS API Documentation
#
class GetTraceSummariesResult < Struct.new(
  *%i[trace_summaries approximate_time traces_processed_count next_token]
)
  SENSITIVE = []
  include Aws::Structure
end
# Details and metadata for a group.
#
# @!attribute [rw] group_name
# The unique case-sensitive name of the group.
# @return [String]
#
# @!attribute [rw] group_arn
# The Amazon Resource Name (ARN) of the group generated based on the
# GroupName.
# @return [String]
#
# @!attribute [rw] filter_expression
# The filter expression defining the parameters to include traces.
# @return [String]
#
# @!attribute [rw] insights_configuration
# The structure containing configurations related to insights.
#
# * The InsightsEnabled boolean can be set to true to enable insights
# for the group or false to disable insights for the group.
#
  #   * The NotificationsEnabled boolean can be set to true to enable
# insights notifications through Amazon EventBridge for the group.
# @return [Types::InsightsConfiguration]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/Group AWS API Documentation
#
class Group < Struct.new(
  *%i[group_name group_arn filter_expression insights_configuration]
)
  SENSITIVE = []
  include Aws::Structure
end
# Details for a group without metadata.
#
# @!attribute [rw] group_name
# The unique case-sensitive name of the group.
# @return [String]
#
# @!attribute [rw] group_arn
# The ARN of the group generated based on the GroupName.
# @return [String]
#
# @!attribute [rw] filter_expression
# The filter expression defining the parameters to include traces.
# @return [String]
#
# @!attribute [rw] insights_configuration
# The structure containing configurations related to insights.
#
# * The InsightsEnabled boolean can be set to true to enable insights
# for the group or false to disable insights for the group.
#
# * The NotificationsEnabled boolean can be set to true to enable
# insights notifications. Notifications can only be enabled on a
# group with InsightsEnabled set to true.
# @return [Types::InsightsConfiguration]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/GroupSummary AWS API Documentation
#
class GroupSummary < Struct.new(
  *%i[group_name group_arn filter_expression insights_configuration]
)
  SENSITIVE = []
  include Aws::Structure
end
# An entry in a histogram for a statistic. A histogram maps the range of
# observed values on the X axis, and the prevalence of each value on the
# Y axis.
#
# @!attribute [rw] value
# The value of the entry.
# @return [Float]
#
# @!attribute [rw] count
# The prevalence of the entry.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/HistogramEntry AWS API Documentation
#
class HistogramEntry < Struct.new(:value, :count)
  SENSITIVE = []
  include Aws::Structure
end
# Information about an HTTP request.
#
# @!attribute [rw] http_url
# The request URL.
# @return [String]
#
# @!attribute [rw] http_status
# The response status.
# @return [Integer]
#
# @!attribute [rw] http_method
# The request method.
# @return [String]
#
# @!attribute [rw] user_agent
# The request's user agent string.
# @return [String]
#
# @!attribute [rw] client_ip
# The IP address of the requestor.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/Http AWS API Documentation
#
class Http < Struct.new(
  *%i[http_url http_status http_method user_agent client_ip]
)
  SENSITIVE = []
  include Aws::Structure
end
# The structure containing configurations related to insights.
#
# @note When making an API call, you may pass InsightsConfiguration
# data as a hash:
#
# {
# insights_enabled: false,
# notifications_enabled: false,
# }
#
# @!attribute [rw] insights_enabled
# Set the InsightsEnabled value to true to enable insights or false to
# disable insights.
# @return [Boolean]
#
# @!attribute [rw] notifications_enabled
# Set the NotificationsEnabled value to true to enable insights
# notifications. Notifications can only be enabled on a group with
# InsightsEnabled set to true.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/InsightsConfiguration AWS API Documentation
#
class InsightsConfiguration < Struct.new(:insights_enabled, :notifications_enabled)
  SENSITIVE = []
  include Aws::Structure
end
# A list of EC2 instance IDs corresponding to the segments in a trace.
#
# @!attribute [rw] id
# The ID of a corresponding EC2 instance.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/InstanceIdDetail AWS API Documentation
#
class InstanceIdDetail < Struct.new(:id)
  SENSITIVE = []
  include Aws::Structure
end
# The request is missing required parameters or has invalid parameters.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/InvalidRequestException AWS API Documentation
#
class InvalidRequestException < Struct.new(:message)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass ListTagsForResourceRequest
# data as a hash:
#
# {
# resource_arn: "AmazonResourceName", # required
# next_token: "String",
# }
#
# @!attribute [rw] resource_arn
# The Amazon Resource Number (ARN) of an X-Ray group or sampling rule.
# @return [String]
#
# @!attribute [rw] next_token
# A pagination token. If multiple pages of results are returned, use
# the `NextToken` value returned with the current page of results as
# the value of this parameter to get the next page of results.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ListTagsForResourceRequest AWS API Documentation
#
class ListTagsForResourceRequest < Struct.new(:resource_arn, :next_token)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] tags
# A list of tags, as key and value pairs, that is associated with the
# specified X-Ray group or sampling rule.
# @return [Array<Types::Tag>]
#
# @!attribute [rw] next_token
# A pagination token. If multiple pages of results are returned, use
# the `NextToken` value returned with the current page of results to
# get the next page of results.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ListTagsForResourceResponse AWS API Documentation
#
class ListTagsForResourceResponse < Struct.new(:tags, :next_token)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass PutEncryptionConfigRequest
# data as a hash:
#
# {
# key_id: "EncryptionKeyId",
# type: "NONE", # required, accepts NONE, KMS
# }
#
# @!attribute [rw] key_id
# An AWS KMS customer master key (CMK) in one of the following
# formats:
#
# * **Alias** - The name of the key. For example, `alias/MyKey`.
#
# * **Key ID** - The KMS key ID of the key. For example,
# `ae4aa6d49-a4d8-9df9-a475-4ff6d7898456`. AWS X-Ray does not
# support asymmetric CMKs.
#
# * **ARN** - The full Amazon Resource Name of the key ID or alias.
# For example,
# `arn:aws:kms:us-east-2:123456789012:key/ae4aa6d49-a4d8-9df9-a475-4ff6d7898456`.
# Use this format to specify a key in a different account.
#
# Omit this key if you set `Type` to `NONE`.
# @return [String]
#
# @!attribute [rw] type
# The type of encryption. Set to `KMS` to use your own key for
# encryption. Set to `NONE` for default encryption.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/PutEncryptionConfigRequest AWS API Documentation
#
class PutEncryptionConfigRequest < Struct.new(:key_id, :type)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] encryption_config
# The new encryption configuration.
# @return [Types::EncryptionConfig]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/PutEncryptionConfigResult AWS API Documentation
#
class PutEncryptionConfigResult < Struct.new(:encryption_config)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass PutTelemetryRecordsRequest
# data as a hash:
#
# {
# telemetry_records: [ # required
# {
# timestamp: Time.now, # required
# segments_received_count: 1,
# segments_sent_count: 1,
# segments_spillover_count: 1,
# segments_rejected_count: 1,
# backend_connection_errors: {
# timeout_count: 1,
# connection_refused_count: 1,
# http_code_4_xx_count: 1,
# http_code_5_xx_count: 1,
# unknown_host_count: 1,
# other_count: 1,
# },
# },
# ],
# ec2_instance_id: "EC2InstanceId",
# hostname: "Hostname",
# resource_arn: "ResourceARN",
# }
#
# @!attribute [rw] telemetry_records
# @return [Array<Types::TelemetryRecord>]
#
# @!attribute [rw] ec2_instance_id
# @return [String]
#
# @!attribute [rw] hostname
# @return [String]
#
# @!attribute [rw] resource_arn
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/PutTelemetryRecordsRequest AWS API Documentation
#
class PutTelemetryRecordsRequest < Struct.new(
  *%i[telemetry_records ec2_instance_id hostname resource_arn]
)
  SENSITIVE = []
  include Aws::Structure
end
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/PutTelemetryRecordsResult AWS API Documentation
#
class PutTelemetryRecordsResult < Aws::EmptyStructure; end
# @note When making an API call, you may pass PutTraceSegmentsRequest
# data as a hash:
#
# {
# trace_segment_documents: ["TraceSegmentDocument"], # required
# }
#
# @!attribute [rw] trace_segment_documents
# A string containing a JSON document defining one or more segments or
# subsegments.
# @return [Array<String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/PutTraceSegmentsRequest AWS API Documentation
#
class PutTraceSegmentsRequest < Struct.new(:trace_segment_documents)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] unprocessed_trace_segments
# Segments that failed processing.
# @return [Array<Types::UnprocessedTraceSegment>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/PutTraceSegmentsResult AWS API Documentation
#
class PutTraceSegmentsResult < Struct.new(:unprocessed_trace_segments)
  SENSITIVE = []
  include Aws::Structure
end
# A list of resources ARNs corresponding to the segments in a trace.
#
# @!attribute [rw] arn
# The ARN of a corresponding resource.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ResourceARNDetail AWS API Documentation
#
class ResourceARNDetail < Struct.new(:arn)
  SENSITIVE = []
  include Aws::Structure
end
# The resource was not found. Verify that the name or Amazon Resource
# Name (ARN) of the resource is correct.
#
# @!attribute [rw] message
# @return [String]
#
# @!attribute [rw] resource_name
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ResourceNotFoundException AWS API Documentation
#
class ResourceNotFoundException < Struct.new(:message, :resource_name)
  SENSITIVE = []
  include Aws::Structure
end
# The root cause information for a response time warning.
#
# @!attribute [rw] services
# A list of corresponding services. A service identifies a segment and
# contains a name, account ID, type, and inferred flag.
# @return [Array<Types::ResponseTimeRootCauseService>]
#
# @!attribute [rw] client_impacting
# A flag that denotes that the root cause impacts the trace client.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ResponseTimeRootCause AWS API Documentation
#
class ResponseTimeRootCause < Struct.new(:services, :client_impacting)
  SENSITIVE = []
  include Aws::Structure
end
# A collection of segments and corresponding subsegments associated to a
# response time warning.
#
# @!attribute [rw] name
# The name of the entity.
# @return [String]
#
# @!attribute [rw] coverage
# The type and messages of the exceptions.
# @return [Float]
#
# @!attribute [rw] remote
# A flag that denotes a remote subsegment.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ResponseTimeRootCauseEntity AWS API Documentation
#
class ResponseTimeRootCauseEntity < Struct.new(:name, :coverage, :remote)
  SENSITIVE = []
  include Aws::Structure
end
# A collection of fields identifying the service in a response time
# warning.
#
# @!attribute [rw] name
# The service name.
# @return [String]
#
# @!attribute [rw] names
# A collection of associated service names.
# @return [Array<String>]
#
# @!attribute [rw] type
# The type associated to the service.
# @return [String]
#
# @!attribute [rw] account_id
# The account ID associated to the service.
# @return [String]
#
# @!attribute [rw] entity_path
# The path of root cause entities found on the service.
# @return [Array<Types::ResponseTimeRootCauseEntity>]
#
# @!attribute [rw] inferred
# A Boolean value indicating if the service is inferred from the
# trace.
# @return [Boolean]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ResponseTimeRootCauseService AWS API Documentation
#
class ResponseTimeRootCauseService < Struct.new(:name, :names, :type,
                                                :account_id, :entity_path,
                                                :inferred)
  SENSITIVE = []
  include Aws::Structure
end
# The exception associated with a root cause.
#
# @!attribute [rw] name
# The name of the exception.
# @return [String]
#
# @!attribute [rw] message
# The message of the exception.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/RootCauseException AWS API Documentation
#
class RootCauseException < Struct.new(:name, :message)
  SENSITIVE = []
  include Aws::Structure
end
# You have reached the maximum number of sampling rules.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/RuleLimitExceededException AWS API Documentation
#
class RuleLimitExceededException < Struct.new(:message)
  SENSITIVE = []
  include Aws::Structure
end
# A sampling rule that services use to decide whether to instrument a
# request. Rule fields can match properties of the service, or
# properties of a request. The service can ignore rules that don't
# match its properties.
#
# @note When making an API call, you may pass SamplingRule
# data as a hash:
#
# {
# rule_name: "RuleName",
# rule_arn: "String",
# resource_arn: "ResourceARN", # required
# priority: 1, # required
# fixed_rate: 1.0, # required
# reservoir_size: 1, # required
# service_name: "ServiceName", # required
# service_type: "ServiceType", # required
# host: "Host", # required
# http_method: "HTTPMethod", # required
# url_path: "URLPath", # required
# version: 1, # required
# attributes: {
# "AttributeKey" => "AttributeValue",
# },
# }
#
# @!attribute [rw] rule_name
# The name of the sampling rule. Specify a rule by either name or ARN,
# but not both.
# @return [String]
#
# @!attribute [rw] rule_arn
# The ARN of the sampling rule. Specify a rule by either name or ARN,
# but not both.
# @return [String]
#
# @!attribute [rw] resource_arn
# Matches the ARN of the AWS resource on which the service runs.
# @return [String]
#
# @!attribute [rw] priority
# The priority of the sampling rule.
# @return [Integer]
#
# @!attribute [rw] fixed_rate
# The percentage of matching requests to instrument, after the
# reservoir is exhausted.
# @return [Float]
#
# @!attribute [rw] reservoir_size
# A fixed number of matching requests to instrument per second, prior
# to applying the fixed rate. The reservoir is not used directly by
# services, but applies to all services using the rule collectively.
# @return [Integer]
#
# @!attribute [rw] service_name
# Matches the `name` that the service uses to identify itself in
# segments.
# @return [String]
#
# @!attribute [rw] service_type
# Matches the `origin` that the service uses to identify its type in
# segments.
# @return [String]
#
# @!attribute [rw] host
# Matches the hostname from a request URL.
# @return [String]
#
# @!attribute [rw] http_method
# Matches the HTTP method of a request.
# @return [String]
#
# @!attribute [rw] url_path
# Matches the path from a request URL.
# @return [String]
#
# @!attribute [rw] version
# The version of the sampling rule format (`1`).
# @return [Integer]
#
# @!attribute [rw] attributes
# Matches attributes derived from the request.
# @return [Hash<String,String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/SamplingRule AWS API Documentation
#
class SamplingRule < Struct.new(:rule_name, :rule_arn, :resource_arn,
                                :priority, :fixed_rate, :reservoir_size,
                                :service_name, :service_type, :host,
                                :http_method, :url_path, :version,
                                :attributes)
  SENSITIVE = []
  include Aws::Structure
end
# A SamplingRule and its metadata.
#
# @!attribute [rw] sampling_rule
# The sampling rule.
# @return [Types::SamplingRule]
#
# @!attribute [rw] created_at
# When the rule was created.
# @return [Time]
#
# @!attribute [rw] modified_at
# When the rule was last modified.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/SamplingRuleRecord AWS API Documentation
#
class SamplingRuleRecord < Struct.new(:sampling_rule, :created_at, :modified_at)
  SENSITIVE = []
  include Aws::Structure
end
# A document specifying changes to a sampling rule's configuration.
#
# @note When making an API call, you may pass SamplingRuleUpdate
# data as a hash:
#
# {
# rule_name: "RuleName",
# rule_arn: "String",
# resource_arn: "ResourceARN",
# priority: 1,
# fixed_rate: 1.0,
# reservoir_size: 1,
# host: "Host",
# service_name: "ServiceName",
# service_type: "ServiceType",
# http_method: "HTTPMethod",
# url_path: "URLPath",
# attributes: {
# "AttributeKey" => "AttributeValue",
# },
# }
#
# @!attribute [rw] rule_name
# The name of the sampling rule. Specify a rule by either name or ARN,
# but not both.
# @return [String]
#
# @!attribute [rw] rule_arn
# The ARN of the sampling rule. Specify a rule by either name or ARN,
# but not both.
# @return [String]
#
# @!attribute [rw] resource_arn
# Matches the ARN of the AWS resource on which the service runs.
# @return [String]
#
# @!attribute [rw] priority
# The priority of the sampling rule.
# @return [Integer]
#
# @!attribute [rw] fixed_rate
# The percentage of matching requests to instrument, after the
# reservoir is exhausted.
# @return [Float]
#
# @!attribute [rw] reservoir_size
# A fixed number of matching requests to instrument per second, prior
# to applying the fixed rate. The reservoir is not used directly by
# services, but applies to all services using the rule collectively.
# @return [Integer]
#
# @!attribute [rw] host
# Matches the hostname from a request URL.
# @return [String]
#
# @!attribute [rw] service_name
# Matches the `name` that the service uses to identify itself in
# segments.
# @return [String]
#
# @!attribute [rw] service_type
# Matches the `origin` that the service uses to identify its type in
# segments.
# @return [String]
#
# @!attribute [rw] http_method
# Matches the HTTP method of a request.
# @return [String]
#
# @!attribute [rw] url_path
# Matches the path from a request URL.
# @return [String]
#
# @!attribute [rw] attributes
# Matches attributes derived from the request.
# @return [Hash<String,String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/SamplingRuleUpdate AWS API Documentation
#
class SamplingRuleUpdate < Struct.new(:rule_name, :rule_arn, :resource_arn,
                                      :priority, :fixed_rate, :reservoir_size,
                                      :host, :service_name, :service_type,
                                      :http_method, :url_path, :attributes)
  SENSITIVE = []
  include Aws::Structure
end
# Aggregated request sampling data for a sampling rule across all
# services for a 10-second window.
#
# @!attribute [rw] rule_name
# The name of the sampling rule.
# @return [String]
#
# @!attribute [rw] timestamp
# The start time of the reporting window.
# @return [Time]
#
# @!attribute [rw] request_count
# The number of requests that matched the rule.
# @return [Integer]
#
# @!attribute [rw] borrow_count
# The number of requests recorded with borrowed reservoir quota.
# @return [Integer]
#
# @!attribute [rw] sampled_count
# The number of requests recorded.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/SamplingStatisticSummary AWS API Documentation
#
class SamplingStatisticSummary < Struct.new(:rule_name, :timestamp,
                                            :request_count, :borrow_count,
                                            :sampled_count)
  SENSITIVE = []
  include Aws::Structure
end
# Request sampling results for a single rule from a service. Results are
# for the last 10 seconds unless the service has been assigned a longer
# reporting interval after a previous call to GetSamplingTargets.
#
# @note When making an API call, you may pass SamplingStatisticsDocument
# data as a hash:
#
# {
# rule_name: "RuleName", # required
# client_id: "ClientID", # required
# timestamp: Time.now, # required
# request_count: 1, # required
# sampled_count: 1, # required
# borrow_count: 1,
# }
#
# @!attribute [rw] rule_name
# The name of the sampling rule.
# @return [String]
#
# @!attribute [rw] client_id
# A unique identifier for the service in hexadecimal.
# @return [String]
#
# @!attribute [rw] timestamp
# The current time.
# @return [Time]
#
# @!attribute [rw] request_count
# The number of requests that matched the rule.
# @return [Integer]
#
# @!attribute [rw] sampled_count
# The number of requests recorded.
# @return [Integer]
#
# @!attribute [rw] borrow_count
# The number of requests recorded with borrowed reservoir quota.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/SamplingStatisticsDocument AWS API Documentation
#
class SamplingStatisticsDocument < Struct.new(:rule_name, :client_id,
                                              :timestamp, :request_count,
                                              :sampled_count, :borrow_count)
  SENSITIVE = []
  include Aws::Structure
end
# The name and value of a sampling rule to apply to a trace summary.
#
# @note When making an API call, you may pass SamplingStrategy
# data as a hash:
#
# {
# name: "PartialScan", # accepts PartialScan, FixedRate
# value: 1.0,
# }
#
# @!attribute [rw] name
# The name of a sampling rule.
# @return [String]
#
# @!attribute [rw] value
# The value of a sampling rule.
# @return [Float]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/SamplingStrategy AWS API Documentation
#
class SamplingStrategy < Struct.new(:name, :value)
  SENSITIVE = []
  include Aws::Structure
end
# Temporary changes to a sampling rule configuration. To meet the global
# sampling target for a rule, X-Ray calculates a new reservoir for each
# service based on the recent sampling results of all services that
# called GetSamplingTargets.
#
# @!attribute [rw] rule_name
# The name of the sampling rule.
# @return [String]
#
# @!attribute [rw] fixed_rate
# The percentage of matching requests to instrument, after the
# reservoir is exhausted.
# @return [Float]
#
# @!attribute [rw] reservoir_quota
# The number of requests per second that X-Ray allocated for this
# service.
# @return [Integer]
#
# @!attribute [rw] reservoir_quota_ttl
# When the reservoir quota expires.
# @return [Time]
#
# @!attribute [rw] interval
# The number of seconds for the service to wait before getting
# sampling targets again.
# @return [Integer]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/SamplingTargetDocument AWS API Documentation
#
class SamplingTargetDocument < Struct.new(:rule_name, :fixed_rate,
                                          :reservoir_quota,
                                          :reservoir_quota_ttl, :interval)
  SENSITIVE = []
  include Aws::Structure
end
# A segment from a trace that has been ingested by the X-Ray service.
# The segment can be compiled from documents uploaded with
# PutTraceSegments, or an `inferred` segment for a downstream service,
# generated from a subsegment sent by the service that called it.
#
# For the full segment document schema, see [AWS X-Ray Segment
# Documents][1] in the *AWS X-Ray Developer Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/xray/latest/devguide/xray-api-segmentdocuments.html
#
# @!attribute [rw] id
# The segment's ID.
# @return [String]
#
# @!attribute [rw] document
# The segment document.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/Segment AWS API Documentation
#
class Segment < Struct.new(:id, :document)
  SENSITIVE = []
  include Aws::Structure
end
# Information about an application that processed requests, users that
# made requests, or downstream services, resources, and applications
# that an application used.
#
# @!attribute [rw] reference_id
# Identifier for the service. Unique within the service map.
# @return [Integer]
#
# @!attribute [rw] name
# The canonical name of the service.
# @return [String]
#
# @!attribute [rw] names
# A list of names for the service, including the canonical name.
# @return [Array<String>]
#
# @!attribute [rw] root
# Indicates that the service was the first service to process a
# request.
# @return [Boolean]
#
# @!attribute [rw] account_id
# Identifier of the AWS account in which the service runs.
# @return [String]
#
# @!attribute [rw] type
# The type of service.
#
# * AWS Resource - The type of an AWS resource. For example,
# `AWS::EC2::Instance` for an application running on Amazon EC2 or
# `AWS::DynamoDB::Table` for an Amazon DynamoDB table that the
# application used.
#
# * AWS Service - The type of an AWS service. For example,
# `AWS::DynamoDB` for downstream calls to Amazon DynamoDB that
# didn't target a specific table.
#
# * `client` - Represents the clients that sent requests to a root
# service.
#
# * `remote` - A downstream service of indeterminate type.
# @return [String]
#
# @!attribute [rw] state
# The service's state.
# @return [String]
#
# @!attribute [rw] start_time
# The start time of the first segment that the service generated.
# @return [Time]
#
# @!attribute [rw] end_time
# The end time of the last segment that the service generated.
# @return [Time]
#
# @!attribute [rw] edges
# Connections to downstream services.
# @return [Array<Types::Edge>]
#
# @!attribute [rw] summary_statistics
# Aggregated statistics for the service.
# @return [Types::ServiceStatistics]
#
# @!attribute [rw] duration_histogram
# A histogram that maps the spread of service durations.
# @return [Array<Types::HistogramEntry>]
#
# @!attribute [rw] response_time_histogram
# A histogram that maps the spread of service response times.
# @return [Array<Types::HistogramEntry>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/Service AWS API Documentation
#
class Service < Struct.new(:reference_id, :name, :names, :root, :account_id,
                           :type, :state, :start_time, :end_time, :edges,
                           :summary_statistics, :duration_histogram,
                           :response_time_histogram)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] name
# @return [String]
#
# @!attribute [rw] names
# @return [Array<String>]
#
# @!attribute [rw] account_id
# @return [String]
#
# @!attribute [rw] type
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ServiceId AWS API Documentation
#
class ServiceId < Struct.new(:name, :names, :account_id, :type)
  SENSITIVE = []
  include Aws::Structure
end
# Response statistics for a service.
#
# @!attribute [rw] ok_count
# The number of requests that completed with a 2xx Success status
# code.
# @return [Integer]
#
# @!attribute [rw] error_statistics
# Information about requests that failed with a 4xx Client Error
# status code.
# @return [Types::ErrorStatistics]
#
# @!attribute [rw] fault_statistics
# Information about requests that failed with a 5xx Server Error
# status code.
# @return [Types::FaultStatistics]
#
# @!attribute [rw] total_count
# The total number of completed requests.
# @return [Integer]
#
# @!attribute [rw] total_response_time
# The aggregate response time of completed requests.
# @return [Float]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ServiceStatistics AWS API Documentation
#
class ServiceStatistics < Struct.new(:ok_count, :error_statistics,
                                     :fault_statistics, :total_count,
                                     :total_response_time)
  SENSITIVE = []
  include Aws::Structure
end
# A map that contains tag keys and tag values to attach to an AWS X-Ray
# group or sampling rule. For more information about ways to use tags,
# see [Tagging AWS resources][1] in the *AWS General Reference*.
#
# The following restrictions apply to tags:
#
# * Maximum number of user-applied tags per resource: 50
#
# * Tag keys and values are case sensitive.
#
# * Don't use `aws:` as a prefix for keys; it's reserved for AWS use.
# You cannot edit or delete system tags.
#
#
#
# [1]: https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html
#
# @note When making an API call, you may pass Tag
# data as a hash:
#
# {
# key: "TagKey", # required
# value: "TagValue", # required
# }
#
# @!attribute [rw] key
# A tag key, such as `Stage` or `Name`. A tag key cannot be empty. The
# key can be a maximum of 128 characters, and can contain only Unicode
# letters, numbers, or separators, or the following special
# characters: `+ - = . _ : /`
# @return [String]
#
# @!attribute [rw] value
# An optional tag value, such as `Production` or `test-only`. The
# value can be a maximum of 255 characters, and contain only Unicode
# letters, numbers, or separators, or the following special
# characters: `+ - = . _ : /`
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/Tag AWS API Documentation
#
class Tag < Struct.new(:key, :value)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass TagResourceRequest
# data as a hash:
#
# {
# resource_arn: "AmazonResourceName", # required
# tags: [ # required
# {
# key: "TagKey", # required
# value: "TagValue", # required
# },
# ],
# }
#
# @!attribute [rw] resource_arn
# The Amazon Resource Number (ARN) of an X-Ray group or sampling rule.
# @return [String]
#
# @!attribute [rw] tags
# A map that contains one or more tag keys and tag values to attach to
# an X-Ray group or sampling rule. For more information about ways to
# use tags, see [Tagging AWS resources][1] in the *AWS General
# Reference*.
#
# The following restrictions apply to tags:
#
# * Maximum number of user-applied tags per resource: 50
#
# * Maximum tag key length: 128 Unicode characters
#
# * Maximum tag value length: 256 Unicode characters
#
# * Valid values for key and value: a-z, A-Z, 0-9, space, and the
# following characters: \_ . : / = + - and @
#
# * Tag keys and values are case sensitive.
#
# * Don't use `aws:` as a prefix for keys; it's reserved for AWS
# use. You cannot edit or delete system tags.
#
#
#
# [1]: https://docs.aws.amazon.com/general/latest/gr/aws_tagging.html
# @return [Array<Types::Tag>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/TagResourceRequest AWS API Documentation
#
class TagResourceRequest < Struct.new(:resource_arn, :tags)
  SENSITIVE = []
  include Aws::Structure
end
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/TagResourceResponse AWS API Documentation
#
class TagResourceResponse < Aws::EmptyStructure
end
# @note When making an API call, you may pass TelemetryRecord
# data as a hash:
#
# {
# timestamp: Time.now, # required
# segments_received_count: 1,
# segments_sent_count: 1,
# segments_spillover_count: 1,
# segments_rejected_count: 1,
# backend_connection_errors: {
# timeout_count: 1,
# connection_refused_count: 1,
# http_code_4_xx_count: 1,
# http_code_5_xx_count: 1,
# unknown_host_count: 1,
# other_count: 1,
# },
# }
#
# @!attribute [rw] timestamp
# @return [Time]
#
# @!attribute [rw] segments_received_count
# @return [Integer]
#
# @!attribute [rw] segments_sent_count
# @return [Integer]
#
# @!attribute [rw] segments_spillover_count
# @return [Integer]
#
# @!attribute [rw] segments_rejected_count
# @return [Integer]
#
# @!attribute [rw] backend_connection_errors
# @return [Types::BackendConnectionErrors]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/TelemetryRecord AWS API Documentation
#
class TelemetryRecord < Struct.new(:timestamp, :segments_received_count,
                                   :segments_sent_count,
                                   :segments_spillover_count,
                                   :segments_rejected_count,
                                   :backend_connection_errors)
  SENSITIVE = []
  include Aws::Structure
end
# The request exceeds the maximum number of requests per second.
#
# @!attribute [rw] message
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ThrottledException AWS API Documentation
#
class ThrottledException < Struct.new(:message)
  SENSITIVE = []
  include Aws::Structure
end
# A list of TimeSeriesStatistic structures.
#
# @!attribute [rw] timestamp
# Timestamp of the window for which statistics are aggregated.
# @return [Time]
#
# @!attribute [rw] edge_summary_statistics
# Response statistics for an edge.
# @return [Types::EdgeStatistics]
#
# @!attribute [rw] service_summary_statistics
# Response statistics for a service.
# @return [Types::ServiceStatistics]
#
# @!attribute [rw] response_time_histogram
# The response time histogram for the selected entities.
# @return [Array<Types::HistogramEntry>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/TimeSeriesServiceStatistics AWS API Documentation
#
class TimeSeriesServiceStatistics < Struct.new(:timestamp,
                                               :edge_summary_statistics,
                                               :service_summary_statistics,
                                               :response_time_histogram)
  SENSITIVE = []
  include Aws::Structure
end
# You have exceeded the maximum number of tags you can apply to this
# resource.
#
# @!attribute [rw] message
# @return [String]
#
# @!attribute [rw] resource_name
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/TooManyTagsException AWS API Documentation
#
class TooManyTagsException < Struct.new(:message, :resource_name)
  SENSITIVE = []
  include Aws::Structure
end
# A collection of segment documents with matching trace IDs.
#
# @!attribute [rw] id
# The unique identifier for the request that generated the trace's
# segments and subsegments.
# @return [String]
#
# @!attribute [rw] duration
# The length of time in seconds between the start time of the root
# segment and the end time of the last segment that completed.
# @return [Float]
#
# @!attribute [rw] limit_exceeded
# LimitExceeded is set to true when the trace has exceeded one of the
# defined quotas. For more information about quotas, see [AWS X-Ray
# endpoints and quotas][1].
#
#
#
# [1]: https://docs.aws.amazon.com/general/latest/gr/xray.html
# @return [Boolean]
#
# @!attribute [rw] segments
# Segment documents for the segments and subsegments that comprise the
# trace.
# @return [Array<Types::Segment>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/Trace AWS API Documentation
#
class Trace < Struct.new(:id, :duration, :limit_exceeded, :segments)
  SENSITIVE = []
  include Aws::Structure
end
# Metadata generated from the segment documents in a trace.
#
# @!attribute [rw] id
# The unique identifier for the request that generated the trace's
# segments and subsegments.
# @return [String]
#
# @!attribute [rw] duration
# The length of time in seconds between the start time of the root
# segment and the end time of the last segment that completed.
# @return [Float]
#
# @!attribute [rw] response_time
# The length of time in seconds between the start and end times of the
# root segment. If the service performs work asynchronously, the
# response time measures the time before the response is sent to the
# user, while the duration measures the amount of time before the last
# traced activity completes.
# @return [Float]
#
# @!attribute [rw] has_fault
# The root segment document has a 500 series error.
# @return [Boolean]
#
# @!attribute [rw] has_error
# The root segment document has a 400 series error.
# @return [Boolean]
#
# @!attribute [rw] has_throttle
# One or more of the segment documents has a 429 throttling error.
# @return [Boolean]
#
# @!attribute [rw] is_partial
# One or more of the segment documents is in progress.
# @return [Boolean]
#
# @!attribute [rw] http
# Information about the HTTP request served by the trace.
# @return [Types::Http]
#
# @!attribute [rw] annotations
# Annotations from the trace's segment documents.
# @return [Hash<String,Array<Types::ValueWithServiceIds>>]
#
# @!attribute [rw] users
# Users from the trace's segment documents.
# @return [Array<Types::TraceUser>]
#
# @!attribute [rw] service_ids
# Service IDs from the trace's segment documents.
# @return [Array<Types::ServiceId>]
#
# @!attribute [rw] resource_arns
# A list of resource ARNs for any resource corresponding to the trace
# segments.
# @return [Array<Types::ResourceARNDetail>]
#
# @!attribute [rw] instance_ids
# A list of EC2 instance IDs for any instance corresponding to the
# trace segments.
# @return [Array<Types::InstanceIdDetail>]
#
# @!attribute [rw] availability_zones
# A list of Availability Zones for any zone corresponding to the trace
# segments.
# @return [Array<Types::AvailabilityZoneDetail>]
#
# @!attribute [rw] entry_point
# The root of a trace.
# @return [Types::ServiceId]
#
# @!attribute [rw] fault_root_causes
# A collection of FaultRootCause structures corresponding to the trace
# segments.
# @return [Array<Types::FaultRootCause>]
#
# @!attribute [rw] error_root_causes
# A collection of ErrorRootCause structures corresponding to the trace
# segments.
# @return [Array<Types::ErrorRootCause>]
#
# @!attribute [rw] response_time_root_causes
# A collection of ResponseTimeRootCause structures corresponding to
# the trace segments.
# @return [Array<Types::ResponseTimeRootCause>]
#
# @!attribute [rw] revision
# The revision number of a trace.
# @return [Integer]
#
# @!attribute [rw] matched_event_time
# The matched time stamp of a defined event.
# @return [Time]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/TraceSummary AWS API Documentation
#
class TraceSummary < Struct.new(:id, :duration, :response_time, :has_fault,
                                :has_error, :has_throttle, :is_partial,
                                :http, :annotations, :users, :service_ids,
                                :resource_arns, :instance_ids,
                                :availability_zones, :entry_point,
                                :fault_root_causes, :error_root_causes,
                                :response_time_root_causes, :revision,
                                :matched_event_time)
  SENSITIVE = []
  include Aws::Structure
end
# Information about a user recorded in segment documents.
#
# @!attribute [rw] user_name
# The user's name.
# @return [String]
#
# @!attribute [rw] service_ids
# Services that the user's request hit.
# @return [Array<Types::ServiceId>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/TraceUser AWS API Documentation
#
class TraceUser < Struct.new(:user_name, :service_ids)
  SENSITIVE = []
  include Aws::Structure
end
# Sampling statistics from a call to GetSamplingTargets that X-Ray could
# not process.
#
# @!attribute [rw] rule_name
# The name of the sampling rule.
# @return [String]
#
# @!attribute [rw] error_code
# The error code.
# @return [String]
#
# @!attribute [rw] message
# The error message.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/UnprocessedStatistics AWS API Documentation
#
class UnprocessedStatistics < Struct.new(:rule_name, :error_code, :message)
  SENSITIVE = []
  include Aws::Structure
end
# Information about a segment that failed processing.
#
# @!attribute [rw] id
# The segment's ID.
# @return [String]
#
# @!attribute [rw] error_code
# The error that caused processing to fail.
# @return [String]
#
# @!attribute [rw] message
# The error message.
# @return [String]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/UnprocessedTraceSegment AWS API Documentation
#
class UnprocessedTraceSegment < Struct.new(:id, :error_code, :message)
  SENSITIVE = []
  include Aws::Structure
end
# @note When making an API call, you may pass UntagResourceRequest
# data as a hash:
#
# {
# resource_arn: "AmazonResourceName", # required
# tag_keys: ["TagKey"], # required
# }
#
# @!attribute [rw] resource_arn
# The Amazon Resource Number (ARN) of an X-Ray group or sampling rule.
# @return [String]
#
# @!attribute [rw] tag_keys
# Keys for one or more tags that you want to remove from an X-Ray
# group or sampling rule.
# @return [Array<String>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/UntagResourceRequest AWS API Documentation
#
class UntagResourceRequest < Struct.new(:resource_arn, :tag_keys)
  SENSITIVE = []
  include Aws::Structure
end
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/UntagResourceResponse AWS API Documentation
#
class UntagResourceResponse < Aws::EmptyStructure
end
# @note When making an API call, you may pass UpdateGroupRequest
# data as a hash:
#
# {
# group_name: "GroupName",
# group_arn: "GroupARN",
# filter_expression: "FilterExpression",
# insights_configuration: {
# insights_enabled: false,
# notifications_enabled: false,
# },
# }
#
# @!attribute [rw] group_name
# The case-sensitive name of the group.
# @return [String]
#
# @!attribute [rw] group_arn
# The ARN that was generated upon creation.
# @return [String]
#
# @!attribute [rw] filter_expression
# The updated filter expression defining criteria by which to group
# traces.
# @return [String]
#
# @!attribute [rw] insights_configuration
# The structure containing configurations related to insights.
#
# * The InsightsEnabled boolean can be set to true to enable insights
# for the group or false to disable insights for the group.
#
# * The NotifcationsEnabled boolean can be set to true to enable
# insights notifications for the group. Notifications can only be
# enabled on a group with InsightsEnabled set to true.
# @return [Types::InsightsConfiguration]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/UpdateGroupRequest AWS API Documentation
#
class UpdateGroupRequest < Struct.new(:group_name, :group_arn,
                                      :filter_expression,
                                      :insights_configuration)
  SENSITIVE = []
  include Aws::Structure
end
# @!attribute [rw] group
# The group that was updated. Contains the name of the group that was
# updated, the ARN of the group that was updated, the updated filter
# expression, and the updated insight configuration assigned to the
# group.
# @return [Types::Group]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/UpdateGroupResult AWS API Documentation
#
class UpdateGroupResult < Struct.new(
:group)
SENSITIVE = []
include Aws::Structure
end
# @note When making an API call, you may pass UpdateSamplingRuleRequest
# data as a hash:
#
# {
# sampling_rule_update: { # required
# rule_name: "RuleName",
# rule_arn: "String",
# resource_arn: "ResourceARN",
# priority: 1,
# fixed_rate: 1.0,
# reservoir_size: 1,
# host: "Host",
# service_name: "ServiceName",
# service_type: "ServiceType",
# http_method: "HTTPMethod",
# url_path: "URLPath",
# attributes: {
# "AttributeKey" => "AttributeValue",
# },
# },
# }
#
# @!attribute [rw] sampling_rule_update
# The rule and fields to change.
# @return [Types::SamplingRuleUpdate]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/UpdateSamplingRuleRequest AWS API Documentation
#
class UpdateSamplingRuleRequest < Struct.new(
:sampling_rule_update)
SENSITIVE = []
include Aws::Structure
end
# @!attribute [rw] sampling_rule_record
# The updated rule definition and metadata.
# @return [Types::SamplingRuleRecord]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/UpdateSamplingRuleResult AWS API Documentation
#
class UpdateSamplingRuleResult < Struct.new(
:sampling_rule_record)
SENSITIVE = []
include Aws::Structure
end
# Information about a segment annotation.
#
# @!attribute [rw] annotation_value
# Values of the annotation.
# @return [Types::AnnotationValue]
#
# @!attribute [rw] service_ids
# Services to which the annotation applies.
# @return [Array<Types::ServiceId>]
#
# @see http://docs.aws.amazon.com/goto/WebAPI/xray-2016-04-12/ValueWithServiceIds AWS API Documentation
#
class ValueWithServiceIds < Struct.new(
:annotation_value,
:service_ids)
SENSITIVE = []
include Aws::Structure
end
end
end
| 31.22245 | 125 | 0.608657 |
394aa0fc8f964f0dd8293a8605d8e0ac50ef0aa9 | 484 | require 'generator/exercise_case'
class CollatzConjectureCase < Generator::ExerciseCase
def workload
case expected
when Integer
standard_assertion
when Hash
error_assertion
end
end
def standard_assertion
assert_equal { subject_of_test }
end
def error_assertion
"assert_raises(ArgumentError) { #{subject_of_test} }"
end
def subject_of_test
"CollatzConjecture.steps(#{input})"
end
def input
literal(number)
end
end
| 15.612903 | 57 | 0.71281 |
1c7d156b286fb881b2b9ed5186774ff0314d2ac9 | 1,141 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/apis/file_v1/service.rb'
require 'google/apis/file_v1/classes.rb'
require 'google/apis/file_v1/representations.rb'
module Google
module Apis
# Cloud Filestore API
#
# The Cloud Filestore API is used for creating and managing cloud file servers.
#
# @see https://cloud.google.com/filestore/
module FileV1
VERSION = 'V1'
REVISION = '20190129'
# View and manage your data across Google Cloud Platform services
AUTH_CLOUD_PLATFORM = 'https://www.googleapis.com/auth/cloud-platform'
end
end
end
| 32.6 | 83 | 0.732691 |
ab9057e0244b6454079efa48a69a16f27fe63946 | 12,433 | require 'spec_helper'
module Alchemy
describe Element do
describe '#new_from_scratch' do
it "should initialize an element by name from scratch" do
el = Element.new_from_scratch({'name' => 'article'})
el.should be_valid
end
it "should raise an error if the given name is not defined in the elements.yml" do
expect { Element.new_from_scratch({'name' => 'foobar'}) }.to raise_error
end
it "should take the first part of an given name containing a hash (#)" do
el = Element.new_from_scratch({'name' => 'article#header'})
el.name.should == "article"
end
it "should merge given attributes into defined ones" do
el = Element.new_from_scratch({'name' => 'article', 'page_id' => 1})
el.page_id.should == 1
end
it "should not have forbidden attributes from definition" do
el = Element.new_from_scratch({'name' => 'article'})
el.contents.should == []
end
end
context "scoped" do
it "should return all public elements" do
element_1 = FactoryGirl.create(:element, :public => true)
element_2 = FactoryGirl.create(:element, :public => true)
elements = Element.published.all
elements.should include(element_1)
elements.should include(element_2)
end
it "should return all elements by name" do
element_1 = FactoryGirl.create(:element, :name => 'article')
element_2 = FactoryGirl.create(:element, :name => 'article')
elements = Element.named(['article']).all
elements.should include(element_1)
elements.should include(element_2)
end
it "should return all elements but excluded ones" do
FactoryGirl.create(:element, :name => 'article')
FactoryGirl.create(:element, :name => 'article')
excluded = FactoryGirl.create(:element, :name => 'claim')
Element.excluded(['claim']).all.should_not include(excluded)
end
context "not_in_cell" do
it "should return all elements that are not in a cell" do
Element.delete_all
FactoryGirl.create(:element, :cell_id => 6)
FactoryGirl.create(:element, :cell_id => nil)
Element.not_in_cell.should have(1).element
end
end
end
describe '.all_definitions_for' do
it "should return a list of element definitions for a list of element names" do
element_names = ["article"]
definitions = Element.all_definitions_for(element_names)
definitions.first.fetch("name").should == 'article'
end
context "given 'all' as element name" do
before do
@element_definition = [
{'name' => 'article'},
{'name' => 'headline'}
]
Element.stub!(:definitions).and_return @element_definition
end
it "should return all element definitions" do
Element.all_definitions_for('all').should == @element_definition
end
end
it "should always return an array" do
definitions = Element.all_definitions_for(nil)
definitions.should == []
end
end
context "no description files are found" do
before(:each) do
FileUtils.mv(File.join(File.dirname(__FILE__), '..', '..', 'config', 'alchemy', 'elements.yml'), File.join(File.dirname(__FILE__), '..', '..', 'config', 'alchemy', 'elements.yml.bak'))
end
it "should raise an error" do
expect { Element.descriptions }.to raise_error(LoadError)
end
after(:each) do
FileUtils.mv(File.join(File.dirname(__FILE__), '..', '..', 'config', 'alchemy', 'elements.yml.bak'), File.join(File.dirname(__FILE__), '..', '..', 'config', 'alchemy', 'elements.yml'))
end
end
context "retrieving contents, essences and ingredients" do
let(:element) { FactoryGirl.create(:element, :name => 'news', :create_contents_after_create => true) }
it "should return an ingredient by name" do
element.ingredient('news_headline').should == EssenceText.first.ingredient
end
it "should return the content for rss title" do
element.content_for_rss_title.should == element.contents.find_by_name('news_headline')
end
it "should return the content for rss description" do
element.content_for_rss_description.should == element.contents.find_by_name('body')
end
end
context "limited amount" do
before do
defs = [
{
'name' => 'column_headline',
'amount' => 3,
'contents' => [{'name' => 'headline', 'type' => 'EssenceText'}]
},
{
'name' => 'unique_headline',
'unique' => true,
'amount' => 3,
'contents' => [{'name' => 'headline', 'type' => 'EssenceText'}]
}
]
# F&%#ing alias methods
Element.stub!(:definitions).and_return(defs)
Element.stub!(:descriptions).and_return(defs)
PageLayout.stub!(:get).and_return({
'name' => 'columns',
'elements' => ['column_headline', 'unique_headline'],
'autogenerate' => ['unique_headline', 'column_headline', 'column_headline', 'column_headline']
})
@page = FactoryGirl.create(:page, :page_layout => 'columns', :do_not_autogenerate => false)
end
it "should be readable" do
element = Element.all_definitions_for(['column_headline']).first
element['amount'].should be 3
end
it "should limit elements" do
Element.all_for_page(@page).each { |e| e['name'].should_not == 'column_headline' }
end
it "should be ignored if unique" do
Element.all_for_page(@page).each { |e| e['name'].should_not == 'unique_headline' }
end
end
context "collections" do
context "for trashed elements" do
let(:element) do
FactoryGirl.create(:element, :page_id => 1)
end
it "should return a collection of trashed elements" do
not_trashed_element = FactoryGirl.create(:element)
element.trash
Element.trashed.should include(element)
end
it "should return a collection of not trashed elements" do
Element.not_trashed.should include(element)
end
end
end
describe "#trash" do
before(:each) do
@element = FactoryGirl.create(:element, :page_id => 1, :cell_id => 1)
@element.trash
end
it "should remove the elements position" do
@element.position.should == nil
end
it "should set the public state to false" do
@element.public?.should == false
end
it "should not remove the page_id" do
@element.page_id.should == 1
end
it "should not remove the cell_id" do
@element.cell_id.should == 1
end
it "it should be possible to trash more than one element from the same page" do
trashed_element_2 = FactoryGirl.create(:element, :page_id => 1)
trashed_element_2.trash
Element.trashed.should include(@element, trashed_element_2)
end
end
it "should raise error if all_for_page method has no page" do
expect { Element.all_for_page(nil) }.to raise_error(TypeError)
end
describe "#content_by_type" do
before(:each) do
@element = FactoryGirl.create(:element, :name => 'headline')
@content = @element.contents.first
end
context "with namespaced essence type" do
it "should return content by passing a essence type" do
@element.content_by_type('Alchemy::EssenceText').should == @content
end
end
context "without namespaced essence type" do
it "should return content by passing a essence type" do
@element.content_by_type('EssenceText').should == @content
end
end
end
describe "#all_contents_by_type" do
before(:each) do
@element = FactoryGirl.create(:element)
@contents = @element.contents.select { |c| c.essence_type == 'Alchemy::EssenceText' }
end
context "with namespaced essence type" do
it "should return content by passing a essence type" do
@element.all_contents_by_type('Alchemy::EssenceText').should == @contents
end
end
context "without namespaced essence type" do
it "should return content by passing a essence type" do
@element.all_contents_by_type('EssenceText').should == @contents
end
end
end
describe '#copy' do
let(:element) { FactoryGirl.create(:element, :create_contents_after_create => true) }
it "should not create contents from scratch" do
copy = Element.copy(element)
copy.contents.count.should == element.contents.count
end
it "should create a new record with all attributes of source except given differences" do
copy = Element.copy(element, {:name => 'foobar'})
copy.name.should == 'foobar'
end
it "should make copies of all contents of source" do
copy = Element.copy(element)
copy.contents.collect(&:id).should_not == element.contents.collect(&:id)
end
end
describe "Finding previous or next element." do
let(:page) { FactoryGirl.create(:language_root_page) }
before(:each) do
@element1 = FactoryGirl.create(:element, :page => page, :name => 'headline')
@element2 = FactoryGirl.create(:element, :page => page)
@element3 = FactoryGirl.create(:element, :page => page, :name => 'text')
end
describe '#prev' do
it "should return previous element on same page" do
@element2.prev.should == @element1
end
context "with name as parameter" do
it "should return previous of this kind" do
@element3.prev('headline').should == @element1
end
end
end
describe '#next' do
it "should return next element on same page" do
@element1.next.should == @element2
end
context "with name as parameter" do
it "should return next of this kind" do
@element1.next('text').should == @element3
end
end
end
end
describe '#belonging_cellnames' do
before do
@page = FactoryGirl.create(:public_page)
@element = FactoryGirl.create(:element, :page => @page)
end
context "with page having cells defining the correct elements" do
before do
Cell.stub!(:definitions).and_return([
{'name' => 'header', 'elements' => ['article', 'headline']},
{'name' => 'footer', 'elements' => ['article', 'text']},
{'name' => 'sidebar', 'elements' => ['teaser']}
])
end
it "should return a list of all cells from given page this element could be placed in" do
@header_cell = FactoryGirl.create(:cell, :name => 'header', :page => @page)
@footer_cell = FactoryGirl.create(:cell, :name => 'footer', :page => @page)
@sidebar_cell = FactoryGirl.create(:cell, :name => 'sidebar', :page => @page)
@element.belonging_cellnames(@page).should include('header')
@element.belonging_cellnames(@page).should include('footer')
end
context "but without any cells" do
it "should return the 'nil cell'" do
@element.belonging_cellnames(@page).should == ['for_other_elements']
end
end
end
context "with page having cells defining the wrong elements" do
before do
Cell.stub!(:definitions).and_return([
{'name' => 'header', 'elements' => ['download', 'headline']},
{'name' => 'footer', 'elements' => ['contactform', 'text']},
{'name' => 'sidebar', 'elements' => ['teaser']}
])
end
it "should return the 'nil cell'" do
@header_cell = FactoryGirl.create(:cell, :name => 'header', :page => @page)
@footer_cell = FactoryGirl.create(:cell, :name => 'footer', :page => @page)
@sidebar_cell = FactoryGirl.create(:cell, :name => 'sidebar', :page => @page)
@element.belonging_cellnames(@page).should == ['for_other_elements']
end
end
end
end
end
| 30.698765 | 192 | 0.603314 |
110b79eb8b3284c51aa54554947930ac8ad30cc5 | 3,982 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with this
# work for additional information regarding copyright ownership. The ASF
# licenses this file to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helpers'))
unless RUBY_PLATFORM =~ /java/
describe ENV, 'JAVA_HOME on OS X' do
before do
@old_home, ENV['JAVA_HOME'] = ENV['JAVA_HOME'], nil
@old_env_java = Object.module_eval { remove_const :ENV_JAVA }
RbConfig::CONFIG.should_receive(:[]).at_least(:once).with('host_os').and_return('darwin0.9')
end
it 'should point to default JVM' do
load File.expand_path('../lib/buildr/java/rjb.rb')
ENV['JAVA_HOME'].should == '/System/Library/Frameworks/JavaVM.framework/Home'
end
it 'should use value of environment variable if specified' do
ENV['JAVA_HOME'] = '/System/Library/Frameworks/JavaVM.specified'
load File.expand_path('../lib/buildr/java/rjb.rb')
ENV['JAVA_HOME'].should == '/System/Library/Frameworks/JavaVM.specified'
end
after do
ENV['JAVA_HOME'] = @old_home
ENV_JAVA.replace @old_env_java
end
end
else
describe 'JRuby environment' do
it 'should enforce a minimum version of jruby' do
check =File.read(File.expand_path('../lib/buildr/java/jruby.rb')).match(/JRUBY_MIN_VERSION.*\n.*JRUBY_MIN_VERSION\n/).to_s
check.sub!('JRUBY_VERSION', "'0.0.0'")
lambda { eval(check) }.should raise_error(/JRuby must be at least at version /)
end
end
end
describe 'Java.tools_jar' do
before do
@old_home = ENV['JAVA_HOME']
end
describe 'when JAVA_HOME points to a JDK' do
before do
Java.instance_eval { @tools_jar = nil }
write 'jdk/lib/tools.jar'
ENV['JAVA_HOME'] = File.expand_path('jdk')
end
it 'should return the path to tools.jar' do
Java.tools_jar.should point_to_path('jdk/lib/tools.jar')
end
end
describe 'when JAVA_HOME points to a JRE inside a JDK' do
before do
Java.instance_eval { @tools_jar = nil }
write 'jdk/lib/tools.jar'
ENV['JAVA_HOME'] = File.expand_path('jdk/jre')
end
it 'should return the path to tools.jar' do
Java.tools_jar.should point_to_path('jdk/lib/tools.jar')
end
end
describe 'when there is no tools.jar' do
before do
Java.instance_eval { @tools_jar = nil }
ENV['JAVA_HOME'] = File.expand_path('jdk')
end
it 'should return nil' do
Java.tools_jar.should be_nil
end
end
after do
ENV['JAVA_HOME'] = @old_home
end
end
describe 'Java#java' do
before do
@old_home = ENV['JAVA_HOME']
end
describe 'when JAVA_HOME points to an invalid JRE/JDK installation' do
before do
write 'jdk'
ENV['JAVA_HOME'] = File.expand_path('jdk')
end
it 'should fail with an error message mentioning JAVA_HOME' do
begin
Java.java ['-version']
fail 'Java.java did not fail with JAVA_HOME pointing to invalid JRE/JDK installation'
rescue => error
error.message.to_s.should match(/JAVA_HOME/)
end
end
end
after do
ENV['JAVA_HOME'] = @old_home
end
end
describe Java::JavaWrapper do
it 'should be removed in version 1.5 since it was deprecated in version 1.3' do
Buildr::VERSION.should < '1.5'
lambda { Java::JavaWrapper }.should_not raise_error
end
end
| 29.93985 | 128 | 0.685334 |
abdec945deb608ea06eefc09e44bdc676c9639da | 138 | module Noticent
module Testing
class Exclusive < ::Noticent::Channel
def only_here
render
end
end
end
end
| 13.8 | 41 | 0.623188 |
bf84b845793259e687b5f48eabbde743e1f4b655 | 350 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe UpdateAllIssuesJob, type: :job do
Sidekiq::Testing.inline!
before do
2.times { FactoryBot.create(:issue) }
end
it 'calls the IssueUpdateJob for all issues' do
expect(IssueUpdateJob).to receive(:perform_async).twice
UpdateAllIssuesJob.perform_async
end
end
| 20.588235 | 59 | 0.754286 |
e985798b3812cf320b08bb4665153353e41b1d3a | 3,071 | module NestedRelatedItemFixtures
def related_item_host_fixture
<<-XML
<mods>
<relatedItem type="host">
<titleInfo>A host type related item</titleInfo>
<note displayLabel="Custom Notes">A note content</note>
</relatedItem>
<relatedItem type="host">
<titleInfo>A collection</titleInfo>
<typeOfResource collection="yes" />
</relatedItem>
</mods>
XML
end
def multi_constituent_fixture
<<-XML
<mods>
<relatedItem type="constituent">
<titleInfo>
<title>Polychronicon (epitome and continuation to 1429)</title>
<partNumber>ff. 1r - 29v</partNumber>
</titleInfo>
<titleInfo displayLabel="Nasmith Title" type="alternative">
<title>Epitome chronicae Cicestrensis, sed extractum e Polychronico, usque ad annum Christi 1429</title>
</titleInfo>
<name type="personal">
<namePart>Ranulf Higden OSB</namePart>
<role>
<roleTerm authority="marcrelator" type="text" valueURI="http://id.loc.gov/vocabulary/relators/aut">author</roleTerm>
</role>
</name>
<note>Dates are marked in the margin. Ends with the coronation of Henry VI at St Denis</note>
<note displayLabel="Incipit" type="incipit">Ieronimus ad eugenium in epistola 43a dicit quod decime leguntur primum date ab abraham</note>
<note displayLabel="Explicit" type="explicit">videlicet nono die mensis decembris ano etatis sue 10o</note>
</relatedItem>
<relatedItem type="constituent">
<titleInfo>
<title>Gesta regum ad Henricum VI</title>
<partNumber>ff. 30r - 53r</partNumber>
</titleInfo>
<titleInfo displayLabel="Nasmith Title" type="alternative">
<title>Breviarium historiae Angliae ad annum quartum Henrici IV. viz. 1402</title>
</titleInfo>
<titleInfo displayLabel="M.R. James Title" type="alternative">
<title>Breuiarium</title>
</titleInfo>
<name type="personal">
<namePart>Thomas Harsfield</namePart>
<role>
<roleTerm authority="marcrelator" type="text" valueURI="http://id.loc.gov/vocabulary/relators/aut">author</roleTerm>
</role>
</name>
<note>Another hand. Begins with 19 lines</note>
<note displayLabel="Incipit 1" type="incipit">Albion est terra constans in finibus orbis</note>
<note displayLabel="Explicit 1" type="explicit">Petrus pictauis dat cistrensis monachusque</note>
<note displayLabel="Incipit 2" type="incipit">Et quia confrater carissime non solum audiendo sacre scripture verbis aurem sedulus auditor accomodatur (!) tenetur</note>
<note displayLabel="Explicit 2" type="explicit">Tempore huius regis Owynus quidam Wallensis erigens se in principem Wallie toto vite sue tempore cum Wallensibus rebellauit</note>
</relatedItem>
</mods>
XML
end
end
| 47.246154 | 188 | 0.6366 |
62aa4709a4f531bc7fe629d67183ea31d3fad2f5 | 5,232 | #
# Be sure to run `pod spec lint QuintypeFramework.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "QuintypeFramework"
s.version = "0.0.2"
s.summary = "A sample library to check network requests using ALamofire and pasring response using SwiftyJson and creating a pod for tesing"
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
DESC
s.homepage = "https://github.com/arjunpa/QuintypeFramework"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = { :type => "MIT", :file => "LICENSE" }
# s.license = { :type => "MIT", :file => "FILE_LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "Arjun P A" => "[email protected]" }
# Or just: s.author = ""
# s.authors = { "" => "" }
# s.social_media_url = "http://twitter.com/"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
# s.platform = :ios
# s.platform = :ios, "8.0"
# When using multiple platforms
# s.ios.deployment_target = "5.0"
# s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
s.source = { :git => "https://github.com/arjunpa/QuintypeFramework.git", :tag => "0.0.1" }
# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
s.source_files = "Classes", "Classes/**/*.{h,m,swift}"
s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
s.framework = "UIKit"
# s.frameworks = "SomeFramework", "AnotherFramework"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
s.dependency "Alamofire", "~> 3.4.0"
s.dependency "SDWebImage", "~> 3.7.6"
s.dependency "SwiftyJSON", "~> 2.3.2"
end | 37.640288 | 147 | 0.593654 |
0347b367608fcf47340ded3a40458dad9578d298 | 879 | class PoisonCentres::NotificationsController < ApplicationController
def index
result = search_notifications(10)
@notifications = result.records
end
def show
@notification = Notification.find_by reference_number: params[:reference_number]
authorize @notification, policy_class: PoisonCentreNotificationPolicy
if User.current&.poison_centre_user?
render "show_poison_centre"
else
@contact_person = @notification.responsible_person.contact_persons.first
render "show_msa"
end
end
private
def authorize_user!
raise Pundit::NotAuthorizedError unless poison_centre_or_msa_user?
end
def search_notifications(page_size)
query = ElasticsearchQuery.new(query_params[:q])
Notification.full_search(query).paginate(page: params[:page], per_page: page_size)
end
def query_params
params.permit(:q)
end
end
| 26.636364 | 86 | 0.763367 |
0123ee2460f8f879991ed1145efe1abf0edd0316 | 7,134 | require File.expand_path(File.join(File.dirname(__FILE__), '..', 'spec_helper'))
if RUBY_VERSION >= '1.9.0'
require 'csv'
else
gem 'fastercsv', '~>1.4.0'
require 'fastercsv'
end
if ADAPTER
module ::TypeTests
class Impostor < DataMapper::Type
primitive String
end
class Coconut
include DataMapper::Resource
storage_names[ADAPTER] = 'coconuts'
def self.default_repository_name
ADAPTER
end
property :id, Serial
property :faked, Impostor
property :active, Boolean
property :note, Text
end
end
class ::Lemon
include DataMapper::Resource
def self.default_repository_name
ADAPTER
end
property :id, Serial
property :color, String
property :deleted_at, DataMapper::Types::ParanoidDateTime
end
class ::Lime
include DataMapper::Resource
def self.default_repository_name
ADAPTER
end
property :id, Serial
property :color, String
property :deleted_at, DataMapper::Types::ParanoidBoolean
end
describe DataMapper::Type, "with #{ADAPTER}" do
before do
TypeTests::Coconut.auto_migrate!(ADAPTER)
@document = <<-EOS.margin
NAME, RATING, CONVENIENCE
Freebird's, 3, 3
Whataburger, 1, 5
Jimmy John's, 3, 4
Mignon, 5, 2
Fuzi Yao's, 5, 1
Blue Goose, 5, 1
EOS
@stuff = YAML::dump({ 'Happy Cow!' => true, 'Sad Cow!' => false })
@active = true
@note = "This is a note on our ol' guy bob"
end
it "should instantiate an object with custom types" do
coconut = TypeTests::Coconut.new(:faked => 'bob', :active => @active, :note => @note)
coconut.faked.should == 'bob'
coconut.active.should be_a_kind_of(TrueClass)
coconut.note.should be_a_kind_of(String)
end
it "should CRUD an object with custom types" do
repository(ADAPTER) do
coconut = TypeTests::Coconut.new(:faked => 'bob', :active => @active, :note => @note)
coconut.save.should be_true
coconut.id.should_not be_nil
fred = TypeTests::Coconut.get!(coconut.id)
fred.faked.should == 'bob'
fred.active.should be_a_kind_of(TrueClass)
fred.note.should be_a_kind_of(String)
note = "Seems like bob is just mockin' around"
fred.note = note
fred.save.should be_true
active = false
fred.active = active
fred.save.should be_true
# Can't call coconut.reload since coconut.collection isn't setup.
mac = TypeTests::Coconut.get!(fred.id)
mac.active.should == active
mac.note.should == note
end
end
# NOTE(review): these examples exercise DataMapper's "paranoid" (soft-delete)
# types: destroy stamps a deleted_at/deleted marker instead of removing rows,
# and default scopes hide the marked rows.
it "should respect paranoia with a datetime" do
  Lemon.auto_migrate!(ADAPTER)
  lemon = nil
  repository(ADAPTER) do |repository|
    lemon = Lemon.new
    lemon.color = 'green'
    lemon.save
    lemon.destroy
    # Soft delete: destroy records a timestamp rather than deleting the row.
    lemon.deleted_at.should be_kind_of(DateTime)
  end
  repository(ADAPTER) do |repository|
    # The default scope hides soft-deleted records.
    Lemon.all.should be_empty
    Lemon.get(lemon.id).should be_nil
  end
end

it "should provide access to paranoid items with DateTime" do
  Lemon.auto_migrate!(ADAPTER)
  lemon = nil
  repository(ADAPTER) do |repository|
    %w(red green yellow blue).each do |color|
      Lemon.create(:color => color)
    end
    Lemon.all.size.should == 4
    Lemon.first.destroy
    Lemon.all.size.should == 3
    # with_deleted scopes queries to the soft-deleted rows only
    # (one row was destroyed above).
    Lemon.with_deleted{Lemon.all.size.should == 1}
  end
end

it "should set paranoid datetime to a date time" do
  tmp = (DateTime.now - 0.5)
  dt = DateTime.now # NOTE(review): appears unused — confirm before removing.
  # Freeze "now" so the recorded deleted_at can be compared exactly.
  DateTime.stub!(:now).and_return(tmp)
  repository(ADAPTER) do |repository|
    lemon = Lemon.new
    lemon.color = 'green'
    lemon.save
    lemon.destroy
    lemon.deleted_at.should == tmp
  end
end

it "should respect paranoia with a boolean" do
  Lime.auto_migrate!(ADAPTER)
  lime = nil
  repository(ADAPTER) do |repository|
    lime = Lime.new
    lime.color = 'green'
    lime.save
    lime.destroy
    # NOTE(review): Lime presumably uses ParanoidBoolean, so deleted_at holds
    # `true` rather than a timestamp here — confirm against the Lime model.
    lime.deleted_at.should be_kind_of(TrueClass)
  end
  repository(ADAPTER) do |repository|
    Lime.all.should be_empty
    Lime.get(lime.id).should be_nil
  end
end

it "should provide access to paranoid items with Boolean" do
  Lime.auto_migrate!(ADAPTER)
  lemon = nil
  repository(ADAPTER) do |repository|
    %w(red green yellow blue).each do |color|
      Lime.create(:color => color)
    end
    Lime.all.size.should == 4
    Lime.first.destroy
    Lime.all.size.should == 3
    Lime.with_deleted{Lime.all.size.should == 1}
  end
end

describe "paranoid types across repositories" do
  before(:all) do
    # Register a second adapter so Orange can carry paranoid properties in
    # one repository while staying a plain resource in the default one.
    DataMapper::Repository.adapters[:alternate_paranoid] = repository(ADAPTER).adapter.dup

    Object.send(:remove_const, :Orange) if defined?(Orange)
    class ::Orange
      include DataMapper::Resource
      def self.default_repository_name
        ADAPTER
      end
      property :id, Serial
      property :color, String

      # The paranoid properties exist only in the alternate repository.
      repository(:alternate_paranoid) do
        property :deleted, DataMapper::Types::ParanoidBoolean
        property :deleted_at, DataMapper::Types::ParanoidDateTime
      end
    end
    repository(:alternate_paranoid){Orange.auto_migrate!}
  end

  before(:each) do
    %w(red orange blue green).each{|color| o = Orange.create(:color => color)}
  end

  after(:each) do
    Orange.repository.adapter.execute("DELETE FROM oranges")
  end

  it "should setup the correct objects for the spec" do
    repository(:alternate_paranoid){Orange.all.should have(4).items}
  end

  it "should allow access the the default repository" do
    Orange.all.should have(4).items
  end

  it "should mark the objects as deleted in the alternate_paranoid repository" do
    repository(:alternate_paranoid) do
      Orange.first.destroy
      Orange.all.should have(3).items
      # Raw SQL bypasses the paranoid scope, so all 4 rows are still present.
      Orange.find_by_sql("SELECT * FROM oranges").should have(4).items
    end
  end

  it "should mark the objects as deleted in the alternate_paranoid repository but ignore it in the #{ADAPTER} repository" do
    repository(:alternate_paranoid) do
      Orange.first.destroy
    end
    Orange.all.should have(4).items
  end

  it "should raise an error when trying to destroy from a repository that is not paranoid" do
    lambda do
      Orange.first.destroy
    end.should raise_error(ArgumentError)
  end

  it "should set all paranoid attributes on delete" do
    repository(:alternate_paranoid) do
      orange = Orange.first
      orange.deleted.should be_false
      orange.deleted_at.should be_nil
      orange.destroy
      # Both paranoid markers are stamped by a single destroy.
      orange.deleted.should be_true
      orange.deleted_at.should be_a_kind_of(DateTime)
    end
  end
end
end
end
| 25.847826 | 128 | 0.621951 |
ac9ba89f12090cc96d8bc8639726561c46ca3e67 | 1,901 | # frozen_string_literal: true
# == Schema Information
#
# Table name: assignments
#
# id :integer not null, primary key
# created_at :datetime
# updated_at :datetime
# user_id :integer
# course_id :integer
# article_id :integer
# article_title :string(255)
# role :integer
# wiki_id :integer
#
require 'rails_helper'
# Specs for Assignment creation: uniqueness is scoped to user+course+wiki+
# article+role, so the "same" title on another wiki is a distinct assignment.
describe Assignment do
  before { stub_wiki_validation }

  describe 'assignment creation' do
    context 'when no similar assignments exist' do
      it 'creates Assignment objects' do
        course = create(:course)
        assignment = create(:assignment, course_id: course.id)
        assignment2 = create(:redlink, course_id: course.id)
        expect(assignment.id).to be_kind_of(Integer)
        # A redlink has no backing article yet.
        expect(assignment2.article_id).to be_nil
      end
    end

    # FIX: description read "is assignment" — corrected to "is assigned".
    context 'when the same article on a different wiki is assigned' do
      let(:es_wiki) { create(:wiki, language: 'es', project: 'wikipedia') }

      before do
        create(:assignment, user_id: 1, course_id: 1, wiki_id: 1,
                            article_title: 'Selfie', role: 0)
      end

      it 'creates the new assignment' do
        Assignment.create(user_id: 1, course_id: 1, wiki_id: es_wiki.id,
                          article_title: 'Selfie', role: 0)
        expect(Assignment.count).to eq(2)
      end
    end

    # FIX: description read "is assignment twice" — corrected to "is assigned twice".
    context 'when the same article is assigned twice' do
      before do
        create(:assignment, user_id: 1, course_id: 1, wiki_id: 1,
                            article_title: 'Selfie', role: 0)
      end

      # Use RSpec's `subject` block rather than overriding it via
      # `let(:subject)`, which is discouraged; behavior is identical.
      subject do
        Assignment.create!(user_id: 1, course_id: 1, wiki_id: 1,
                           article_title: 'Selfie', role: 0)
      end

      it 'does not create a duplicate' do
        expect { subject }.to raise_error(ActiveRecord::RecordInvalid)
      end
    end
  end
end
| 29.246154 | 75 | 0.610205 |
bb7cbfc23b03d404ed341fbe12ddb7a2a994fac8 | 11,171 | require "formula_versions"
require "migrator"
require "formulary"
require "descriptions"
require "cleanup"
module Homebrew
  # Prints the "Auto-updated Homebrew!" banner at most once per run, and only
  # when this invocation was triggered by the pre-install auto-update.
  def update_preinstall_header
    @header_already_printed ||= begin
      ohai "Auto-updated Homebrew!" if ARGV.include?("--preinstall")
      true
    end
  end

  # Entry point for `brew update-report`: summarises the formula changes
  # between the revisions that `brew update` recorded in the
  # HOMEBREW_UPDATE_BEFORE/HOMEBREW_UPDATE_AFTER environment variables.
  def update_report
    HOMEBREW_REPOSITORY.cd do
      # One-time analytics notice; shown-state is persisted in the local git
      # config so the user does not see it again.
      analytics_message_displayed = \
        Utils.popen_read("git", "config", "--local", "--get", "homebrew.analyticsmessage").chuzzle
      analytics_disabled = \
        Utils.popen_read("git", "config", "--local", "--get", "homebrew.analyticsdisabled").chuzzle
      if analytics_message_displayed != "true" && analytics_disabled != "true" && !ENV["HOMEBREW_NO_ANALYTICS"]
        # Opt this very run out so nothing is sent before the user has seen
        # the notice.
        ENV["HOMEBREW_NO_ANALYTICS_THIS_RUN"] = "1"
        ohai "Homebrew has enabled anonymous aggregate user behaviour analytics"
        puts "Read the analytics documentation (and how to opt-out) here:"
        puts " https://git.io/brew-analytics"
        # Consider the message possibly missed if not a TTY.
        if $stdout.tty?
          safe_system "git", "config", "--local", "--replace-all", "homebrew.analyticsmessage", "true"
        end
      end
    end

    install_core_tap_if_necessary

    hub = ReporterHub.new
    updated = false

    initial_revision = ENV["HOMEBREW_UPDATE_BEFORE"].to_s
    current_revision = ENV["HOMEBREW_UPDATE_AFTER"].to_s
    # These variables are exported by `brew update`; running this command on
    # its own is unsupported.
    if initial_revision.empty? || current_revision.empty?
      odie "update-report should not be called directly!"
    end

    if initial_revision != current_revision
      update_preinstall_header
      puts "Updated Homebrew from #{shorten_revision(initial_revision)} to #{shorten_revision(current_revision)}."
      updated = true
    end

    # Collect a Reporter per git-backed tap that changed.
    updated_taps = []
    Tap.each do |tap|
      next unless tap.git?
      begin
        reporter = Reporter.new(tap)
      rescue Reporter::ReporterRevisionUnsetError => e
        onoe e if ARGV.homebrew_developer?
        next
      end
      if reporter.updated?
        updated_taps << tap.name
        hub.add(reporter)
      end
    end

    unless updated_taps.empty?
      update_preinstall_header
      puts "Updated #{updated_taps.size} tap#{plural(updated_taps.size)} " \
        "(#{updated_taps.join(", ")})."
      updated = true
    end

    migrate_legacy_cache_if_necessary

    if !updated
      if !ARGV.include?("--preinstall") && !ENV["HOMEBREW_UPDATE_FAILED"]
        puts "Already up-to-date."
      end
    elsif hub.empty?
      puts "No changes to formulae."
    else
      # Print the report, then migrate installed kegs affected by tap
      # migrations and formula renames.
      hub.dump
      hub.reporters.each(&:migrate_tap_migration)
      hub.reporters.each(&:migrate_formula_rename)
      Descriptions.update_cache(hub)
    end

    Tap.each(&:link_manpages)

    Homebrew.failed = true if ENV["HOMEBREW_UPDATE_FAILED"]
  end

  private

  # Abbreviated (short) form of a git revision in HOMEBREW_REPOSITORY.
  def shorten_revision(revision)
    Utils.popen_read("git", "-C", HOMEBREW_REPOSITORY, "rev-parse", "--short", revision).chomp
  end

  # Ensures homebrew/core is tapped; when it was just installed, records the
  # same revision as both "before" and "after" so it reports no changes.
  def install_core_tap_if_necessary
    core_tap = CoreTap.instance
    return if core_tap.installed?
    CoreTap.ensure_installed! :quiet => false
    revision = core_tap.git_head
    ENV["HOMEBREW_UPDATE_BEFORE_HOMEBREW_HOMEBREW_CORE"] = revision
    ENV["HOMEBREW_UPDATE_AFTER_HOMEBREW_HOMEBREW_CORE"] = revision
  end

  # One-time, best-effort migration of the legacy /Library/Caches/Homebrew
  # cache into HOMEBREW_CACHE. Bails out early if already migrated,
  # unreadable, unwritable, or the directory looks compromised.
  def migrate_legacy_cache_if_necessary
    legacy_cache = Pathname.new "/Library/Caches/Homebrew"
    return if HOMEBREW_CACHE.to_s == legacy_cache.to_s
    return unless legacy_cache.directory?
    return unless legacy_cache.readable_real?

    # Marker file guarantees the migration is attempted at most once.
    migration_attempted_file = legacy_cache/".migration_attempted"
    return if migration_attempted_file.exist?

    return unless legacy_cache.writable_real?
    FileUtils.touch migration_attempted_file

    # Cleanup to avoid copying files unnecessarily
    ohai "Cleaning up #{legacy_cache}..."
    Cleanup.cleanup_cache legacy_cache

    # This directory could have been compromised if it's world-writable/
    # a symlink/owned by another user so don't copy files in those cases.
    world_writable = legacy_cache.stat.mode & 0777 == 0777
    return if world_writable
    return if legacy_cache.symlink?
    return if !legacy_cache.owned? && legacy_cache.lstat.uid != 0

    ohai "Migrating #{legacy_cache} to #{HOMEBREW_CACHE}..."
    HOMEBREW_CACHE.mkpath
    legacy_cache.cd do
      legacy_cache.entries.each do |f|
        next if [".", "..", ".migration_attempted"].include? "#{f}"
        begin
          FileUtils.cp_r f, HOMEBREW_CACHE
        rescue
          # Remember any failure; the whole copy is best-effort.
          @migration_failed ||= true
        end
      end
    end

    if @migration_failed
      opoo <<-EOS.undent
        Failed to migrate #{legacy_cache} to
        #{HOMEBREW_CACHE}. Please do so manually.
      EOS
    else
      ohai "Deleting #{legacy_cache}..."
      FileUtils.rm_rf legacy_cache
      if legacy_cache.exist?
        # Deletion failed: re-create the marker (rm_rf removed it).
        FileUtils.touch migration_attempted_file
        opoo <<-EOS.undent
          Failed to delete #{legacy_cache}.
          Please do so manually.
        EOS
      end
    end
  end
end
# Computes which formulae of a single tap were added, modified, renamed or
# deleted between two git revisions, and carries out the follow-up keg
# migrations for tap migrations and formula renames.
class Reporter
  class ReporterRevisionUnsetError < RuntimeError
    def initialize(var_name)
      super "#{var_name} is unset!"
    end
  end

  attr_reader :tap, :initial_revision, :current_revision

  # Reads the per-tap before/after revisions exported by `brew update`.
  # Raises ReporterRevisionUnsetError when either variable is missing.
  def initialize(tap)
    @tap = tap

    initial_revision_var = "HOMEBREW_UPDATE_BEFORE#{repo_var}"
    @initial_revision = ENV[initial_revision_var].to_s
    raise ReporterRevisionUnsetError, initial_revision_var if @initial_revision.empty?

    current_revision_var = "HOMEBREW_UPDATE_AFTER#{repo_var}"
    @current_revision = ENV[current_revision_var].to_s
    raise ReporterRevisionUnsetError, current_revision_var if @current_revision.empty?
  end

  # Lazily built hash of change lists keyed by git status letter:
  # :A added, :M modified, :D deleted, :R renamed ([old, new] pairs).
  def report
    return @report if @report

    @report = Hash.new { |h, k| h[k] = [] }
    return @report unless updated?

    diff.each_line do |line|
      status, *paths = line.split
      src = Pathname.new paths.first
      dst = Pathname.new paths.last

      # Only Ruby formula files are of interest.
      next unless dst.extname == ".rb"
      next unless paths.any? { |p| tap.formula_file?(p) }

      case status
      when "A", "D"
        @report[status.to_sym] << tap.formula_file_to_name(src)
      when "M"
        begin
          formula = Formulary.factory(tap.path/src)
          new_version = formula.pkg_version
          old_version = FormulaVersions.new(formula).formula_at_revision(@initial_revision, &:pkg_version)
          # Skip modifications that did not change the package version.
          next if new_version == old_version
        rescue Exception => e
          onoe e if ARGV.homebrew_developer?
        end
        @report[:M] << tap.formula_file_to_name(src)
      when /^R\d{0,3}/
        # Git rename entries carry a similarity score, e.g. "R100".
        src_full_name = tap.formula_file_to_name(src)
        dst_full_name = tap.formula_file_to_name(dst)
        # Don't report formulae that are moved within a tap but not renamed
        next if src_full_name == dst_full_name
        @report[:D] << src_full_name
        @report[:A] << dst_full_name
      end
    end

    # Collapse delete+add pairs that match the tap's registered formula
    # renames into :R entries.
    renamed_formulae = []
    @report[:D].each do |old_full_name|
      old_name = old_full_name.split("/").last
      new_name = tap.formula_renames[old_name]
      next unless new_name

      # Core-tap formulae are referred to without the tap prefix.
      if tap.core_tap?
        new_full_name = new_name
      else
        new_full_name = "#{tap}/#{new_name}"
      end

      renamed_formulae << [old_full_name, new_full_name] if @report[:A].include? new_full_name
    end

    unless renamed_formulae.empty?
      @report[:A] -= renamed_formulae.map(&:last)
      @report[:D] -= renamed_formulae.map(&:first)
      @report[:R] = renamed_formulae
    end

    @report
  end

  def updated?
    initial_revision != current_revision
  end

  # For deleted formulae listed in the tap's tap_migrations, moves the
  # installed kegs over to the new tap (or to Homebrew Cask).
  def migrate_tap_migration
    report[:D].each do |full_name|
      name = full_name.split("/").last
      next unless (dir = HOMEBREW_CELLAR/name).exist? # skip if formula is not installed.
      next unless new_tap_name = tap.tap_migrations[name] # skip if formula is not in tap_migrations list.
      tabs = dir.subdirs.map { |d| Tab.for_keg(Keg.new(d)) }
      next unless tabs.first.tap == tap # skip if installed formula is not from this tap.
      new_tap = Tap.fetch(new_tap_name)
      # For formulae migrated to cask: Auto-install cask or provide install instructions.
      if new_tap_name == "caskroom/cask"
        if new_tap.installed? && (HOMEBREW_REPOSITORY/"Caskroom").directory?
          ohai "#{name} has been moved to Homebrew Cask. Installing #{name}..."
          system HOMEBREW_BREW_FILE, "uninstall", "--force", name
          system HOMEBREW_BREW_FILE, "prune"
          system HOMEBREW_BREW_FILE, "cask", "install", name
        else
          ohai "#{name} has been moved to Homebrew Cask.", <<-EOS.undent
            To uninstall the formula and install the cask run:
              brew uninstall --force #{name}
              brew cask install #{name}
          EOS
        end
      else
        new_tap.install unless new_tap.installed?
        # update tap for each Tab
        tabs.each { |tab| tab.tap = new_tap }
        tabs.each(&:write)
      end
    end
  end

  # Migrates installed kegs of renamed formulae to their new names via
  # Migrator; errors are reported but do not abort the loop.
  def migrate_formula_rename
    report[:R].each do |old_full_name, new_full_name|
      old_name = old_full_name.split("/").last
      next unless (dir = HOMEBREW_CELLAR/old_name).directory? && !dir.subdirs.empty?

      begin
        f = Formulary.factory(new_full_name)
      rescue Exception => e
        onoe e if ARGV.homebrew_developer?
        next
      end

      begin
        migrator = Migrator.new(f)
        migrator.migrate
      rescue Migrator::MigratorDifferentTapsError
        # Keg belongs to a different tap; nothing to migrate here.
      rescue Exception => e
        onoe e
      end
    end
  end

  private

  # Environment-variable suffix derived from the tap's path,
  # e.g. "_HOMEBREW_HOMEBREW_CORE".
  def repo_var
    @repo_var ||= tap.path.to_s.
      strip_prefix(Tap::TAP_DIRECTORY.to_s).
      tr("^A-Za-z0-9", "_").
      upcase
  end

  # Name-status diff between the two revisions, restricted to adds,
  # modifications, deletions and renames (85% similarity threshold).
  def diff
    Utils.popen_read(
      "git", "-C", tap.path, "diff-tree", "-r", "--name-status", "--diff-filter=AMDR",
      "-M85%", initial_revision, current_revision
    )
  end
end
# Aggregates the per-tap Reporter results and renders a human-readable
# summary of added, modified, renamed and deleted formulae.
class ReporterHub
  attr_reader :reporters

  def initialize
    @hash = {}
    @reporters = []
  end

  # Formula entries recorded under +key+ (:A, :M, :R or :D); [] when none.
  def select_formula(key)
    @hash.fetch(key, [])
  end

  # Merges a reporter's non-empty report sections into the hub, keeping the
  # reporter itself for later migration passes.
  def add(reporter)
    @reporters << reporter
    pruned = reporter.report.delete_if { |_section, names| names.empty? }
    @hash.update(pruned) { |_section, existing, incoming| existing.concat(incoming) }
  end

  def empty?
    @hash.empty?
  end

  # Prints one section per change kind, in a fixed order.
  def dump
    # Key Legend: Added (A), Copied (C), Deleted (D), Modified (M), Renamed (R)
    { A: "New Formulae",
      M: "Updated Formulae",
      R: "Renamed Formulae",
      D: "Deleted Formulae" }.each do |key, title|
      dump_formula_report(key, title)
    end
  end

  private

  # Prints a single titled, columnised section; silent when empty.
  def dump_formula_report(key, title)
    entries = select_formula(key).sort.map do |name, new_name|
      if key == :R
        # Renamed entries are [old, new] pairs.
        name = pretty_installed(name) if installed?(name)
        new_name = pretty_installed(new_name) if installed?(new_name)
        "#{name} -> #{new_name}"
      else
        installed?(name) ? pretty_installed(name) : name
      end
    end

    return if entries.empty?

    ohai title
    puts_columns(entries)
  end

  # A formula counts as installed when its cellar directory exists.
  def installed?(formula)
    basename = formula.split("/").last
    (HOMEBREW_CELLAR/basename).directory?
  end
end
| 29.789333 | 114 | 0.657954 |
# Homebrew formula for liblastfm, the Qt/C++ client libraries for the
# Last.fm web services.
class Liblastfm < Formula
  desc "Libraries for Last.fm site services"
  homepage "https://github.com/lastfm/liblastfm/"
  url "https://github.com/lastfm/liblastfm/archive/1.0.9.tar.gz"
  sha256 "5276b5fe00932479ce6fe370ba3213f3ab842d70a7d55e4bead6e26738425f7b"
  revision 2

  # Pre-built bottles per macOS release.
  bottle do
    cellar :any
    sha256 "666b4220844dbab8143a3b04fafc086f08d2246d41eaa7fd6fac9b9cfb60db6a" => :mojave
    sha256 "b064d2c0725d5500cb5c9b3dff12e3e01fc10c0855b1763885dd489ab9c3c034" => :high_sierra
    sha256 "ed421bdd81c4643de07048e5d73575bb4a6909fce584c5e5b6760a5103cd0617" => :sierra
    sha256 "40e10cadb1dc55904ae6114a13597e7209596e1d274b94db8ac96f1ebf7da979" => :el_capitan
    sha256 "0d5342788a8f4eb95ea970d2247e829d7dac17db2d43713aacbf4617e742bbba" => :yosemite
  end

  depends_on "cmake" => :build
  depends_on "pkg-config" => :build
  depends_on "fftw"
  depends_on "libsamplerate"
  depends_on "qt"

  # Out-of-source CMake build; the library's own test binaries are built too
  # and kept in share so the `test` block can run them later.
  def install
    mkdir "build" do
      system "cmake", "..", *std_cmake_args
      system "make", "install"
      cd "tests" do
        system "make"
      end
      share.install "tests"
    end
  end

  test do
    # Run one of the bundled test executables against the installed library.
    cp_r "#{share}/tests/.", testpath
    system "./TrackTest"
  end
end
| 30.512821 | 93 | 0.739496 |
ff06b7ff9a7412d50ce186020fd73ee3bde4f260 | 1,430 | require 'spec_helper'
# Specs for the mongodb provider of the mongodb_database Puppet type.
# All provider methods shell out via mongo_eval, which is stubbed below.
describe Puppet::Type.type(:mongodb_database).provider(:mongodb) do
  # Raw JSON as returned by `db.getMongo().getDBs()` in the mongo shell.
  let(:raw_dbs) {
    {
      "databases" => [
        {
          "name" => "admin",
          "sizeOnDisk" => 83886080,
          "empty" => false
        }, {
          "name" => "local",
          "sizeOnDisk" => 83886080,
          "empty" => false
        }
      ],
      "totalSize" => 251658240,
      "ok" => 1
    }.to_json
  }

  # Database names expected to be parsed out of raw_dbs.
  let(:parsed_dbs) { %w(admin local) }

  let(:resource) { Puppet::Type.type(:mongodb_database).new(
    { :ensure => :present,
      :name => 'new_database',
      :provider => described_class.name
    }
  )}

  let(:provider) { resource.provider }

  before :each do
    # Every provider call lists databases through this exact shell command.
    provider.class.stubs(:mongo_eval).with('printjson(db.getMongo().getDBs())').returns(raw_dbs)
  end

  let(:instance) { provider.class.instances.first }

  describe 'self.instances' do
    it 'returns an array of dbs' do
      dbs = provider.class.instances.collect {|x| x.name }
      expect(parsed_dbs).to match_array(dbs)
    end
  end

  describe 'create' do
    it 'makes a database' do
      provider.expects(:mongo_eval)
      provider.create
    end
  end

  describe 'destroy' do
    it 'removes a database' do
      provider.expects(:mongo_eval)
      provider.destroy
    end
  end

  describe 'exists?' do
    it 'checks if database exists' do
      instance.exists?
    end
  end
end
| 21.029412 | 96 | 0.572028 |
# A user's contribution to the 24 Pull Requests event: either a GitHub pull
# request (built from event payloads) or a manually recorded contribution,
# which instead requires body, repo_name and created_at.
class Contribution < ApplicationRecord
  belongs_to :user
  # The GitHub user who merged the PR, matched through their GitHub uid.
  belongs_to :merger, class_name: 'User', foreign_key: :merged_by_id, primary_key: :uid

  # Keep the owner's cached contribution counter in sync on every change.
  after_save { if user then user.update_contribution_count end }
  after_destroy { if user then user.update_contribution_count end }

  # Pull requests must be unique per user+URL; manual (non-PR) contributions
  # must carry descriptive fields instead.
  validates :issue_url, uniqueness: { scope: :user_id }, if: :pull_request?
  validates :body, presence: true, unless: :pull_request?
  validates :repo_name, presence: true, unless: :pull_request?
  validates :created_at, presence: true, unless: :pull_request?

  after_create :autogift, :post_to_firehose

  has_many :gifts

  scope :year, -> (year) { where('EXTRACT(year FROM contributions.created_at) = ?', year) }
  scope :by_language, -> (language) { where('lower(language) = ?', language.downcase) }
  scope :latest, -> (limit) { order('created_at desc').limit(limit) }
  scope :for_aggregation, -> {
    where(AggregationFilter.pull_request_filter)
  }
  # Excludes contributions whose repo belongs to any given organisation
  # (case-insensitive regex on the "org/" prefix of repo_name).
  scope :excluding_organisations, -> (excluded_organisations) {
    excluded_organisations = Array(excluded_organisations)
    where.not("repo_name ~* ?", %{^(#{excluded_organisations.join("|")})/})
  }

  # Event window: 1st through 25th of December of the current event year.
  EARLIEST_PULL_DATE = Date.parse("01/12/#{Tfpullrequests::Application.current_year}").midnight
  LATEST_PULL_DATE = Date.parse("25/12/#{Tfpullrequests::Application.current_year}").midnight

  class << self
    # Users with at least one contribution in the given year.
    def active_users(year)
      User.where(id: Contribution.year(year).map(&:user_id).compact.uniq)
    end

    # Builds and saves a Contribution from a GitHub PullRequestEvent payload.
    def create_from_github(json)
      create(initialize_from_github(json))
    end

    # Maps a GitHub PullRequestEvent payload onto Contribution attributes.
    def initialize_from_github(json)
      {
        title: json['payload']['pull_request']['title'],
        issue_url: json['payload']['pull_request']['_links']['html']['href'],
        created_at: json['payload']['pull_request']['created_at'],
        state: json['payload']['pull_request']['state'],
        body: json['payload']['pull_request']['body'],
        merged: json['payload']['pull_request']['merged'],
        merged_by_id: (json['payload']['pull_request']['merged_by'] || {})['id'],
        repo_name: json['repo']['name'],
        language: json['repo']['language']
      }
    end

    # Whether new contributions are currently accepted (inside the event
    # window and not globally disabled via ENV).
    def in_date_range?
      return false if ENV['DISABLED'].present?
      EARLIEST_PULL_DATE < Time.zone.now && Time.zone.now < LATEST_PULL_DATE
    end
  end

  # Manual contributions carry no state; GitHub PRs always do.
  def pull_request?
    state.present?
  end

  # Refreshes state, comment count and merge info from the GitHub API using
  # the owner's credentials.
  def check_state
    pr = GithubClient.new(user.nickname, user.token).pull_request(repo_name, github_id)
    update(state: pr.state,
           comments_count: pr.comments,
           merged: pr.merged,
           merged_by_id: pr.merged_by.try(:id))
  end

  # Publishes the contribution (with minimal owner details) to the firehose
  # endpoint. Production-only, current-event-year-only.
  def post_to_firehose
    return unless Rails.env.production?
    return unless created_at.year == Tfpullrequests::Application.current_year
    Typhoeus::Request.new(ENV['FIREHOSE_URL'],
                          method: :post,
                          body: self.to_json(include: { user: { only: [:uid, :nickname, :name, :blog, :location] } }),
                          headers: { 'Content-Type' => 'application/json' }).run
  end

  # Tweets the contribution from the owner's linked Twitter account;
  # API failures are logged and swallowed.
  def post_tweet
    return unless created_at.year == Tfpullrequests::Application.current_year
    user.twitter.update(I18n.t 'pull_request.twitter_message', issue_url: issue_url) if user && user.twitter_linked?
  rescue => e
    Rails.logger.error "likely a Twitter API error occurred:\n"\
                       "#{e.inspect}"
  end

  def gifted_state
    gifts.any? ? :gifted : :not_gifted
  end

  # Awards a gift automatically when the PR body mentions "24 pull request(s)".
  def autogift
    return unless created_at.year == Tfpullrequests::Application.current_year
    user.new_gift(contribution: self).save if body && body.scan(/24 ?pull ?request/i).any?
  end

  private

  # PR number taken from the last path segment of the issue URL.
  def github_id
    issue_url.split('/').last
  end
end
| 35.346154 | 116 | 0.675734 |
class Evil::Client
  # Utility to serialize a body/query into one of the supported formats:
  # :json, :yaml, :form, :text or (the fallback) :multipart.
  module Formatter
    extend self

    # Loads concrete formatters called by factory method [#call]
    require_relative "formatter/text"
    require_relative "formatter/form"
    require_relative "formatter/multipart"

    # Factory that formats +source+ according to +format+.
    #
    # @param [Object] source
    # @param [:json, :yaml, :form, :multipart, :text] format
    # @option opts [String] :boundary The boundary for a multipart body
    # @return [String] formatted body (nil when source is nil/false)
    #
    def call(source, format, **opts)
      return unless source

      case format
      when :json then to_json(source)
      when :yaml then to_yaml(source)
      when :form then to_form(source)
      when :text then to_text(source)
      else to_multipart(source, opts)
      end
    end

    private

    def to_json(data)
      JSON.dump(data)
    end

    def to_yaml(data)
      YAML.dump(data)
    end

    def to_text(data)
      Text.call(data)
    end

    def to_form(data)
      Form.call(data)
    end

    def to_multipart(data, opts)
      Multipart.call([data], opts)
    end
  end
end
| 23.980392 | 78 | 0.6574 |
class Container::Jar < Container
  # Container class names that may directly hold a Jar.
  def self.valid_parents
    %w[
      Container::Drawer
      Container::UnitTray
      Container::Cabinet
      Container::Shelf
      Container::Virtual
      Container::VialRack
      Container::Box
    ]
  end
end
| 28 | 153 | 0.709821 |
accd501d748b2f33a9c4040eb48da8f7df66ef3c | 3,219 | # Copyright (c) 2017-present, Facebook, Inc. All rights reserved.
#
# You are hereby granted a non-exclusive, worldwide, royalty-free license to use,
# copy, modify, and distribute this software in source code or binary form for use
# in connection with the web services and APIs provided by Facebook.
#
# As with any software that integrates with the Facebook platform, your use of
# this software is subject to the Facebook Platform Policy
# [http://developers.facebook.com/policy/]. This copyright notice shall be
# included in all copies or substantial portions of the software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# FB:AUTOGEN
module FacebookAds
  # This class is auto-generated.
  # For any issues or feature requests related to this class, please let us know
  # on github and we'll fix in our codegen framework. We'll not be able to accept
  # pull request for this class.
  #
  # Field schema for an ad's asset feed spec (pools of creative assets plus
  # combination rules). This node exposes no direct API operations.
  class AdAssetFeedSpec < AdObject
    # Allowed values for the call_to_action_types field.
    CALL_TO_ACTION_TYPES = [
      "OPEN_LINK",
      "LIKE_PAGE",
      "SHOP_NOW",
      "PLAY_GAME",
      "INSTALL_APP",
      "USE_APP",
      "CALL",
      "CALL_ME",
      "INSTALL_MOBILE_APP",
      "USE_MOBILE_APP",
      "MOBILE_DOWNLOAD",
      "BOOK_TRAVEL",
      "LISTEN_MUSIC",
      "WATCH_VIDEO",
      "LEARN_MORE",
      "SIGN_UP",
      "DOWNLOAD",
      "WATCH_MORE",
      "NO_BUTTON",
      "VISIT_PAGES_FEED",
      "APPLY_NOW",
      "BUY_NOW",
      "GET_OFFER",
      "GET_OFFER_VIEW",
      "BUY_TICKETS",
      "UPDATE_APP",
      "GET_DIRECTIONS",
      "BUY",
      "MESSAGE_PAGE",
      "DONATE",
      "SUBSCRIBE",
      "SAY_THANKS",
      "SELL_NOW",
      "SHARE",
      "DONATE_NOW",
      "GET_QUOTE",
      "CONTACT_US",
      "ORDER_NOW",
      "ADD_TO_CART",
      "VIDEO_ANNOTATION",
      "MOMENTS",
      "RECORD_NOW",
      "GET_SHOWTIMES",
      "LISTEN_NOW",
      "WOODHENGE_SUPPORT",
      "EVENT_RSVP",
      "WHATSAPP_MESSAGE",
      "FOLLOW_NEWS_STORYLINE",
      "SEE_MORE",
    ]

    field :ad_formats, { list: 'string' }
    field :additional_data, 'object'
    field :asset_customization_rules, { list: 'object' }
    field :autotranslate, { list: 'string' }
    field :bodies, { list: 'AdAssetFeedSpecBody' }
    field :call_to_action_types, { list: { enum: -> { CALL_TO_ACTION_TYPES }} }
    field :captions, { list: 'AdAssetFeedSpecCaption' }
    field :descriptions, { list: 'AdAssetFeedSpecDescription' }
    field :groups, { list: 'AdAssetFeedSpecGroupRule' }
    field :images, { list: 'AdAssetFeedSpecImage' }
    field :link_urls, { list: 'AdAssetFeedSpecLinkUrl' }
    field :optimization_type, 'string'
    field :titles, { list: 'AdAssetFeedSpecTitle' }
    field :videos, { list: 'AdAssetFeedSpecVideo' }
    field :id, 'string'

    # This node supports neither reads, writes nor deletes via the API.
    has_no_get
    has_no_post
    has_no_delete
  end
end
| 31.252427 | 82 | 0.663249 |
# Sinatra controller for recipe CRUD. All routes require login; `redirect`
# halts the request in Sinatra, so guard redirects terminate processing.
class RecipesController < ApplicationController

  # GET /recipes — list recipes; anonymous visitors are sent to sign-up.
  get "/recipes" do
    if logged_in?
      erb :"/recipes/index.html"
    else
      redirect "/users/new"
    end
  end

  # GET /recipes/new — render the new-recipe form.
  get "/recipes/new" do
    if logged_in?
      erb :"/recipes/new.html"
    else
      redirect "/users/new"
    end
  end

  # POST /recipes — create a recipe owned by the current user. Blank fields
  # or a duplicate name send the user back to the form.
  post "/recipes" do
    if params[:recipe][:name] != "" && params[:recipe][:description] != "" && params[:recipe][:content] != ""
      # BUG FIX: the original called
      #   Recipe.find_by(name: params[:recipe][:name] != nil)
      # which queried for a recipe whose name equals `true`/`false`, so the
      # duplicate-name check never matched. Query for the submitted name.
      if Recipe.find_by(name: params[:recipe][:name])
        redirect "/recipes/new"
      end
      recipe = Recipe.new(
        name: params[:recipe][:name],
        description: params[:recipe][:description],
        content: params[:recipe][:content]
      )
      user = current_user
      user.recipes << recipe
      user.save
      recipe.save
      redirect "/users/#{user.id}"
    else
      redirect "/recipes/new"
    end
  end

  # GET /recipes/:id — show a single recipe.
  get "/recipes/:id" do
    if logged_in?
      @recipe = Recipe.find(params[:id])
      erb :"/recipes/show.html"
    else
      redirect "/login"
    end
  end

  # GET /recipes/:id/edit — render the edit form.
  get "/recipes/:id/edit" do
    if logged_in?
      @recipe = Recipe.find(params[:id])
      erb :"/recipes/edit.html"
    else
      redirect "/login"
    end
  end

  # PATCH /recipes/:id — update a recipe. Only the owner may edit; blank
  # fields send the user back to the edit form. (De-duplicated: the original
  # re-loaded and re-saved the record once per field.)
  patch "/recipes/:id" do
    recipe = Recipe.find(params[:id])
    redirect "/users/" if session[:user_id] != recipe.user_id
    if params[:name] == "" || params[:description] == "" || params[:content] == ""
      redirect "/recipes/#{params[:id]}/edit"
    end
    recipe.name = params[:name] if params[:name] != ""
    recipe.description = params[:description] if params[:description] != ""
    recipe.content = params[:content] if params[:content] != ""
    recipe.save
    redirect "/users/#{recipe.user_id}"
  end

  # DELETE /recipes/:id/delete — destroy a recipe.
  delete "/recipes/:id/delete" do
    # SECURITY FIX: the original compared session[:user_id] with
    # current_user.id, which is true for EVERY logged-in user, allowing
    # anyone to delete any recipe. Authorize against the recipe's owner.
    recipe = Recipe.find(params[:id])
    if logged_in? && recipe.user_id == current_user.id
      recipe.destroy
      redirect "/recipes"
    else
      redirect "/recipes/#{params[:id]}"
    end
  end
end
| 22.427273 | 135 | 0.576003 |
module NintendoEshop
  # Current gem release (frozen, deduplicated string).
  VERSION = -"0.2.1"
end
| 13 | 26 | 0.730769 |
require_relative './lib/books_dl'

# How to find a book_id:
# Open the reader page of the book you want to download and look at the URL
# in the address bar, e.g.:
# https://viewer-ebook.books.com.tw/viewer/epub/web/?book_uni_id=E050017049_reflowable_normal
# The string after "book_uni_id=" is the book_id of that book.
#
# book_id = 'E050017049_reflowable_normal'

# Download the first book by passing its id directly.
downloader = BooksDL::Downloader.new('E050113792_reflowable_normal')
downloader.perform

# Download a second book, id held in a variable.
book_id = 'E050013173_reflowable_normal'
downloader = BooksDL::Downloader.new(book_id)
downloader.perform
| 26.764706 | 95 | 0.808791 |
623d49930bf640c1e52c5655f931a0f10db8dc04 | 394 | # Be sure to restart your server when you modify this file.
#S13::Application.config.session_store :cookie_store, :key => '_s13_session'
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with "rake db:sessions:create")
S13::Application.config.session_store :active_record_store
| 43.777778 | 76 | 0.794416 |
08e0d357c8f050b452b40c1e6b11912f04231fa3 | 40,088 | require "kitchen"
autoload :MsRestAzure, "ms_rest_azure"
require_relative "azure_credentials"
require "securerandom" unless defined?(SecureRandom)
module Azure
autoload :Resources, "azure_mgmt_resources"
autoload :Network, "azure_mgmt_network"
end
require "base64" unless defined?(Base64)
autoload :SSHKey, "sshkey"
require "fileutils" unless defined?(FileUtils)
require "erb" unless defined?(Erb)
require "ostruct" unless defined?(OpenStruct)
require "json" unless defined?(JSON)
autoload :Faraday, "faraday"
module Kitchen
module Driver
#
# Azurerm
# Create a new resource group object and set the location and tags attributes then return it.
#
# @return [::Azure::Resources::Profiles::Latest::Mgmt::Models::ResourceGroup] A new resource group object.
class Azurerm < Kitchen::Driver::Base
attr_accessor :resource_management_client
attr_accessor :network_management_client
kitchen_driver_api_version 2
default_config(:azure_resource_group_prefix) do |_config|
"kitchen-"
end
default_config(:azure_resource_group_suffix) do |_config|
""
end
default_config(:azure_resource_group_name) do |config|
config.instance.name.to_s
end
default_config(:explicit_resource_group_name) do |_config|
nil
end
default_config(:resource_group_tags) do |_config|
{}
end
default_config(:image_urn) do |_config|
"Canonical:UbuntuServer:14.04.3-LTS:latest"
end
default_config(:image_url) do |_config|
""
end
default_config(:image_id) do |_config|
""
end
default_config(:use_ephemeral_osdisk) do |_config|
false
end
default_config(:os_disk_size_gb) do |_config|
""
end
default_config(:os_type) do |_config|
"linux"
end
default_config(:custom_data) do |_config|
""
end
default_config(:username) do |_config|
"azure"
end
default_config(:password) do |_config|
SecureRandom.base64(25)
end
default_config(:vm_name) do |_config|
"vm"
end
default_config(:nic_name) do |_config|
""
end
default_config(:vnet_id) do |_config|
""
end
default_config(:subnet_id) do |_config|
""
end
default_config(:storage_account_type) do |_config|
"Standard_LRS"
end
default_config(:existing_storage_account_blob_url) do |_config|
""
end
default_config(:existing_storage_account_container) do |_config|
"vhds"
end
default_config(:boot_diagnostics_enabled) do |_config|
"true"
end
default_config(:winrm_powershell_script) do |_config|
false
end
default_config(:azure_environment) do |_config|
"Azure"
end
default_config(:pre_deployment_template) do |_config|
""
end
default_config(:pre_deployment_parameters) do |_config|
{}
end
default_config(:post_deployment_template) do |_config|
""
end
default_config(:post_deployment_parameters) do |_config|
{}
end
default_config(:plan) do |_config|
{}
end
default_config(:vm_tags) do |_config|
{}
end
default_config(:public_ip) do |_config|
false
end
default_config(:use_managed_disks) do |_config|
true
end
default_config(:data_disks) do |_config|
nil
end
default_config(:format_data_disks) do |_config|
false
end
default_config(:format_data_disks_powershell_script) do |_config|
false
end
default_config(:system_assigned_identity) do |_config|
false
end
default_config(:user_assigned_identities) do |_config|
[]
end
default_config(:destroy_explicit_resource_group) do |_config|
true
end
default_config(:destroy_explicit_resource_group_tags) do |_config|
true
end
default_config(:destroy_resource_group_contents) do |_config|
false
end
default_config(:deployment_sleep) do |_config|
10
end
default_config(:secret_url) do |_config|
""
end
default_config(:vault_name) do |_config|
""
end
default_config(:vault_resource_group) do |_config|
""
end
default_config(:subscription_id) do |_config|
ENV["AZURE_SUBSCRIPTION_ID"]
end
default_config(:public_ip_sku) do |_config|
"Basic"
end
default_config(:azure_api_retries) do |_config|
5
end
default_config(:use_fqdn_hostname) do |_config|
false
end
def create(state)
state = validate_state(state)
deployment_parameters = {
location: config[:location],
vmSize: config[:machine_size],
storageAccountType: config[:storage_account_type],
bootDiagnosticsEnabled: config[:boot_diagnostics_enabled],
newStorageAccountName: "storage#{state[:uuid]}",
adminUsername: config[:username],
dnsNameForPublicIP: "kitchen-#{state[:uuid]}",
vmName: state[:vm_name],
systemAssignedIdentity: config[:system_assigned_identity],
userAssignedIdentities: config[:user_assigned_identities].map { |identity| [identity, {}] }.to_h,
secretUrl: config[:secret_url],
vaultName: config[:vault_name],
vaultResourceGroup: config[:vault_resource_group],
}
if instance.transport[:ssh_key].nil?
deployment_parameters[:adminPassword] = config[:password]
end
deployment_parameters[:publicIPSKU] = config[:public_ip_sku]
if config[:public_ip_sku] == "Standard"
deployment_parameters[:publicIPAddressType] = "Static"
end
if config[:subscription_id].to_s == ""
raise "A subscription_id config value was not detected and kitchen-azurerm cannot continue. Please check your kitchen.yml configuration. Exiting."
end
if config[:nic_name].to_s == ""
vmnic = "nic-#{config[:vm_name]}"
else
vmnic = config[:nic_name]
end
deployment_parameters["nicName"] = vmnic.to_s
if config[:custom_data].to_s != ""
deployment_parameters["customData"] = prepared_custom_data
end
# When deploying in a shared storage account, we needs to add
# a unique suffix to support multiple kitchen instances
if config[:existing_storage_account_blob_url].to_s != ""
deployment_parameters["osDiskNameSuffix"] = "-#{state[:azure_resource_group_name]}"
end
if config[:existing_storage_account_blob_url].to_s != ""
deployment_parameters["existingStorageAccountBlobURL"] = config[:existing_storage_account_blob_url]
end
if config[:existing_storage_account_container].to_s != ""
deployment_parameters["existingStorageAccountBlobContainer"] = config[:existing_storage_account_container]
end
if config[:os_disk_size_gb].to_s != ""
deployment_parameters["osDiskSizeGb"] = config[:os_disk_size_gb]
end
# The three deployment modes
# a) Private Image: Managed VM Image (by id)
# b) Private Image: Using a VHD URL (note: we must use existing_storage_account_blob_url due to azure limitations)
# c) Public Image: Using a marketplace image (urn)
if config[:image_id].to_s != ""
deployment_parameters["imageId"] = config[:image_id]
elsif config[:image_url].to_s != ""
deployment_parameters["imageUrl"] = config[:image_url]
deployment_parameters["osType"] = config[:os_type]
else
image_publisher, image_offer, image_sku, image_version = config[:image_urn].split(":", 4)
deployment_parameters["imagePublisher"] = image_publisher
deployment_parameters["imageOffer"] = image_offer
deployment_parameters["imageSku"] = image_sku
deployment_parameters["imageVersion"] = image_version
end
options = Kitchen::Driver::AzureCredentials.new(subscription_id: config[:subscription_id],
environment: config[:azure_environment]).azure_options
debug "Azure environment: #{config[:azure_environment]}"
@resource_management_client = ::Azure::Resources::Profiles::Latest::Mgmt::Client.new(options)
# Create Resource Group
begin
info "Creating Resource Group: #{state[:azure_resource_group_name]}"
create_resource_group(state[:azure_resource_group_name], get_resource_group)
rescue ::MsRestAzure::AzureOperationError => operation_error
error operation_error.body
raise operation_error
end
# Execute deployment steps
begin
if File.file?(config[:pre_deployment_template])
pre_deployment_name = "pre-deploy-#{state[:uuid]}"
info "Creating deployment: #{pre_deployment_name}"
create_deployment_async(state[:azure_resource_group_name], pre_deployment_name, pre_deployment(config[:pre_deployment_template], config[:pre_deployment_parameters])).value!
follow_deployment_until_end_state(state[:azure_resource_group_name], pre_deployment_name)
end
deployment_name = "deploy-#{state[:uuid]}"
info "Creating deployment: #{deployment_name}"
create_deployment_async(state[:azure_resource_group_name], deployment_name, deployment(deployment_parameters)).value!
follow_deployment_until_end_state(state[:azure_resource_group_name], deployment_name)
state[:username] = deployment_parameters[:adminUsername] unless existing_state_value?(state, :username)
state[:password] = deployment_parameters[:adminPassword] unless existing_state_value?(state, :password) && instance.transport[:ssh_key].nil?
if File.file?(config[:post_deployment_template])
post_deployment_name = "post-deploy-#{state[:uuid]}"
info "Creating deployment: #{post_deployment_name}"
create_deployment_async(state[:azure_resource_group_name], post_deployment_name, post_deployment(config[:post_deployment_template], config[:post_deployment_parameters])).value!
follow_deployment_until_end_state(state[:azure_resource_group_name], post_deployment_name)
end
rescue ::MsRestAzure::AzureOperationError => operation_error
rest_error = operation_error.body["error"]
deployment_active = rest_error["code"] == "DeploymentActive"
if deployment_active
info "Deployment for resource group #{state[:azure_resource_group_name]} is ongoing."
info "If you need to change the deployment template you'll need to rerun `kitchen create` for this instance."
else
info rest_error
raise operation_error
end
end
@network_management_client = ::Azure::Network::Profiles::Latest::Mgmt::Client.new(options)
if config[:vnet_id] == "" || config[:public_ip]
# Retrieve the public IP from the resource group:
result = get_public_ip(state[:azure_resource_group_name], "publicip")
info "IP Address is: #{result.ip_address} [#{result.dns_settings.fqdn}]"
state[:hostname] = result.ip_address
if config[:use_fqdn_hostname]
info "Using FQDN to communicate instead of IP"
state[:hostname] = result.dns_settings.fqdn
end
else
# Retrieve the internal IP from the resource group:
result = get_network_interface(state[:azure_resource_group_name], vmnic.to_s)
info "IP Address is: #{result.ip_configurations[0].private_ipaddress}"
state[:hostname] = result.ip_configurations[0].private_ipaddress
end
end
# Predicate: does +state+ already carry a usable (non-nil) value for
# +property+?
#
# @param state [Hash] existing state values.
# @param property [Symbol] the property to check.
# @return [Boolean] true when the key is present with a non-nil value.
def existing_state_value?(state, property)
  !state.fetch(property, nil).nil?
end
# Seed any missing state entries from generated defaults or the driver
# configuration, leaving already-populated entries alone.
#
# @param state [Hash] existing state values.
# @return [Hash] the same hash, updated in place.
def validate_state(state = {})
  state[:uuid] = SecureRandom.hex(8) unless existing_state_value?(state, :uuid)
  state[:server_id] = "vm#{state[:uuid]}" unless existing_state_value?(state, :server_id)
  unless existing_state_value?(state, :azure_resource_group_name)
    state[:azure_resource_group_name] = azure_resource_group_name
  end
  %i{subscription_id vm_name azure_environment use_managed_disks}.each do |config_element|
    next if existing_state_value?(state, config_element)
    state[config_element] = config[config_element]
  end
  # A transport SSH key supersedes any password kept in state.
  state.delete(:password) unless instance.transport[:ssh_key].nil?
  state
end
# Resolve the resource group name: an explicitly configured name wins;
# otherwise compose prefix + configured name + UTC timestamp + suffix.
#
# @return [String] the resource group name to use.
def azure_resource_group_name
  explicit_name = config[:explicit_resource_group_name]
  return explicit_name if explicit_name
  timestamp = Time.now.utc.strftime "%Y%m%dT%H%M%S"
  "#{config[:azure_resource_group_prefix]}#{config[:azure_resource_group_name]}-#{timestamp}#{config[:azure_resource_group_suffix]}"
end
# Render the configured data disks as the JSON array consumed by the
# deployment template. Only managed disks are supported; with unmanaged
# storage a warning is emitted and an empty array is returned.
#
# @return [String, nil] JSON array of disk definitions, or nil when no
#   data disks are configured.
def data_disks_for_vm_json
  return nil if config[:data_disks].nil?
  unless config[:use_managed_disks]
    warn 'Data disks are only supported when used with the "use_managed_disks" option. No additional disks were added to the configuration.'
    return [].to_json
  end
  disks = config[:data_disks].map do |data_disk|
    { name: "datadisk#{data_disk[:lun]}", lun: data_disk[:lun], diskSizeGB: data_disk[:disk_size_gb], createOption: "Empty" }
  end
  debug "Additional disks being added to configuration: #{disks.inspect}"
  disks.to_json
end
# Post-process the base deployment template for the active transport:
# a WinRM transport (except on Nano Server) gets a Base64 customData
# bootstrap script plus unattend content; a transport SSH key is
# injected as the VM's linuxConfiguration.
#
# @return [String] the adjusted deployment template as a JSON string.
def template_for_transport_name
template = JSON.parse(virtual_machine_deployment_template)
if instance.transport.name.casecmp("winrm") == 0
# Nano Server platforms are skipped — no WinRM bootstrap is applied there.
if instance.platform.name.index("nano").nil?
info "Adding WinRM configuration to provisioning profile."
encoded_command = Base64.strict_encode64(custom_data_script_windows)
template["resources"].select { |h| h["type"] == "Microsoft.Compute/virtualMachines" }.each do |resource|
resource["properties"]["osProfile"]["customData"] = encoded_command
resource["properties"]["osProfile"]["windowsConfiguration"] = windows_unattend_content
end
end
end
unless instance.transport[:ssh_key].nil?
info "Adding public key from #{File.expand_path(instance.transport[:ssh_key])}.pub to the deployment."
public_key = public_key_for_deployment(File.expand_path(instance.transport[:ssh_key]))
template["resources"].select { |h| h["type"] == "Microsoft.Compute/virtualMachines" }.each do |resource|
resource["properties"]["osProfile"]["linuxConfiguration"] = JSON.parse(custom_linux_configuration(public_key))
end
end
template.to_json
end
# Resolve the SSH public key to embed in the deployment.
#
# When the private key file does not exist yet, a fresh RSA key pair is
# generated and both halves are written to disk with mode 0600; the new
# public key is returned. Otherwise the transport's configured public
# key file is read, falling back to "<private_key>.pub".
#
# @param private_key_filename [String] path to the private key file.
# @return [String] the public key material, stripped of whitespace.
def public_key_for_deployment(private_key_filename)
  unless File.file?(private_key_filename)
    generated_key = SSHKey.generate
    ::FileUtils.mkdir_p(File.dirname(private_key_filename))
    # Block form guarantees the handles are closed even if a write raises.
    File.open(private_key_filename, "w") do |file|
      file.syswrite(generated_key.private_key)
      file.chmod(0600)
    end
    File.open("#{private_key_filename}.pub", "w") do |file|
      file.syswrite(generated_key.ssh_public_key)
      file.chmod(0600)
    end
    return generated_key.ssh_public_key.strip
  end
  public_key_path = if instance.transport[:ssh_public_key].nil?
                      "#{private_key_filename}.pub"
                    else
                      instance.transport[:ssh_public_key]
                    end
  File.read(public_key_path).strip
end
# Build an Incremental-mode ARM deployment from an on-disk template file
# and a parameters hash.
#
# @param pre_deployment_template_filename [String] path to the template.
# @param pre_deployment_parameters [Hash] parameter name/value pairs.
# @return [::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment]
def pre_deployment(pre_deployment_template_filename, pre_deployment_parameters)
  properties = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentProperties.new
  properties.mode = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentMode::Incremental
  properties.template = JSON.parse(::File.read(pre_deployment_template_filename))
  properties.parameters = parameters_in_values_format(pre_deployment_parameters)
  debug(properties.template)
  pre_deployment = ::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment.new
  pre_deployment.properties = properties
  pre_deployment
end
# Build the main Incremental-mode ARM deployment from the transport-
# adjusted template and the given parameters.
#
# @param parameters [Hash] parameter name/value pairs.
# @return [::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment]
def deployment(parameters)
  properties = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentProperties.new
  properties.mode = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentMode::Incremental
  properties.template = JSON.parse(template_for_transport_name)
  properties.parameters = parameters_in_values_format(parameters)
  debug(JSON.pretty_generate(properties.template))
  deployment = ::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment.new
  deployment.properties = properties
  deployment
end
# Build an Incremental-mode ARM deployment for the post-deploy phase
# from an on-disk template file and a parameters hash.
#
# @param post_deployment_template_filename [String] path to the template.
# @param post_deployment_parameters [Hash] parameter name/value pairs.
# @return [::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment]
def post_deployment(post_deployment_template_filename, post_deployment_parameters)
  properties = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentProperties.new
  properties.mode = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentMode::Incremental
  properties.template = JSON.parse(::File.read(post_deployment_template_filename))
  properties.parameters = parameters_in_values_format(post_deployment_parameters)
  debug(properties.template)
  post_deployment = ::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment.new
  post_deployment.properties = properties
  post_deployment
end
# Build a Complete-mode deployment from the empty template; deploying it
# removes every resource in the target resource group.
#
# @return [::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment]
def empty_deployment
  properties = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentProperties.new
  properties.mode = ::Azure::Resources::Profiles::Latest::Mgmt::Models::DeploymentMode::Complete
  properties.template = JSON.parse(virtual_machine_deployment_template_file("empty.erb", nil))
  debug(JSON.pretty_generate(properties.template))
  empty_deployment = ::Azure::Resources::Profiles::Latest::Mgmt::Models::Deployment.new
  empty_deployment.properties = properties
  empty_deployment
end
# Render VM tags as JSON-style `"key": "value"` pairs, one per line,
# comma-separated with no trailing separator.
#
# @param vm_tags_in [Hash] tag names mapped to values.
# @return [String] the rendered fragment; "" when no tags are given.
def vm_tag_string(vm_tags_in)
  vm_tags_in.map { |name, value| "\"#{name}\": \"#{value}\"" }.join(",\n")
end
# Wrap each parameter value in the { "value" => ... } envelope expected
# by the ARM deployment API.
#
# @param parameters_in [Hash] parameter name/value pairs.
# @return [Hash, nil] symbol-keyed envelope hash; nil for empty input.
def parameters_in_values_format(parameters_in)
  parameters_in.inject(nil) do |accumulated, (name, value)|
    (accumulated || {}).merge!(name.to_sym => { "value" => value })
  end
end
# Poll an ARM deployment until it reaches a terminal provisioning state
# (Canceled/Failed/Deleted/Succeeded), logging outstanding operations on
# each pass and surfacing failed operations at the end.
#
# @param resource_group [String] resource group containing the deployment.
# @param deployment_name [String] the deployment to follow.
def follow_deployment_until_end_state(resource_group, deployment_name)
  # Hoisted out of the loop: previously the state list was re-split on
  # every polling iteration.
  end_provisioning_states = %w{Canceled Failed Deleted Succeeded}
  deployment_provisioning_state = nil
  loop do
    list_outstanding_deployment_operations(resource_group, deployment_name)
    sleep config[:deployment_sleep]
    deployment_provisioning_state = get_deployment_state(resource_group, deployment_name)
    break if end_provisioning_states.include?(deployment_provisioning_state)
  end
  info "Resource Template deployment reached end state of '#{deployment_provisioning_state}'."
  show_failed_operations(resource_group, deployment_name) if deployment_provisioning_state == "Failed"
end
# Inspect every operation of a deployment and raise on the first one
# whose status code is not "OK", using its status message as the error.
#
# @param resource_group [String] resource group containing the deployment.
# @param deployment_name [String] the deployment to inspect.
# @raise [RuntimeError] when a non-OK operation is found.
def show_failed_operations(resource_group, deployment_name)
  list_deployment_operations(resource_group, deployment_name).each do |operation|
    next if operation.properties.status_code == "OK"
    raise operation.properties.status_message.inspect.to_s
  end
end
# Log every operation of a deployment that has not yet reached a
# terminal state (Failed/Succeeded).
#
# @param resource_group [String] resource group containing the deployment.
# @param deployment_name [String] the deployment whose operations to list.
def list_outstanding_deployment_operations(resource_group, deployment_name)
  # Hoisted out of the loop: previously the state list was re-split for
  # every operation. Explicit nil defaults replace the implicit nils the
  # parser created when target_resource was absent.
  end_operation_states = %w{Failed Succeeded}
  deployment_operations = list_deployment_operations(resource_group, deployment_name)
  deployment_operations.each do |val|
    resource_provisioning_state = val.properties.provisioning_state
    resource_name = nil
    resource_type = nil
    unless val.properties.target_resource.nil?
      resource_name = val.properties.target_resource.resource_name
      resource_type = val.properties.target_resource.resource_type
    end
    next if end_operation_states.include?(resource_provisioning_state)
    info "Resource #{resource_type} '#{resource_name}' provisioning status is #{resource_provisioning_state}"
  end
end
# Tear down the Azure resources created for this instance.
#
# Behavior depends on configuration:
# * with no server in state but an explicit resource group name and
#   destroy_explicit_resource_group set, the group is deleted directly;
# * destroy_resource_group_contents deploys an empty Complete-mode
#   template, deleting every resource while keeping the group;
# * destroy_explicit_resource_group set to false leaves the group alone;
# * otherwise the whole resource group is deleted asynchronously.
#
# @param state [Hash] mutable driver state; connection details are
#   cleared on success.
def destroy(state)
# TODO: We have some not so fun state issues we need to clean up
state[:azure_environment] = config[:azure_environment] unless state[:azure_environment]
state[:subscription_id] = config[:subscription_id] unless state[:subscription_id]
# Setup our authentication components for the SDK
options = Kitchen::Driver::AzureCredentials.new(subscription_id: state[:subscription_id],
environment: state[:azure_environment]).azure_options
@resource_management_client = ::Azure::Resources::Profiles::Latest::Mgmt::Client.new(options)
# If we don't have any instances, let's check to see if the user wants to delete a resource group and if so let's delete!
if state[:server_id].nil? && state[:azure_resource_group_name].nil? && !config[:explicit_resource_group_name].nil? && config[:destroy_explicit_resource_group]
if resource_group_exists?(config[:explicit_resource_group_name])
info "This instance doesn't exist but you asked to delete the resource group."
begin
info "Destroying Resource Group: #{config[:explicit_resource_group_name]}"
delete_resource_group_async(config[:explicit_resource_group_name])
info "Destroy operation accepted and will continue in the background."
return
rescue ::MsRestAzure::AzureOperationError => operation_error
error operation_error.body
raise operation_error
end
end
end
# Our working environment
info "Azure environment: #{state[:azure_environment]}"
# Skip if we don't have any instances
return if state[:server_id].nil?
# Destroy resource group contents
if config[:destroy_resource_group_contents] == true
info "Destroying individual resources within the Resource Group."
empty_deployment_name = "empty-deploy-#{state[:uuid]}"
begin
# A Complete-mode deployment of an empty template removes every resource
# in the group while keeping the group itself.
info "Creating deployment: #{empty_deployment_name}"
create_deployment_async(state[:azure_resource_group_name], empty_deployment_name, empty_deployment).value!
follow_deployment_until_end_state(state[:azure_resource_group_name], empty_deployment_name)
# NOTE: We are using the internal wrapper function create_resource_group() which wraps the API
# method of create_or_update()
begin
# Maintain tags on the resource group
create_resource_group(state[:azure_resource_group_name], get_resource_group) unless config[:destroy_explicit_resource_group_tags] == true
warn 'The "destroy_explicit_resource_group_tags" setting value is set to "false". The tags on the resource group will NOT be removed.' unless config[:destroy_explicit_resource_group_tags] == true
# Corner case where we want to use kitchen to remove the tags
resource_group = get_resource_group
resource_group.tags = {}
create_resource_group(state[:azure_resource_group_name], resource_group) unless config[:destroy_explicit_resource_group_tags] == false
warn 'The "destroy_explicit_resource_group_tags" setting value is set to "true". The tags on the resource group will be removed.' unless config[:destroy_explicit_resource_group_tags] == false
rescue ::MsRestAzure::AzureOperationError => operation_error
error operation_error.body
raise operation_error
end
rescue ::MsRestAzure::AzureOperationError => operation_error
error operation_error.body
raise operation_error
end
end
# Do not remove the explicitly named resource group
if config[:destroy_explicit_resource_group] == false && !config[:explicit_resource_group_name].nil?
warn 'The "destroy_explicit_resource_group" setting value is set to "false". The resource group will not be deleted.'
warn 'Remember to manually destroy resources, or set "destroy_resource_group_contents: true" to save costs!' unless config[:destroy_resource_group_contents] == true
return state
end
# Destroy the world
begin
info "Destroying Resource Group: #{state[:azure_resource_group_name]}"
delete_resource_group_async(state[:azure_resource_group_name])
info "Destroy operation accepted and will continue in the background."
# Remove resource group name from driver state
state.delete(:azure_resource_group_name)
rescue ::MsRestAzure::AzureOperationError => operation_error
error operation_error.body
raise operation_error
end
# Clear state of components
state.delete(:server_id)
state.delete(:hostname)
state.delete(:username)
state.delete(:password)
end
# PowerShell used to enable WinRM on the VM: creates HTTPS (5986) and
# HTTP (5985) listeners (the HTTPS one backed by a self-signed
# certificate), opens matching firewall rules, and enables
# Basic/Negotiate/CredSSP auth with unencrypted traffic allowed. A
# user-supplied config[:winrm_powershell_script] takes precedence.
# NOTE(review): Basic auth over unencrypted HTTP is acceptable only for
# ephemeral test VMs.
def enable_winrm_powershell_script
config[:winrm_powershell_script] ||
<<-PS1
$cert = New-SelfSignedCertificate -DnsName $env:COMPUTERNAME -CertStoreLocation Cert:\\LocalMachine\\My
$config = '@{CertificateThumbprint="' + $cert.Thumbprint + '"}'
winrm create winrm/config/listener?Address=*+Transport=HTTPS $config
winrm create winrm/config/Listener?Address=*+Transport=HTTP
winrm set winrm/config/service/auth '@{Basic="true";Kerberos="false";Negotiate="true";Certificate="false";CredSSP="true"}'
New-NetFirewallRule -DisplayName "Windows Remote Management (HTTPS-In)" -Name "Windows Remote Management (HTTPS-In)" -Profile Any -LocalPort 5986 -Protocol TCP
winrm set winrm/config/service '@{AllowUnencrypted="true"}'
New-NetFirewallRule -DisplayName "Windows Remote Management (HTTP-In)" -Name "Windows Remote Management (HTTP-In)" -Profile Any -LocalPort 5985 -Protocol TCP
PS1
end
# PowerShell that initializes every raw disk on the VM, assigns the next
# free drive letter from F..Z, and formats it NTFS with the label
# "datadisk". Returns nil unless config[:format_data_disks] is set; a
# user-supplied config[:format_data_disks_powershell_script] takes
# precedence over the built-in script.
def format_data_disks_powershell_script
return unless config[:format_data_disks]
info "Data disks will be initialized and formatted NTFS automatically." unless config[:data_disks].nil?
config[:format_data_disks_powershell_script] ||
<<-PS1
Write-Host "Initializing and formatting raw disks"
$disks = Get-Disk | where partitionstyle -eq 'raw'
$letters = New-Object System.Collections.ArrayList
$letters.AddRange( ('F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z') )
Function AvailableVolumes() {
$currentDrives = get-volume
ForEach ($v in $currentDrives) {
if ($letters -contains $v.DriveLetter.ToString()) {
Write-Host "Drive letter $($v.DriveLetter) is taken, moving to next letter"
$letters.Remove($v.DriveLetter.ToString())
}
}
}
ForEach ($d in $disks) {
AvailableVolumes
$driveLetter = $letters[0]
Write-Host "Creating volume $($driveLetter)"
$d | Initialize-Disk -PartitionStyle GPT -PassThru | New-Partition -DriveLetter $driveLetter -UseMaximumSize
# Prevent error ' Cannot perform the requested operation while the drive is read only'
Start-Sleep 1
Format-Volume -FileSystem NTFS -NewFileSystemLabel "datadisk" -DriveLetter $driveLetter -Confirm:$false
}
PS1
end
# Composite customData script for Windows images: enables WinRM,
# optionally formats data disks, then logs off. It is delivered via the
# VM's customData and executed at first logon by the FirstLogonCommands
# defined in windows_unattend_content.
def custom_data_script_windows
<<-EOH
#{enable_winrm_powershell_script}
#{format_data_disks_powershell_script}
logoff
EOH
end
# ARM osProfile.linuxConfiguration fragment: disables password
# authentication and authorizes the given SSH public key for the admin
# user's authorized_keys.
#
# @param public_key [String] SSH public key material (single line).
# @return [String] a JSON object rendered as a string.
def custom_linux_configuration(public_key)
<<-EOH
{
"disablePasswordAuthentication": "true",
"ssh": {
"publicKeys": [
{
"path": "[concat('/home/',parameters('adminUsername'),'/.ssh/authorized_keys')]",
"keyData": "#{public_key}"
}
]
}
}
EOH
end
# Additional unattend.xml content for the Windows OS profile. The
# FirstLogonCommands entry copies the delivered customData to
# C:\Config.ps1 and runs it with PowerShell; the AutoLogon entry logs
# the admin user in once so those first-logon commands execute.
#
# @return [Hash] structure serialized into the deployment template.
def windows_unattend_content
{
additionalUnattendContent: [
{
passName: "oobeSystem",
componentName: "Microsoft-Windows-Shell-Setup",
settingName: "FirstLogonCommands",
content: '<FirstLogonCommands><SynchronousCommand><CommandLine>cmd /c "copy C:\\AzureData\\CustomData.bin C:\\Config.ps1"</CommandLine><Description>copy</Description><Order>1</Order></SynchronousCommand><SynchronousCommand><CommandLine>%windir%\\System32\\WindowsPowerShell\\v1.0\\powershell.exe -NoProfile -ExecutionPolicy Bypass -file C:\\Config.ps1</CommandLine><Description>script</Description><Order>2</Order></SynchronousCommand></FirstLogonCommands>',
},
{
passName: "oobeSystem",
componentName: "Microsoft-Windows-Shell-Setup",
settingName: "AutoLogon",
content: "[concat('<AutoLogon><Password><Value>', parameters('adminPassword'), '</Value></Password><Enabled>true</Enabled><LogonCount>1</LogonCount><Username>', parameters('adminUserName'), '</Username></AutoLogon>')]",
},
],
}
end
# Render the ARM template matching the network configuration:
# "internal.erb" (with vnet/subnet data) when a custom vnet is
# configured, otherwise "public.erb".
#
# @return [String] the rendered deployment template.
def virtual_machine_deployment_template
  template_data = {
    vm_tags: vm_tag_string(config[:vm_tags]),
    use_managed_disks: config[:use_managed_disks],
    image_url: config[:image_url],
    storage_account_type: config[:storage_account_type],
    existing_storage_account_blob_url: config[:existing_storage_account_blob_url],
    image_id: config[:image_id],
    existing_storage_account_container: config[:existing_storage_account_container],
    custom_data: config[:custom_data],
    os_disk_size_gb: config[:os_disk_size_gb],
    data_disks_for_vm_json: data_disks_for_vm_json,
    use_ephemeral_osdisk: config[:use_ephemeral_osdisk],
    ssh_key: instance.transport[:ssh_key],
    plan_json: plan_json,
  }
  if config[:vnet_id] == ""
    virtual_machine_deployment_template_file("public.erb", template_data)
  else
    info "Using custom vnet: #{config[:vnet_id]}"
    vnet_data = {
      vnet_id: config[:vnet_id],
      subnet_id: config[:subnet_id],
      public_ip: config[:public_ip],
      public_ip_sku: config[:public_ip_sku],
    }
    virtual_machine_deployment_template_file("internal.erb", template_data.merge(vnet_data))
  end
end
# Serialize the configured marketplace plan (name/product/promotionCode/
# publisher) to JSON, omitting unset entries.
#
# @return [String, nil] JSON object, or nil when no plan is configured.
def plan_json
  return nil if config[:plan].empty?
  key_map = {
    "name" => :name,
    "product" => :product,
    "promotionCode" => :promotion_code,
    "publisher" => :publisher,
  }
  plan = key_map.each_with_object({}) do |(json_key, config_key), collected|
    value = config[:plan][config_key]
    collected[json_key] = value if value
  end
  plan.to_json
end
# Render an ERB template from the driver's templates directory, exposing
# +data+ to the template through an OpenStruct binding.
#
# @param template_file [String] file name under ../../../templates.
# @param data [Hash, nil] values made available inside the template.
# @return [String] the rendered template.
def virtual_machine_deployment_template_file(template_file, data = {})
  template = File.read(File.expand_path(File.join(__dir__, "../../../templates", template_file)))
  render_binding = OpenStruct.new(data)
  # ERB's positional safe_level/trim_mode arguments were removed in
  # Ruby 3.2; the trim_mode keyword (Ruby >= 2.6) expresses the same
  # "-" trimming behavior.
  ERB.new(template, trim_mode: "-").result(render_binding.instance_eval { binding })
end
# Map an Azure environment name (case-insensitive) to its Resource
# Manager endpoint URL.
#
# @param azure_environment [String] e.g. "Azure", "AzureChina".
# @return [String, nil] the endpoint URL, or nil when unrecognized.
def resource_manager_endpoint_url(azure_environment)
  environment =
    case azure_environment.downcase
    when "azureusgovernment" then MsRestAzure::AzureEnvironments::AzureUSGovernment
    when "azurechina" then MsRestAzure::AzureEnvironments::AzureChinaCloud
    when "azuregermancloud" then MsRestAzure::AzureEnvironments::AzureGermanCloud
    when "azure" then MsRestAzure::AzureEnvironments::AzureCloud
    end
  environment&.resource_manager_endpoint_url
end
# Base64-encode the configured custom data, reading it from disk first
# when config[:custom_data] names an existing file. Memoized after the
# first successful encoding.
#
# @return [String, nil] Base64 payload, or nil when unconfigured.
def prepared_custom_data
  custom_data = config[:custom_data]
  return nil if custom_data.nil?
  @custom_data ||= begin
    payload = File.file?(custom_data) ? File.read(custom_data) : custom_data
    Base64.strict_encode64(payload)
  end
end
private
#
# Wrapper methods for the Azure API calls to retry the calls when getting timeouts.
#
# Build a ResourceGroup model pre-populated with the configured
# location and tags.
#
# @return [::Azure::Resources::Profiles::Latest::Mgmt::Models::ResourceGroup]
def get_resource_group
  ::Azure::Resources::Profiles::Latest::Mgmt::Models::ResourceGroup.new.tap do |resource_group|
    resource_group.location = config[:location]
    resource_group.tags = config[:resource_group_tags]
  end
end
# Checks whether a resource group exists, retrying transient Faraday
# failures up to config[:azure_api_retries] times.
#
# @param resource_group_name [String] The name of the resource group to
#   check (case insensitive).
# @return [Boolean] operation result.
def resource_group_exists?(resource_group_name)
  attempts_left = config[:azure_api_retries]
  begin
    resource_management_client.resource_groups.check_existence(resource_group_name)
  rescue Faraday::TimeoutError, Faraday::ClientError => exception
    send_exception_message(exception, "while checking if resource group '#{resource_group_name}' exists. #{attempts_left} retries left.")
    raise if attempts_left == 0
    attempts_left -= 1
    retry
  end
end
# Create or update a resource group, retrying transient Faraday
# failures up to config[:azure_api_retries] times.
#
# @param resource_group_name [String] target resource group name.
# @param resource_group [Object] the ResourceGroup model to apply.
def create_resource_group(resource_group_name, resource_group)
  attempts_left = config[:azure_api_retries]
  begin
    resource_management_client.resource_groups.create_or_update(resource_group_name, resource_group)
  rescue Faraday::TimeoutError, Faraday::ClientError => exception
    send_exception_message(exception, "while creating resource group '#{resource_group_name}'. #{attempts_left} retries left.")
    raise if attempts_left == 0
    attempts_left -= 1
    retry
  end
end
# Kick off an asynchronous deployment create/update, retrying transient
# Faraday failures up to config[:azure_api_retries] times.
#
# @param resource_group [String] target resource group.
# @param deployment_name [String] deployment identifier.
# @param deployment [Object] the Deployment model to submit.
# @return [Object] the SDK's async operation handle.
def create_deployment_async(resource_group, deployment_name, deployment)
  attempts_left = config[:azure_api_retries]
  begin
    resource_management_client.deployments.begin_create_or_update_async(resource_group, deployment_name, deployment)
  rescue Faraday::TimeoutError, Faraday::ClientError => exception
    send_exception_message(exception, "while sending deployment creation request for deployment '#{deployment_name}'. #{attempts_left} retries left.")
    raise if attempts_left == 0
    attempts_left -= 1
    retry
  end
end
# Fetch a public IP address resource, retrying transient Faraday
# failures up to config[:azure_api_retries] times.
#
# @param resource_group_name [String] resource group to query.
# @param public_ip_name [String] name of the public IP resource.
# @return [Object] the SDK public IP model.
def get_public_ip(resource_group_name, public_ip_name)
  attempts_left = config[:azure_api_retries]
  begin
    network_management_client.public_ipaddresses.get(resource_group_name, public_ip_name)
  rescue Faraday::TimeoutError, Faraday::ClientError => exception
    send_exception_message(exception, "while fetching public ip '#{public_ip_name}' for resource group '#{resource_group_name}'. #{attempts_left} retries left.")
    raise if attempts_left == 0
    attempts_left -= 1
    retry
  end
end
# Fetch a network interface resource, retrying transient Faraday
# failures up to config[:azure_api_retries] times.
#
# @param resource_group_name [String] resource group to query.
# @param network_interface_name [String] name of the NIC resource.
# @return [Object] the SDK network interface model.
def get_network_interface(resource_group_name, network_interface_name)
  attempts_left = config[:azure_api_retries]
  begin
    interfaces_client = ::Azure::Network::Profiles::Latest::Mgmt::NetworkInterfaces.new(network_management_client)
    interfaces_client.get(resource_group_name, network_interface_name)
  rescue Faraday::TimeoutError, Faraday::ClientError => exception
    send_exception_message(exception, "while fetching network interface '#{network_interface_name}' for resource group '#{resource_group_name}'. #{attempts_left} retries left.")
    raise if attempts_left == 0
    attempts_left -= 1
    retry
  end
end
# List a deployment's operations, retrying transient Faraday failures
# up to config[:azure_api_retries] times.
#
# @param resource_group [String] resource group to query.
# @param deployment_name [String] deployment identifier.
# @return [Object] the SDK's operation list.
def list_deployment_operations(resource_group, deployment_name)
  attempts_left = config[:azure_api_retries]
  begin
    resource_management_client.deployment_operations.list(resource_group, deployment_name)
  rescue Faraday::TimeoutError, Faraday::ClientError => exception
    send_exception_message(exception, "while listing deployment operations for deployment '#{deployment_name}'. #{attempts_left} retries left.")
    raise if attempts_left == 0
    attempts_left -= 1
    retry
  end
end
# Read a deployment's current provisioning state, retrying transient
# Faraday failures up to config[:azure_api_retries] times.
#
# @param resource_group [String] resource group to query.
# @param deployment_name [String] deployment identifier.
# @return [String] the provisioning state (e.g. "Succeeded").
def get_deployment_state(resource_group, deployment_name)
  attempts_left = config[:azure_api_retries]
  begin
    resource_management_client.deployments.get(resource_group, deployment_name).properties.provisioning_state
  rescue Faraday::TimeoutError, Faraday::ClientError => exception
    send_exception_message(exception, "while retrieving state for deployment '#{deployment_name}'. #{attempts_left} retries left.")
    raise if attempts_left == 0
    attempts_left -= 1
    retry
  end
end
# Kick off asynchronous deletion of a resource group, retrying
# transient Faraday failures up to config[:azure_api_retries] times.
#
# @param resource_group_name [String] the resource group to delete.
# @return [Object] the SDK's async operation handle.
def delete_resource_group_async(resource_group_name)
  attempts_left = config[:azure_api_retries]
  begin
    resource_management_client.resource_groups.begin_delete(resource_group_name)
  rescue Faraday::TimeoutError, Faraday::ClientError => exception
    send_exception_message(exception, "while sending resource group deletion request for '#{resource_group_name}'. #{attempts_left} retries left.")
    raise if attempts_left == 0
    attempts_left -= 1
    retry
  end
end
# Log a retry-related exception with a human-readable header chosen by
# exception class; unrecognized exception types get a generic notice.
#
# @param exception [Exception] the rescued exception.
# @param message [String] context appended to the header.
def send_exception_message(exception, message)
  header =
    case exception
    when Faraday::TimeoutError then "Timed out"
    when Faraday::ClientError then "Connection reset by peer"
    end
  if header.nil?
    # Unhandled exception type: note it and bail out.
    info "Unrecognized exception type."
    return
  end
  info "#{header} #{message}"
end
end
end
end
| 42.874866 | 796 | 0.676487 |
bbbc8218706268e7687632a0003a62fe4ceb7907 | 185 | AccountCred.blueprint do
financial_inst { FinancialInst.make }
account_key { 'abc123' }
cred_key { 'abc123' }
cred_guid { ActiveSupport::SecureRandom.hex(16) }
end | 30.833333 | 56 | 0.681081 |
e2a95f2c1737576c306e7862a557c30707b1f35d | 85,967 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module DataflowV1b3
# Auto-generated forward declarations (google-api-client codegen): each
# Dataflow API model class is paired with a JSON Representation and JSON
# object support; the actual representation bodies are defined later in
# the generated client.
class MultiOutputInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SourceSplitRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SourceGetMetadataResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ShellTask
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class MetricShortId
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class AutoscalingEvent
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class TaskRunnerSettings
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Position
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Source
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SplitInt64
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class WorkerPool
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SourceOperationRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class WorkItem
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StructuredMessage
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ReportedParallelism
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ResourceUtilizationReport
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class TopologyConfig
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SourceSplitOptions
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ReadInstruction
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class WorkerSettings
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StreamingStageLocation
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DataDiskAssignment
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ApproximateSplitRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Status
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ExecutionStageState
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StreamLocation
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SendWorkerMessagesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StreamingComputationConfig
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class TransformSummary
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class LeaseWorkItemResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class LaunchTemplateParameters
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Sink
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class FlattenInstruction
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class PartialGroupByKeyInstruction
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StageSource
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class InstructionInput
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StringList
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DisplayData
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class GetDebugConfigRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class LeaseWorkItemRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class GetTemplateResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Parameter
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ReportWorkItemStatusRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class PipelineDescription
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StreamingConfigTask
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class JobExecutionInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Step
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class FailedLocation
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Disk
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CounterMetadata
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListJobMessagesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ApproximateReportedProgress
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class IntegerList
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StateFamilyConfig
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ResourceUtilizationReportResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SourceSplitResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ParallelInstruction
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Package
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class KeyRangeDataDiskAssignment
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ParDoInstruction
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class MetricUpdate
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CounterStructuredName
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ApproximateProgress
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class WorkerMessageResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class TemplateMetadata
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class WorkerMessage
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class JobMetrics
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class FloatingPointList
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CounterUpdate
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SourceMetadata
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
# -- Forward declarations (part 2 of 2) ----------------------------------
# Continuation of the auto-generated stubs: empty Representation subclass
# plus JsonObjectSupport mixin for each remaining model class. The field
# mappings for each Representation are defined later in this file.
class DistributionUpdate
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class SourceFork
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class WorkerHealthReportResponse
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class WorkItemStatus
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class ComponentSource
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class WorkItemServiceState
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class MetricStructuredName
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class SeqMapTaskOutputInfo
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class JobExecutionStageInfo
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class KeyRangeLocation
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class SourceGetMetadataRequest
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class SeqMapTask
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class NameAndKind
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class WorkerMessageCode
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class CustomSourceLocation
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class MapTask
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class FloatingPointMean
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class ReportWorkItemStatusResponse
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class InstructionOutput
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class CreateJobFromTemplateRequest
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class IntegerMean
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class ListJobsResponse
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class ComputationTopology
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class RuntimeEnvironment
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class MountedDataDisk
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class StreamingSideInputLocation
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class LaunchTemplateResponse
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class Job
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class DynamicSourceSplit
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class DerivedSource
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class SourceOperationResponse
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class SideInputInfo
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class SendDebugCaptureResponse
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class ConcatPosition
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class CounterStructuredNameAndMetadata
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class WriteInstruction
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class StreamingComputationRanges
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class AutoscalingSettings
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class ExecutionStageSummary
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class SendWorkerMessagesRequest
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class LogBucket
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class SourceSplitShard
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class CpuTime
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class Environment
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class StreamingComputationTask
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class SendDebugCaptureRequest
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class GetDebugConfigResponse
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class ComponentTransform
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class StreamingSetupTask
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class PubsubLocation
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class WorkerHealthReport
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class JobMessage
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
class ParameterMetadata
  class Representation < Google::Apis::Core::JsonRepresentation; end
  include Google::Apis::Core::JsonObjectSupport
end
# -- Representation definitions: MultiOutputInfo .. WorkItem --------------
# Each Representation maps a snake_case Ruby attribute to its wire-format
# field name via `property` (scalar/nested object), `collection` (array)
# and `hash` (map). `:numeric_string => true` marks values carried as
# strings on the wire (presumably int64 fields per Google API JSON
# conventions — confirm against the Dataflow API discovery document).
# NOTE(review): generated file — regenerate rather than hand-editing.
class MultiOutputInfo
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :tag, as: 'tag'
  end
end
class SourceSplitRequest
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :options, as: 'options', class: Google::Apis::DataflowV1b3::SourceSplitOptions, decorator: Google::Apis::DataflowV1b3::SourceSplitOptions::Representation
    property :source, as: 'source', class: Google::Apis::DataflowV1b3::Source, decorator: Google::Apis::DataflowV1b3::Source::Representation
  end
end
class SourceGetMetadataResponse
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :metadata, as: 'metadata', class: Google::Apis::DataflowV1b3::SourceMetadata, decorator: Google::Apis::DataflowV1b3::SourceMetadata::Representation
  end
end
class ShellTask
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :command, as: 'command'
    property :exit_code, as: 'exitCode'
  end
end
class MetricShortId
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :metric_index, as: 'metricIndex'
    property :short_id, :numeric_string => true, as: 'shortId'
  end
end
class AutoscalingEvent
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :time, as: 'time'
    property :description, as: 'description', class: Google::Apis::DataflowV1b3::StructuredMessage, decorator: Google::Apis::DataflowV1b3::StructuredMessage::Representation
    property :event_type, as: 'eventType'
    property :target_num_workers, :numeric_string => true, as: 'targetNumWorkers'
    property :current_num_workers, :numeric_string => true, as: 'currentNumWorkers'
  end
end
class TaskRunnerSettings
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :dataflow_api_version, as: 'dataflowApiVersion'
    collection :oauth_scopes, as: 'oauthScopes'
    property :streaming_worker_main_class, as: 'streamingWorkerMainClass'
    property :log_upload_location, as: 'logUploadLocation'
    property :workflow_file_name, as: 'workflowFileName'
    property :commandlines_file_name, as: 'commandlinesFileName'
    property :language_hint, as: 'languageHint'
    property :base_task_dir, as: 'baseTaskDir'
    property :temp_storage_prefix, as: 'tempStoragePrefix'
    property :base_url, as: 'baseUrl'
    property :log_to_serialconsole, as: 'logToSerialconsole'
    property :continue_on_exception, as: 'continueOnException'
    property :parallel_worker_settings, as: 'parallelWorkerSettings', class: Google::Apis::DataflowV1b3::WorkerSettings, decorator: Google::Apis::DataflowV1b3::WorkerSettings::Representation
    property :vm_id, as: 'vmId'
    property :task_user, as: 'taskUser'
    property :alsologtostderr, as: 'alsologtostderr'
    property :task_group, as: 'taskGroup'
    property :harness_command, as: 'harnessCommand'
    property :log_dir, as: 'logDir'
  end
end
class Position
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :record_index, :numeric_string => true, as: 'recordIndex'
    property :shuffle_position, as: 'shufflePosition'
    property :byte_offset, :numeric_string => true, as: 'byteOffset'
    property :concat_position, as: 'concatPosition', class: Google::Apis::DataflowV1b3::ConcatPosition, decorator: Google::Apis::DataflowV1b3::ConcatPosition::Representation
    property :end, as: 'end'
    property :key, as: 'key'
  end
end
class Source
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    hash :spec, as: 'spec'
    property :metadata, as: 'metadata', class: Google::Apis::DataflowV1b3::SourceMetadata, decorator: Google::Apis::DataflowV1b3::SourceMetadata::Representation
    collection :base_specs, as: 'baseSpecs'
    property :does_not_need_splitting, as: 'doesNotNeedSplitting'
    hash :codec, as: 'codec'
  end
end
class SplitInt64
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :low_bits, as: 'lowBits'
    property :high_bits, as: 'highBits'
  end
end
class WorkerPool
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :subnetwork, as: 'subnetwork'
    property :ip_configuration, as: 'ipConfiguration'
    property :taskrunner_settings, as: 'taskrunnerSettings', class: Google::Apis::DataflowV1b3::TaskRunnerSettings, decorator: Google::Apis::DataflowV1b3::TaskRunnerSettings::Representation
    property :autoscaling_settings, as: 'autoscalingSettings', class: Google::Apis::DataflowV1b3::AutoscalingSettings, decorator: Google::Apis::DataflowV1b3::AutoscalingSettings::Representation
    hash :metadata, as: 'metadata'
    property :network, as: 'network'
    property :default_package_set, as: 'defaultPackageSet'
    property :num_threads_per_worker, as: 'numThreadsPerWorker'
    property :num_workers, as: 'numWorkers'
    property :zone, as: 'zone'
    property :disk_source_image, as: 'diskSourceImage'
    collection :packages, as: 'packages', class: Google::Apis::DataflowV1b3::Package, decorator: Google::Apis::DataflowV1b3::Package::Representation
    property :teardown_policy, as: 'teardownPolicy'
    property :on_host_maintenance, as: 'onHostMaintenance'
    hash :pool_args, as: 'poolArgs'
    property :disk_size_gb, as: 'diskSizeGb'
    property :worker_harness_container_image, as: 'workerHarnessContainerImage'
    property :machine_type, as: 'machineType'
    property :disk_type, as: 'diskType'
    property :kind, as: 'kind'
    collection :data_disks, as: 'dataDisks', class: Google::Apis::DataflowV1b3::Disk, decorator: Google::Apis::DataflowV1b3::Disk::Representation
  end
end
class SourceOperationRequest
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :get_metadata, as: 'getMetadata', class: Google::Apis::DataflowV1b3::SourceGetMetadataRequest, decorator: Google::Apis::DataflowV1b3::SourceGetMetadataRequest::Representation
    property :split, as: 'split', class: Google::Apis::DataflowV1b3::SourceSplitRequest, decorator: Google::Apis::DataflowV1b3::SourceSplitRequest::Representation
  end
end
class WorkItem
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :streaming_setup_task, as: 'streamingSetupTask', class: Google::Apis::DataflowV1b3::StreamingSetupTask, decorator: Google::Apis::DataflowV1b3::StreamingSetupTask::Representation
    property :report_status_interval, as: 'reportStatusInterval'
    property :source_operation_task, as: 'sourceOperationTask', class: Google::Apis::DataflowV1b3::SourceOperationRequest, decorator: Google::Apis::DataflowV1b3::SourceOperationRequest::Representation
    property :lease_expire_time, as: 'leaseExpireTime'
    property :streaming_config_task, as: 'streamingConfigTask', class: Google::Apis::DataflowV1b3::StreamingConfigTask, decorator: Google::Apis::DataflowV1b3::StreamingConfigTask::Representation
    property :initial_report_index, :numeric_string => true, as: 'initialReportIndex'
    property :streaming_computation_task, as: 'streamingComputationTask', class: Google::Apis::DataflowV1b3::StreamingComputationTask, decorator: Google::Apis::DataflowV1b3::StreamingComputationTask::Representation
    property :shell_task, as: 'shellTask', class: Google::Apis::DataflowV1b3::ShellTask, decorator: Google::Apis::DataflowV1b3::ShellTask::Representation
    property :job_id, as: 'jobId'
    property :id, :numeric_string => true, as: 'id'
    property :configuration, as: 'configuration'
    property :map_task, as: 'mapTask', class: Google::Apis::DataflowV1b3::MapTask, decorator: Google::Apis::DataflowV1b3::MapTask::Representation
    property :seq_map_task, as: 'seqMapTask', class: Google::Apis::DataflowV1b3::SeqMapTask, decorator: Google::Apis::DataflowV1b3::SeqMapTask::Representation
    collection :packages, as: 'packages', class: Google::Apis::DataflowV1b3::Package, decorator: Google::Apis::DataflowV1b3::Package::Representation
    property :project_id, as: 'projectId'
  end
end
# -- Representation definitions: StructuredMessage .. LaunchTemplateParameters
# Same generated mapping DSL as above: `property`/`collection`/`hash`
# declare the snake_case attribute and its wire-format name; nested
# message types carry a `class:`/`decorator:` pair.
class StructuredMessage
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :message_text, as: 'messageText'
    collection :parameters, as: 'parameters', class: Google::Apis::DataflowV1b3::Parameter, decorator: Google::Apis::DataflowV1b3::Parameter::Representation
    property :message_key, as: 'messageKey'
  end
end
class ReportedParallelism
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :value, as: 'value'
    property :is_infinite, as: 'isInfinite'
  end
end
class ResourceUtilizationReport
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    collection :cpu_time, as: 'cpuTime', class: Google::Apis::DataflowV1b3::CpuTime, decorator: Google::Apis::DataflowV1b3::CpuTime::Representation
  end
end
class TopologyConfig
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    collection :computations, as: 'computations', class: Google::Apis::DataflowV1b3::ComputationTopology, decorator: Google::Apis::DataflowV1b3::ComputationTopology::Representation
    property :persistent_state_version, as: 'persistentStateVersion'
    collection :data_disk_assignments, as: 'dataDiskAssignments', class: Google::Apis::DataflowV1b3::DataDiskAssignment, decorator: Google::Apis::DataflowV1b3::DataDiskAssignment::Representation
    property :forwarding_key_bits, as: 'forwardingKeyBits'
    hash :user_stage_to_computation_name_map, as: 'userStageToComputationNameMap'
  end
end
class SourceSplitOptions
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :desired_bundle_size_bytes, :numeric_string => true, as: 'desiredBundleSizeBytes'
    property :desired_shard_size_bytes, :numeric_string => true, as: 'desiredShardSizeBytes'
  end
end
class ReadInstruction
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :source, as: 'source', class: Google::Apis::DataflowV1b3::Source, decorator: Google::Apis::DataflowV1b3::Source::Representation
  end
end
class WorkerSettings
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :service_path, as: 'servicePath'
    property :shuffle_service_path, as: 'shuffleServicePath'
    property :worker_id, as: 'workerId'
    property :temp_storage_prefix, as: 'tempStoragePrefix'
    property :reporting_enabled, as: 'reportingEnabled'
    property :base_url, as: 'baseUrl'
  end
end
class StreamingStageLocation
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :stream_id, as: 'streamId'
  end
end
class DataDiskAssignment
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :vm_instance, as: 'vmInstance'
    collection :data_disks, as: 'dataDisks'
  end
end
class ApproximateSplitRequest
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :position, as: 'position', class: Google::Apis::DataflowV1b3::Position, decorator: Google::Apis::DataflowV1b3::Position::Representation
    property :fraction_consumed, as: 'fractionConsumed'
  end
end
class Status
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :code, as: 'code'
    property :message, as: 'message'
    collection :details, as: 'details'
  end
end
class ExecutionStageState
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :current_state_time, as: 'currentStateTime'
    property :execution_stage_state, as: 'executionStageState'
    property :execution_stage_name, as: 'executionStageName'
  end
end
class StreamLocation
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :custom_source_location, as: 'customSourceLocation', class: Google::Apis::DataflowV1b3::CustomSourceLocation, decorator: Google::Apis::DataflowV1b3::CustomSourceLocation::Representation
    property :side_input_location, as: 'sideInputLocation', class: Google::Apis::DataflowV1b3::StreamingSideInputLocation, decorator: Google::Apis::DataflowV1b3::StreamingSideInputLocation::Representation
    property :pubsub_location, as: 'pubsubLocation', class: Google::Apis::DataflowV1b3::PubsubLocation, decorator: Google::Apis::DataflowV1b3::PubsubLocation::Representation
    property :streaming_stage_location, as: 'streamingStageLocation', class: Google::Apis::DataflowV1b3::StreamingStageLocation, decorator: Google::Apis::DataflowV1b3::StreamingStageLocation::Representation
  end
end
class SendWorkerMessagesResponse
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    collection :worker_message_responses, as: 'workerMessageResponses', class: Google::Apis::DataflowV1b3::WorkerMessageResponse, decorator: Google::Apis::DataflowV1b3::WorkerMessageResponse::Representation
  end
end
class StreamingComputationConfig
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :computation_id, as: 'computationId'
    property :stage_name, as: 'stageName'
    property :system_name, as: 'systemName'
    collection :instructions, as: 'instructions', class: Google::Apis::DataflowV1b3::ParallelInstruction, decorator: Google::Apis::DataflowV1b3::ParallelInstruction::Representation
  end
end
class TransformSummary
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :id, as: 'id'
    collection :display_data, as: 'displayData', class: Google::Apis::DataflowV1b3::DisplayData, decorator: Google::Apis::DataflowV1b3::DisplayData::Representation
    collection :output_collection_name, as: 'outputCollectionName'
    property :kind, as: 'kind'
    collection :input_collection_name, as: 'inputCollectionName'
    property :name, as: 'name'
  end
end
class LeaseWorkItemResponse
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    collection :work_items, as: 'workItems', class: Google::Apis::DataflowV1b3::WorkItem, decorator: Google::Apis::DataflowV1b3::WorkItem::Representation
  end
end
class LaunchTemplateParameters
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :environment, as: 'environment', class: Google::Apis::DataflowV1b3::RuntimeEnvironment, decorator: Google::Apis::DataflowV1b3::RuntimeEnvironment::Representation
    hash :parameters, as: 'parameters'
    property :job_name, as: 'jobName'
  end
end
# -- Representation definitions: Sink .. StreamingConfigTask --------------
# Generated JSON field mappings; see the section comment at the first
# Representation block in this file for the DSL conventions.
class Sink
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    hash :codec, as: 'codec'
    hash :spec, as: 'spec'
  end
end
class FlattenInstruction
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    collection :inputs, as: 'inputs', class: Google::Apis::DataflowV1b3::InstructionInput, decorator: Google::Apis::DataflowV1b3::InstructionInput::Representation
  end
end
class PartialGroupByKeyInstruction
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :original_combine_values_input_store_name, as: 'originalCombineValuesInputStoreName'
    collection :side_inputs, as: 'sideInputs', class: Google::Apis::DataflowV1b3::SideInputInfo, decorator: Google::Apis::DataflowV1b3::SideInputInfo::Representation
    property :original_combine_values_step_name, as: 'originalCombineValuesStepName'
    property :input, as: 'input', class: Google::Apis::DataflowV1b3::InstructionInput, decorator: Google::Apis::DataflowV1b3::InstructionInput::Representation
    hash :input_element_codec, as: 'inputElementCodec'
    hash :value_combining_fn, as: 'valueCombiningFn'
  end
end
class StageSource
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :size_bytes, :numeric_string => true, as: 'sizeBytes'
    property :name, as: 'name'
    property :user_name, as: 'userName'
    property :original_transform_or_collection, as: 'originalTransformOrCollection'
  end
end
class InstructionInput
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :producer_instruction_index, as: 'producerInstructionIndex'
    property :output_num, as: 'outputNum'
  end
end
class StringList
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    collection :elements, as: 'elements'
  end
end
class DisplayData
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :str_value, as: 'strValue'
    property :duration_value, as: 'durationValue'
    property :int64_value, :numeric_string => true, as: 'int64Value'
    property :namespace, as: 'namespace'
    property :float_value, as: 'floatValue'
    property :key, as: 'key'
    property :short_str_value, as: 'shortStrValue'
    property :url, as: 'url'
    property :label, as: 'label'
    property :timestamp_value, as: 'timestampValue'
    property :java_class_value, as: 'javaClassValue'
    property :bool_value, as: 'boolValue'
  end
end
class GetDebugConfigRequest
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :component_id, as: 'componentId'
    property :worker_id, as: 'workerId'
    property :location, as: 'location'
  end
end
class LeaseWorkItemRequest
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    collection :worker_capabilities, as: 'workerCapabilities'
    property :worker_id, as: 'workerId'
    property :requested_lease_duration, as: 'requestedLeaseDuration'
    property :current_worker_time, as: 'currentWorkerTime'
    property :location, as: 'location'
    collection :work_item_types, as: 'workItemTypes'
  end
end
class GetTemplateResponse
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :metadata, as: 'metadata', class: Google::Apis::DataflowV1b3::TemplateMetadata, decorator: Google::Apis::DataflowV1b3::TemplateMetadata::Representation
    property :status, as: 'status', class: Google::Apis::DataflowV1b3::Status, decorator: Google::Apis::DataflowV1b3::Status::Representation
  end
end
class Parameter
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    property :key, as: 'key'
    property :value, as: 'value'
  end
end
class ReportWorkItemStatusRequest
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    collection :work_item_statuses, as: 'workItemStatuses', class: Google::Apis::DataflowV1b3::WorkItemStatus, decorator: Google::Apis::DataflowV1b3::WorkItemStatus::Representation
    property :current_worker_time, as: 'currentWorkerTime'
    property :worker_id, as: 'workerId'
    property :location, as: 'location'
  end
end
class PipelineDescription
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    collection :original_pipeline_transform, as: 'originalPipelineTransform', class: Google::Apis::DataflowV1b3::TransformSummary, decorator: Google::Apis::DataflowV1b3::TransformSummary::Representation
    collection :display_data, as: 'displayData', class: Google::Apis::DataflowV1b3::DisplayData, decorator: Google::Apis::DataflowV1b3::DisplayData::Representation
    collection :execution_pipeline_stage, as: 'executionPipelineStage', class: Google::Apis::DataflowV1b3::ExecutionStageSummary, decorator: Google::Apis::DataflowV1b3::ExecutionStageSummary::Representation
  end
end
class StreamingConfigTask
  # @private
  class Representation < Google::Apis::Core::JsonRepresentation
    hash :user_step_to_state_family_name_map, as: 'userStepToStateFamilyNameMap'
    property :windmill_service_port, :numeric_string => true, as: 'windmillServicePort'
    collection :streaming_computation_configs, as: 'streamingComputationConfigs', class: Google::Apis::DataflowV1b3::StreamingComputationConfig, decorator: Google::Apis::DataflowV1b3::StreamingComputationConfig::Representation
    property :windmill_service_endpoint, as: 'windmillServiceEndpoint'
  end
end
class JobExecutionInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
hash :stages, as: 'stages', class: Google::Apis::DataflowV1b3::JobExecutionStageInfo, decorator: Google::Apis::DataflowV1b3::JobExecutionStageInfo::Representation
end
end
# Wire representation of a Dataflow job `Step`. `properties` is an
# open-ended JSON object, so it is mapped with `hash` rather than a
# typed decorator.
class Step
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
property :kind, as: 'kind'
hash :properties, as: 'properties'
end
end
# Wire representation of a `FailedLocation` message: only the location
# name is carried.
class FailedLocation
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
end
end
# Wire representation of a worker `Disk` description. Snake_case
# attributes map to the camelCase JSON field names given in `as:`.
class Disk
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :mount_point, as: 'mountPoint'
property :disk_type, as: 'diskType'
property :size_gb, as: 'sizeGb'
end
end
# Wire representation of `CounterMetadata`, describing a counter's
# kind and units as plain string fields.
class CounterMetadata
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :description, as: 'description'
property :kind, as: 'kind'
property :standard_units, as: 'standardUnits'
property :other_units, as: 'otherUnits'
end
end
class ListJobMessagesResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :next_page_token, as: 'nextPageToken'
collection :autoscaling_events, as: 'autoscalingEvents', class: Google::Apis::DataflowV1b3::AutoscalingEvent, decorator: Google::Apis::DataflowV1b3::AutoscalingEvent::Representation
collection :job_messages, as: 'jobMessages', class: Google::Apis::DataflowV1b3::JobMessage, decorator: Google::Apis::DataflowV1b3::JobMessage::Representation
end
end
class ApproximateReportedProgress
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :remaining_parallelism, as: 'remainingParallelism', class: Google::Apis::DataflowV1b3::ReportedParallelism, decorator: Google::Apis::DataflowV1b3::ReportedParallelism::Representation
property :position, as: 'position', class: Google::Apis::DataflowV1b3::Position, decorator: Google::Apis::DataflowV1b3::Position::Representation
property :fraction_consumed, as: 'fractionConsumed'
property :consumed_parallelism, as: 'consumedParallelism', class: Google::Apis::DataflowV1b3::ReportedParallelism, decorator: Google::Apis::DataflowV1b3::ReportedParallelism::Representation
end
end
class IntegerList
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :elements, as: 'elements', class: Google::Apis::DataflowV1b3::SplitInt64, decorator: Google::Apis::DataflowV1b3::SplitInt64::Representation
end
end
class StateFamilyConfig
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :state_family, as: 'stateFamily'
property :is_read, as: 'isRead'
end
end
class ResourceUtilizationReportResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
end
end
class SourceSplitResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :bundles, as: 'bundles', class: Google::Apis::DataflowV1b3::DerivedSource, decorator: Google::Apis::DataflowV1b3::DerivedSource::Representation
collection :shards, as: 'shards', class: Google::Apis::DataflowV1b3::SourceSplitShard, decorator: Google::Apis::DataflowV1b3::SourceSplitShard::Representation
property :outcome, as: 'outcome'
end
end
class ParallelInstruction
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :original_name, as: 'originalName'
property :flatten, as: 'flatten', class: Google::Apis::DataflowV1b3::FlattenInstruction, decorator: Google::Apis::DataflowV1b3::FlattenInstruction::Representation
property :write, as: 'write', class: Google::Apis::DataflowV1b3::WriteInstruction, decorator: Google::Apis::DataflowV1b3::WriteInstruction::Representation
property :system_name, as: 'systemName'
property :partial_group_by_key, as: 'partialGroupByKey', class: Google::Apis::DataflowV1b3::PartialGroupByKeyInstruction, decorator: Google::Apis::DataflowV1b3::PartialGroupByKeyInstruction::Representation
collection :outputs, as: 'outputs', class: Google::Apis::DataflowV1b3::InstructionOutput, decorator: Google::Apis::DataflowV1b3::InstructionOutput::Representation
property :name, as: 'name'
property :read, as: 'read', class: Google::Apis::DataflowV1b3::ReadInstruction, decorator: Google::Apis::DataflowV1b3::ReadInstruction::Representation
property :par_do, as: 'parDo', class: Google::Apis::DataflowV1b3::ParDoInstruction, decorator: Google::Apis::DataflowV1b3::ParDoInstruction::Representation
end
end
class Package
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :location, as: 'location'
property :name, as: 'name'
end
end
class KeyRangeDataDiskAssignment
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :data_disk, as: 'dataDisk'
property :start, as: 'start'
property :end, as: 'end'
end
end
class ParDoInstruction
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :side_inputs, as: 'sideInputs', class: Google::Apis::DataflowV1b3::SideInputInfo, decorator: Google::Apis::DataflowV1b3::SideInputInfo::Representation
collection :multi_output_infos, as: 'multiOutputInfos', class: Google::Apis::DataflowV1b3::MultiOutputInfo, decorator: Google::Apis::DataflowV1b3::MultiOutputInfo::Representation
hash :user_fn, as: 'userFn'
property :input, as: 'input', class: Google::Apis::DataflowV1b3::InstructionInput, decorator: Google::Apis::DataflowV1b3::InstructionInput::Representation
property :num_outputs, as: 'numOutputs'
end
end
class MetricUpdate
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :internal, as: 'internal'
property :cumulative, as: 'cumulative'
property :kind, as: 'kind'
property :scalar, as: 'scalar'
property :mean_count, as: 'meanCount'
property :mean_sum, as: 'meanSum'
property :update_time, as: 'updateTime'
property :name, as: 'name', class: Google::Apis::DataflowV1b3::MetricStructuredName, decorator: Google::Apis::DataflowV1b3::MetricStructuredName::Representation
property :distribution, as: 'distribution'
property :set, as: 'set'
end
end
class CounterStructuredName
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :component_step_name, as: 'componentStepName'
property :portion, as: 'portion'
property :original_step_name, as: 'originalStepName'
property :worker_id, as: 'workerId'
property :origin_namespace, as: 'originNamespace'
property :name, as: 'name'
property :execution_step_name, as: 'executionStepName'
property :origin, as: 'origin'
end
end
class ApproximateProgress
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :percent_complete, as: 'percentComplete'
property :remaining_time, as: 'remainingTime'
property :position, as: 'position', class: Google::Apis::DataflowV1b3::Position, decorator: Google::Apis::DataflowV1b3::Position::Representation
end
end
class WorkerMessageResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :worker_metrics_response, as: 'workerMetricsResponse', class: Google::Apis::DataflowV1b3::ResourceUtilizationReportResponse, decorator: Google::Apis::DataflowV1b3::ResourceUtilizationReportResponse::Representation
property :worker_health_report_response, as: 'workerHealthReportResponse', class: Google::Apis::DataflowV1b3::WorkerHealthReportResponse, decorator: Google::Apis::DataflowV1b3::WorkerHealthReportResponse::Representation
end
end
class TemplateMetadata
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :parameters, as: 'parameters', class: Google::Apis::DataflowV1b3::ParameterMetadata, decorator: Google::Apis::DataflowV1b3::ParameterMetadata::Representation
property :name, as: 'name'
property :description, as: 'description'
end
end
class WorkerMessage
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :worker_health_report, as: 'workerHealthReport', class: Google::Apis::DataflowV1b3::WorkerHealthReport, decorator: Google::Apis::DataflowV1b3::WorkerHealthReport::Representation
property :worker_metrics, as: 'workerMetrics', class: Google::Apis::DataflowV1b3::ResourceUtilizationReport, decorator: Google::Apis::DataflowV1b3::ResourceUtilizationReport::Representation
property :worker_message_code, as: 'workerMessageCode', class: Google::Apis::DataflowV1b3::WorkerMessageCode, decorator: Google::Apis::DataflowV1b3::WorkerMessageCode::Representation
hash :labels, as: 'labels'
property :time, as: 'time'
end
end
class JobMetrics
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :metrics, as: 'metrics', class: Google::Apis::DataflowV1b3::MetricUpdate, decorator: Google::Apis::DataflowV1b3::MetricUpdate::Representation
property :metric_time, as: 'metricTime'
end
end
class FloatingPointList
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :elements, as: 'elements'
end
end
class CounterUpdate
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :floating_point_list, as: 'floatingPointList', class: Google::Apis::DataflowV1b3::FloatingPointList, decorator: Google::Apis::DataflowV1b3::FloatingPointList::Representation
property :integer, as: 'integer', class: Google::Apis::DataflowV1b3::SplitInt64, decorator: Google::Apis::DataflowV1b3::SplitInt64::Representation
property :structured_name_and_metadata, as: 'structuredNameAndMetadata', class: Google::Apis::DataflowV1b3::CounterStructuredNameAndMetadata, decorator: Google::Apis::DataflowV1b3::CounterStructuredNameAndMetadata::Representation
property :integer_list, as: 'integerList', class: Google::Apis::DataflowV1b3::IntegerList, decorator: Google::Apis::DataflowV1b3::IntegerList::Representation
property :integer_mean, as: 'integerMean', class: Google::Apis::DataflowV1b3::IntegerMean, decorator: Google::Apis::DataflowV1b3::IntegerMean::Representation
property :floating_point, as: 'floatingPoint'
property :internal, as: 'internal'
property :cumulative, as: 'cumulative'
property :floating_point_mean, as: 'floatingPointMean', class: Google::Apis::DataflowV1b3::FloatingPointMean, decorator: Google::Apis::DataflowV1b3::FloatingPointMean::Representation
property :boolean, as: 'boolean'
property :name_and_kind, as: 'nameAndKind', class: Google::Apis::DataflowV1b3::NameAndKind, decorator: Google::Apis::DataflowV1b3::NameAndKind::Representation
property :string_list, as: 'stringList', class: Google::Apis::DataflowV1b3::StringList, decorator: Google::Apis::DataflowV1b3::StringList::Representation
property :distribution, as: 'distribution', class: Google::Apis::DataflowV1b3::DistributionUpdate, decorator: Google::Apis::DataflowV1b3::DistributionUpdate::Representation
property :short_id, :numeric_string => true, as: 'shortId'
end
end
class SourceMetadata
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :estimated_size_bytes, :numeric_string => true, as: 'estimatedSizeBytes'
property :infinite, as: 'infinite'
property :produces_sorted_keys, as: 'producesSortedKeys'
end
end
class DistributionUpdate
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :max, as: 'max', class: Google::Apis::DataflowV1b3::SplitInt64, decorator: Google::Apis::DataflowV1b3::SplitInt64::Representation
collection :log_buckets, as: 'logBuckets', class: Google::Apis::DataflowV1b3::LogBucket, decorator: Google::Apis::DataflowV1b3::LogBucket::Representation
property :count, as: 'count', class: Google::Apis::DataflowV1b3::SplitInt64, decorator: Google::Apis::DataflowV1b3::SplitInt64::Representation
property :min, as: 'min', class: Google::Apis::DataflowV1b3::SplitInt64, decorator: Google::Apis::DataflowV1b3::SplitInt64::Representation
property :sum_of_squares, as: 'sumOfSquares'
property :sum, as: 'sum', class: Google::Apis::DataflowV1b3::SplitInt64, decorator: Google::Apis::DataflowV1b3::SplitInt64::Representation
end
end
class SourceFork
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :primary_source, as: 'primarySource', class: Google::Apis::DataflowV1b3::DerivedSource, decorator: Google::Apis::DataflowV1b3::DerivedSource::Representation
property :residual, as: 'residual', class: Google::Apis::DataflowV1b3::SourceSplitShard, decorator: Google::Apis::DataflowV1b3::SourceSplitShard::Representation
property :residual_source, as: 'residualSource', class: Google::Apis::DataflowV1b3::DerivedSource, decorator: Google::Apis::DataflowV1b3::DerivedSource::Representation
property :primary, as: 'primary', class: Google::Apis::DataflowV1b3::SourceSplitShard, decorator: Google::Apis::DataflowV1b3::SourceSplitShard::Representation
end
end
class WorkerHealthReportResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :report_interval, as: 'reportInterval'
end
end
class WorkItemStatus
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :counter_updates, as: 'counterUpdates', class: Google::Apis::DataflowV1b3::CounterUpdate, decorator: Google::Apis::DataflowV1b3::CounterUpdate::Representation
property :work_item_id, as: 'workItemId'
collection :metric_updates, as: 'metricUpdates', class: Google::Apis::DataflowV1b3::MetricUpdate, decorator: Google::Apis::DataflowV1b3::MetricUpdate::Representation
collection :errors, as: 'errors', class: Google::Apis::DataflowV1b3::Status, decorator: Google::Apis::DataflowV1b3::Status::Representation
property :dynamic_source_split, as: 'dynamicSourceSplit', class: Google::Apis::DataflowV1b3::DynamicSourceSplit, decorator: Google::Apis::DataflowV1b3::DynamicSourceSplit::Representation
property :source_operation_response, as: 'sourceOperationResponse', class: Google::Apis::DataflowV1b3::SourceOperationResponse, decorator: Google::Apis::DataflowV1b3::SourceOperationResponse::Representation
property :progress, as: 'progress', class: Google::Apis::DataflowV1b3::ApproximateProgress, decorator: Google::Apis::DataflowV1b3::ApproximateProgress::Representation
property :requested_lease_duration, as: 'requestedLeaseDuration'
property :report_index, :numeric_string => true, as: 'reportIndex'
property :stop_position, as: 'stopPosition', class: Google::Apis::DataflowV1b3::Position, decorator: Google::Apis::DataflowV1b3::Position::Representation
property :completed, as: 'completed'
property :reported_progress, as: 'reportedProgress', class: Google::Apis::DataflowV1b3::ApproximateReportedProgress, decorator: Google::Apis::DataflowV1b3::ApproximateReportedProgress::Representation
property :source_fork, as: 'sourceFork', class: Google::Apis::DataflowV1b3::SourceFork, decorator: Google::Apis::DataflowV1b3::SourceFork::Representation
end
end
class ComponentSource
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
property :user_name, as: 'userName'
property :original_transform_or_collection, as: 'originalTransformOrCollection'
end
end
class WorkItemServiceState
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :suggested_stop_point, as: 'suggestedStopPoint', class: Google::Apis::DataflowV1b3::ApproximateProgress, decorator: Google::Apis::DataflowV1b3::ApproximateProgress::Representation
property :split_request, as: 'splitRequest', class: Google::Apis::DataflowV1b3::ApproximateSplitRequest, decorator: Google::Apis::DataflowV1b3::ApproximateSplitRequest::Representation
property :suggested_stop_position, as: 'suggestedStopPosition', class: Google::Apis::DataflowV1b3::Position, decorator: Google::Apis::DataflowV1b3::Position::Representation
property :report_status_interval, as: 'reportStatusInterval'
hash :harness_data, as: 'harnessData'
property :lease_expire_time, as: 'leaseExpireTime'
collection :metric_short_id, as: 'metricShortId', class: Google::Apis::DataflowV1b3::MetricShortId, decorator: Google::Apis::DataflowV1b3::MetricShortId::Representation
property :next_report_index, :numeric_string => true, as: 'nextReportIndex'
end
end
class MetricStructuredName
# @private
class Representation < Google::Apis::Core::JsonRepresentation
hash :context, as: 'context'
property :name, as: 'name'
property :origin, as: 'origin'
end
end
class SeqMapTaskOutputInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :tag, as: 'tag'
property :sink, as: 'sink', class: Google::Apis::DataflowV1b3::Sink, decorator: Google::Apis::DataflowV1b3::Sink::Representation
end
end
class JobExecutionStageInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :step_name, as: 'stepName'
end
end
class KeyRangeLocation
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :delivery_endpoint, as: 'deliveryEndpoint'
property :data_disk, as: 'dataDisk'
property :start, as: 'start'
property :end, as: 'end'
property :deprecated_persistent_directory, as: 'deprecatedPersistentDirectory'
end
end
class SourceGetMetadataRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :source, as: 'source', class: Google::Apis::DataflowV1b3::Source, decorator: Google::Apis::DataflowV1b3::Source::Representation
end
end
class SeqMapTask
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
collection :output_infos, as: 'outputInfos', class: Google::Apis::DataflowV1b3::SeqMapTaskOutputInfo, decorator: Google::Apis::DataflowV1b3::SeqMapTaskOutputInfo::Representation
collection :inputs, as: 'inputs', class: Google::Apis::DataflowV1b3::SideInputInfo, decorator: Google::Apis::DataflowV1b3::SideInputInfo::Representation
property :stage_name, as: 'stageName'
property :system_name, as: 'systemName'
hash :user_fn, as: 'userFn'
end
end
class NameAndKind
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
property :kind, as: 'kind'
end
end
class WorkerMessageCode
# @private
class Representation < Google::Apis::Core::JsonRepresentation
hash :parameters, as: 'parameters'
property :code, as: 'code'
end
end
class CustomSourceLocation
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :stateful, as: 'stateful'
end
end
class MapTask
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :stage_name, as: 'stageName'
property :system_name, as: 'systemName'
collection :instructions, as: 'instructions', class: Google::Apis::DataflowV1b3::ParallelInstruction, decorator: Google::Apis::DataflowV1b3::ParallelInstruction::Representation
end
end
class FloatingPointMean
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :count, as: 'count', class: Google::Apis::DataflowV1b3::SplitInt64, decorator: Google::Apis::DataflowV1b3::SplitInt64::Representation
property :sum, as: 'sum'
end
end
class ReportWorkItemStatusResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :work_item_service_states, as: 'workItemServiceStates', class: Google::Apis::DataflowV1b3::WorkItemServiceState, decorator: Google::Apis::DataflowV1b3::WorkItemServiceState::Representation
end
end
class InstructionOutput
# @private
class Representation < Google::Apis::Core::JsonRepresentation
hash :codec, as: 'codec'
property :name, as: 'name'
property :original_name, as: 'originalName'
property :system_name, as: 'systemName'
property :only_count_key_bytes, as: 'onlyCountKeyBytes'
property :only_count_value_bytes, as: 'onlyCountValueBytes'
end
end
class CreateJobFromTemplateRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :job_name, as: 'jobName'
property :gcs_path, as: 'gcsPath'
property :environment, as: 'environment', class: Google::Apis::DataflowV1b3::RuntimeEnvironment, decorator: Google::Apis::DataflowV1b3::RuntimeEnvironment::Representation
property :location, as: 'location'
hash :parameters, as: 'parameters'
end
end
class IntegerMean
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :count, as: 'count', class: Google::Apis::DataflowV1b3::SplitInt64, decorator: Google::Apis::DataflowV1b3::SplitInt64::Representation
property :sum, as: 'sum', class: Google::Apis::DataflowV1b3::SplitInt64, decorator: Google::Apis::DataflowV1b3::SplitInt64::Representation
end
end
class ListJobsResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :failed_location, as: 'failedLocation', class: Google::Apis::DataflowV1b3::FailedLocation, decorator: Google::Apis::DataflowV1b3::FailedLocation::Representation
property :next_page_token, as: 'nextPageToken'
collection :jobs, as: 'jobs', class: Google::Apis::DataflowV1b3::Job, decorator: Google::Apis::DataflowV1b3::Job::Representation
end
end
class ComputationTopology
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :system_stage_name, as: 'systemStageName'
property :computation_id, as: 'computationId'
collection :inputs, as: 'inputs', class: Google::Apis::DataflowV1b3::StreamLocation, decorator: Google::Apis::DataflowV1b3::StreamLocation::Representation
collection :key_ranges, as: 'keyRanges', class: Google::Apis::DataflowV1b3::KeyRangeLocation, decorator: Google::Apis::DataflowV1b3::KeyRangeLocation::Representation
collection :state_families, as: 'stateFamilies', class: Google::Apis::DataflowV1b3::StateFamilyConfig, decorator: Google::Apis::DataflowV1b3::StateFamilyConfig::Representation
collection :outputs, as: 'outputs', class: Google::Apis::DataflowV1b3::StreamLocation, decorator: Google::Apis::DataflowV1b3::StreamLocation::Representation
end
end
class RuntimeEnvironment
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :machine_type, as: 'machineType'
property :zone, as: 'zone'
property :max_workers, as: 'maxWorkers'
property :service_account_email, as: 'serviceAccountEmail'
property :temp_location, as: 'tempLocation'
property :bypass_temp_dir_validation, as: 'bypassTempDirValidation'
end
end
class MountedDataDisk
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :data_disk, as: 'dataDisk'
end
end
class StreamingSideInputLocation
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :tag, as: 'tag'
property :state_family, as: 'stateFamily'
end
end
class LaunchTemplateResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :job, as: 'job', class: Google::Apis::DataflowV1b3::Job, decorator: Google::Apis::DataflowV1b3::Job::Representation
end
end
class Job
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :stage_states, as: 'stageStates', class: Google::Apis::DataflowV1b3::ExecutionStageState, decorator: Google::Apis::DataflowV1b3::ExecutionStageState::Representation
property :project_id, as: 'projectId'
property :type, as: 'type'
property :pipeline_description, as: 'pipelineDescription', class: Google::Apis::DataflowV1b3::PipelineDescription, decorator: Google::Apis::DataflowV1b3::PipelineDescription::Representation
property :replace_job_id, as: 'replaceJobId'
property :requested_state, as: 'requestedState'
collection :temp_files, as: 'tempFiles'
property :client_request_id, as: 'clientRequestId'
property :name, as: 'name'
collection :steps, as: 'steps', class: Google::Apis::DataflowV1b3::Step, decorator: Google::Apis::DataflowV1b3::Step::Representation
property :replaced_by_job_id, as: 'replacedByJobId'
property :execution_info, as: 'executionInfo', class: Google::Apis::DataflowV1b3::JobExecutionInfo, decorator: Google::Apis::DataflowV1b3::JobExecutionInfo::Representation
property :id, as: 'id'
property :current_state, as: 'currentState'
property :location, as: 'location'
property :current_state_time, as: 'currentStateTime'
hash :transform_name_mapping, as: 'transformNameMapping'
hash :labels, as: 'labels'
property :environment, as: 'environment', class: Google::Apis::DataflowV1b3::Environment, decorator: Google::Apis::DataflowV1b3::Environment::Representation
property :create_time, as: 'createTime'
end
end
class DynamicSourceSplit
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :residual, as: 'residual', class: Google::Apis::DataflowV1b3::DerivedSource, decorator: Google::Apis::DataflowV1b3::DerivedSource::Representation
property :primary, as: 'primary', class: Google::Apis::DataflowV1b3::DerivedSource, decorator: Google::Apis::DataflowV1b3::DerivedSource::Representation
end
end
class DerivedSource
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :source, as: 'source', class: Google::Apis::DataflowV1b3::Source, decorator: Google::Apis::DataflowV1b3::Source::Representation
property :derivation_mode, as: 'derivationMode'
end
end
class SourceOperationResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :split, as: 'split', class: Google::Apis::DataflowV1b3::SourceSplitResponse, decorator: Google::Apis::DataflowV1b3::SourceSplitResponse::Representation
property :get_metadata, as: 'getMetadata', class: Google::Apis::DataflowV1b3::SourceGetMetadataResponse, decorator: Google::Apis::DataflowV1b3::SourceGetMetadataResponse::Representation
end
end
class SideInputInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :tag, as: 'tag'
hash :kind, as: 'kind'
collection :sources, as: 'sources', class: Google::Apis::DataflowV1b3::Source, decorator: Google::Apis::DataflowV1b3::Source::Representation
end
end
class SendDebugCaptureResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
end
end
class ConcatPosition
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :position, as: 'position', class: Google::Apis::DataflowV1b3::Position, decorator: Google::Apis::DataflowV1b3::Position::Representation
property :index, as: 'index'
end
end
class CounterStructuredNameAndMetadata
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name', class: Google::Apis::DataflowV1b3::CounterStructuredName, decorator: Google::Apis::DataflowV1b3::CounterStructuredName::Representation
property :metadata, as: 'metadata', class: Google::Apis::DataflowV1b3::CounterMetadata, decorator: Google::Apis::DataflowV1b3::CounterMetadata::Representation
end
end
class WriteInstruction
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :sink, as: 'sink', class: Google::Apis::DataflowV1b3::Sink, decorator: Google::Apis::DataflowV1b3::Sink::Representation
property :input, as: 'input', class: Google::Apis::DataflowV1b3::InstructionInput, decorator: Google::Apis::DataflowV1b3::InstructionInput::Representation
end
end
class StreamingComputationRanges
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :range_assignments, as: 'rangeAssignments', class: Google::Apis::DataflowV1b3::KeyRangeDataDiskAssignment, decorator: Google::Apis::DataflowV1b3::KeyRangeDataDiskAssignment::Representation
property :computation_id, as: 'computationId'
end
end
class AutoscalingSettings
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :algorithm, as: 'algorithm'
property :max_num_workers, as: 'maxNumWorkers'
end
end
class ExecutionStageSummary
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :component_source, as: 'componentSource', class: Google::Apis::DataflowV1b3::ComponentSource, decorator: Google::Apis::DataflowV1b3::ComponentSource::Representation
property :kind, as: 'kind'
collection :output_source, as: 'outputSource', class: Google::Apis::DataflowV1b3::StageSource, decorator: Google::Apis::DataflowV1b3::StageSource::Representation
property :name, as: 'name'
collection :input_source, as: 'inputSource', class: Google::Apis::DataflowV1b3::StageSource, decorator: Google::Apis::DataflowV1b3::StageSource::Representation
property :id, as: 'id'
collection :component_transform, as: 'componentTransform', class: Google::Apis::DataflowV1b3::ComponentTransform, decorator: Google::Apis::DataflowV1b3::ComponentTransform::Representation
end
end
class SendWorkerMessagesRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :worker_messages, as: 'workerMessages', class: Google::Apis::DataflowV1b3::WorkerMessage, decorator: Google::Apis::DataflowV1b3::WorkerMessage::Representation
property :location, as: 'location'
end
end
class LogBucket
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :log, as: 'log'
property :count, :numeric_string => true, as: 'count'
end
end
class SourceSplitShard
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :derivation_mode, as: 'derivationMode'
property :source, as: 'source', class: Google::Apis::DataflowV1b3::Source, decorator: Google::Apis::DataflowV1b3::Source::Representation
end
end
class CpuTime
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :total_ms, :numeric_string => true, as: 'totalMs'
property :rate, as: 'rate'
property :timestamp, as: 'timestamp'
end
end
class Environment
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cluster_manager_api_service, as: 'clusterManagerApiService'
property :temp_storage_prefix, as: 'tempStoragePrefix'
collection :worker_pools, as: 'workerPools', class: Google::Apis::DataflowV1b3::WorkerPool, decorator: Google::Apis::DataflowV1b3::WorkerPool::Representation
property :dataset, as: 'dataset'
collection :experiments, as: 'experiments'
hash :version, as: 'version'
hash :internal_experiments, as: 'internalExperiments'
property :service_account_email, as: 'serviceAccountEmail'
hash :user_agent, as: 'userAgent'
hash :sdk_pipeline_options, as: 'sdkPipelineOptions'
end
end
class StreamingComputationTask
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :data_disks, as: 'dataDisks', class: Google::Apis::DataflowV1b3::MountedDataDisk, decorator: Google::Apis::DataflowV1b3::MountedDataDisk::Representation
property :task_type, as: 'taskType'
collection :computation_ranges, as: 'computationRanges', class: Google::Apis::DataflowV1b3::StreamingComputationRanges, decorator: Google::Apis::DataflowV1b3::StreamingComputationRanges::Representation
end
end
class SendDebugCaptureRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :location, as: 'location'
property :data, as: 'data'
property :component_id, as: 'componentId'
property :worker_id, as: 'workerId'
end
end
class GetDebugConfigResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :config, as: 'config'
end
end
class ComponentTransform
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :original_transform, as: 'originalTransform'
property :name, as: 'name'
property :user_name, as: 'userName'
end
end
class StreamingSetupTask
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :streaming_computation_topology, as: 'streamingComputationTopology', class: Google::Apis::DataflowV1b3::TopologyConfig, decorator: Google::Apis::DataflowV1b3::TopologyConfig::Representation
property :drain, as: 'drain'
property :worker_harness_port, as: 'workerHarnessPort'
property :receive_work_port, as: 'receiveWorkPort'
end
end
class PubsubLocation
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :topic, as: 'topic'
property :timestamp_label, as: 'timestampLabel'
property :subscription, as: 'subscription'
property :drop_late_data, as: 'dropLateData'
property :tracking_subscription, as: 'trackingSubscription'
property :with_attributes, as: 'withAttributes'
property :id_label, as: 'idLabel'
end
end
class WorkerHealthReport
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :pods, as: 'pods'
property :vm_startup_time, as: 'vmStartupTime'
property :report_interval, as: 'reportInterval'
property :vm_is_healthy, as: 'vmIsHealthy'
end
end
class JobMessage
# @private
# Machine-generated JSON wire mapping for JobMessage.
class Representation < Google::Apis::Core::JsonRepresentation
property :id, as: 'id'
property :message_importance, as: 'messageImportance'
property :message_text, as: 'messageText'
property :time, as: 'time'
end
end
class ParameterMetadata
# @private
# Machine-generated JSON wire mapping for ParameterMetadata; `regexes`
# is a repeated (collection) field.
class Representation < Google::Apis::Core::JsonRepresentation
collection :regexes, as: 'regexes'
property :label, as: 'label'
property :help_text, as: 'helpText'
property :is_optional, as: 'isOptional'
property :name, as: 'name'
end
end
end
end
end
| 40.417019 | 239 | 0.658032 |
module Erp
  module Contacts
    # Gem version of the erp contacts engine. Frozen so the shared
    # constant string cannot be mutated by callers.
    VERSION = '0.1.0'.freeze
  end
end
| 10.166667 | 21 | 0.639344 |
# Shared examples for binary file uploads over Net::FTP.
# NOTE: the shared flag must be the boolean `true`; the previous
# `shared: :true` only registered the group by accident (any Symbol is
# truthy).
describe :net_ftp_putbinaryfile, shared: true do
  before :each do
    @server = NetFTPSpecs::DummyFTP.new
    @server.serve_once

    @local_fixture_file = File.dirname(__FILE__) + "/../fixtures/putbinaryfile"
    @remote_tmp_file = tmp("binaryfile", false)

    @ftp = Net::FTP.new
    @ftp.connect(@server.hostname, @server.server_port)
    @ftp.binary = @binary_mode
  end

  after :each do
    @ftp.quit rescue nil
    @ftp.close
    @server.stop

    rm_r @remote_tmp_file
  end

  it "sends the STOR command to the server" do
    @ftp.send(@method, @local_fixture_file, "binary")
    @ftp.last_response.should == "200 OK, Data received. (STOR binary)\n"
  end

  it "sends the contents of the passed local_file, without modifications" do
    @ftp.send(@method, @local_fixture_file, "binary")

    remote_lines = File.readlines(@remote_tmp_file)
    local_lines = File.readlines(@local_fixture_file)

    remote_lines.should == local_lines
  end

  it "returns nil" do
    @ftp.send(@method, @local_fixture_file, "binary").should be_nil
  end

  describe "when passed a block" do
    it "yields the transmitted content as binary blocks of the passed size" do
      res = []
      @ftp.send(@method, @local_fixture_file, "binary", 10) { |x| res << x }
      res.should == [
        "This is an", " example f",
        "ile\nwhich ", "is going t",
        "o be trans", "mitted\nusi",
        "ng #putbin", "aryfile.\n"
      ]
    end
  end

  describe "when resuming an existing file" do
    before :each do
      File.open(@remote_tmp_file, "w") do |f|
        f << "This is an example file\n"
      end

      @ftp.resume = true
    end

    it "sends the remaining content of the passed local_file to the passed remote_file" do
      @ftp.send(@method, @local_fixture_file, "binary")
      File.read(@remote_tmp_file).should == File.read(@local_fixture_file)
    end

    describe "and the APPE command fails" do
      it "raises a Net::FTPProtoError when the response code is 550" do
        @server.should_receive(:appe).and_respond("Requested action not taken.")
        -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPProtoError)
      end

      it "raises a Net::FTPPermError when the response code is 500" do
        @server.should_receive(:appe).and_respond("500 Syntax error, command unrecognized.")
        -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
      end

      it "raises a Net::FTPPermError when the response code is 501" do
        @server.should_receive(:appe).and_respond("501 Syntax error, command unrecognized.")
        -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
      end

      it "raises a Net::FTPPermError when the response code is 502" do
        @server.should_receive(:appe).and_respond("502 Command not implemented.")
        -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
      end

      it "raises a Net::FTPTempError when the response code is 421" do
        @server.should_receive(:appe).and_respond("421 Service not available, closing control connection.")
        -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPTempError)
      end

      it "raises a Net::FTPPermError when the response code is 530" do
        @server.should_receive(:appe).and_respond("530 Not logged in.")
        -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
      end
    end
  end

  describe "when the STOR command fails" do
    it "raises a Net::FTPPermError when the response code is 532" do
      @server.should_receive(:stor).and_respond("532 Need account for storing files.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
    end

    it "raises a Net::FTPTempError when the response code is 450" do
      @server.should_receive(:stor).and_respond("450 Requested file action not taken.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPTempError)
    end

    it "raises a Net::FTPTempError when the response code is 452" do
      @server.should_receive(:stor).and_respond("452 Requested action not taken.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPTempError)
    end

    it "raises a Net::FTPPermError when the response code is 553" do
      @server.should_receive(:stor).and_respond("553 Requested action not taken.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
    end

    it "raises a Net::FTPPermError when the response code is 500" do
      @server.should_receive(:stor).and_respond("500 Syntax error, command unrecognized.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
    end

    it "raises a Net::FTPPermError when the response code is 501" do
      @server.should_receive(:stor).and_respond("501 Syntax error in parameters or arguments.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
    end

    it "raises a Net::FTPTempError when the response code is 421" do
      @server.should_receive(:stor).and_respond("421 Service not available, closing control connection.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPTempError)
    end

    it "raises a Net::FTPPermError when the response code is 530" do
      @server.should_receive(:stor).and_respond("530 Not logged in.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
    end
  end

  describe "when opening the data port fails" do
    it "raises a Net::FTPPermError when the response code is 500" do
      @server.should_receive(:eprt).and_respond("500 Syntax error, command unrecognized.")
      @server.should_receive(:port).and_respond("500 Syntax error, command unrecognized.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
    end

    it "raises a Net::FTPPermError when the response code is 501" do
      @server.should_receive(:eprt).and_respond("501 Syntax error in parameters or arguments.")
      @server.should_receive(:port).and_respond("501 Syntax error in parameters or arguments.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
    end

    it "raises a Net::FTPTempError when the response code is 421" do
      @server.should_receive(:eprt).and_respond("421 Service not available, closing control connection.")
      @server.should_receive(:port).and_respond("421 Service not available, closing control connection.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPTempError)
    end

    it "raises a Net::FTPPermError when the response code is 530" do
      @server.should_receive(:eprt).and_respond("530 Not logged in.")
      @server.should_receive(:port).and_respond("530 Not logged in.")
      -> { @ftp.send(@method, @local_fixture_file, "binary") }.should raise_error(Net::FTPPermError)
    end
  end
end
91525a726b5dc35f0662fd747086a8224ad7f4e2 | 4,162 | require "eventmachine"
require "sensu/extension/constants"
module Sensu
module Extension
# Abstract base class for Sensu extensions. Subclasses override #name,
# #description, #run and, optionally, #post_init / #stop; the loader
# discovers concrete extensions via Base.descendants.
class Base
# @!attribute [rw] logger
# @return [Object] logger provided by Sensu.
attr_accessor :logger
# @!attribute [rw] settings
# @return [Hash] settings hash provided by Sensu.
attr_accessor :settings
# Initialize the extension, call post_init() when the
# eventmachine reactor starts up, stop() when it stops.
def initialize
EM.next_tick do
post_init
end
EM.add_shutdown_hook do
stop
end
end
# Override this method to set the extension's name.
def name
"base"
end
# Override this method to set the extension's description.
def description
"extension description (change me)"
end
# Override this method to change the extension's definition, a
# hash. You probably don't need to touch this. The hash must
# contain :type ("extension") and :name.
def definition
{
:type => "extension",
:name => name
}
end
# Override this method to do something immediately after the
# eventmachine reactor is started. This method is great for
# setting up connections etc.
def post_init
true
end
# Override this method to do something when the extension is
# run, you must yield or call the callback with two parameters,
# an output string and exit code.
#
# @param data [Object, nil] provided by Sensu.
# @param options [Hash] provided by Sensu, may contain
# connection objects, eg. redis.
# @param callback [Proc] provided by Sensu, expecting to be
# called with two parameters, an output string and exit code.
def run(data=nil, options={}, &callback)
callback.call("noop", 0)
end
# Override this method to do something when the eventmachine
# reactor stops, such as connection or file cleanup.
def stop
true
end
# Retrieve the definition object corresponding to a key, acting
# like a Hash object. Do not override this method!
#
# @param key [String, Symbol]
# @return [Object] value for key.
def [](key)
definition[key.to_sym]
end
# Check to see if the definition has a key. Do not override this
# method!
#
# @param key [String, Symbol]
# @return [TrueClass, FalseClass]
def has_key?(key)
definition.has_key?(key.to_sym)
end
# Run the extension with a few safeties. This method wraps
# run() with a begin;rescue, and duplicates data before passing
# it to ensure the extension doesn't mutate the original. Any
# exception is reported through the callback with exit code 2
# (critical). Do not override this method!
#
# @param data [Object, nil] to dup() and pass to run(), if run()
# has an absolute arity of 1 or more.
# @param options [Hash] to pass to run(), if run() has an
# absolute arity of 2.
# @param callback [Proc] to pass to run().
def safe_run(data=nil, options={}, &callback)
begin
# arity is negative for optional params; abs gives the max we can pass
@run_arity ||= method(:run).arity.abs
arguments = []
arguments << (data ? data.dup : data) if @run_arity >= 1
arguments << options if @run_arity == 2
run(*arguments, &callback)
rescue => error
klass = error.class.name
backtrace = error.backtrace.map { |line| "\s\s#{line}" }.join("\n")
callback.call("#{klass}: #{error}\n#{backtrace}", 2)
end
end
# Determine classes that have inherited this class, used by the
# extension loader. Do not override this method!
#
# @return [Array<Class>]
def self.descendants
ObjectSpace.each_object(Class).select do |klass|
klass < self
end
end
end
# Create an extension class for each category from Base; chop drops
# the trailing "s" (e.g. :checks => Sensu::Extension::Check).
CATEGORIES.each do |category|
extension_type = category.to_s.chop
Sensu::Extension.const_set(extension_type.capitalize, Class.new(Base))
end
end
end
| 31.530303 | 77 | 0.606439 |
3902a314785cdcfae7c68b250278c5b98d8f4a93 | 7,185 | ######################################
# Test databases support
######################################
require 'test_helper'
# Integration test for DatabasesSupportHandler. Exercises internal
# database management (init / maintenance / JSON save) and external
# databases (one directory-based, one single-fasta file).
# NOTE(review): requires network access and the `svn` CLI (it lists the
# seqtrimbb-databases GitHub repo), plus a BBtools install at BBPATH.
class DatabasesTest < Minitest::Test
def test_database
# PATHs options hashes
setup_databases
db_path = File.join(OUTPUT_PATH,'DB')
# Authoritative database list is derived from the repository layout.
svn_call = IO.popen("svn ls https://github.com/rafnunser/seqtrimbb-databases/trunk/fastas | egrep '/'")
databases = svn_call.read.split(/\n/).map! { |db| db.chomp!('/') }
svn_call.close
json = File.join(db_path,'status_info','databases_status_info.json')
bb_path = BBPATH
bbtools = BBtools.new(bb_path)
## Initialize databases object
stbb_db = DatabasesSupportHandler.new({:workers => 1},db_path,bbtools)
# Init internal support
stbb_db.init_internal({:databases_action => 'replace', :databases_list => Array.new})
# Test info (databases, directory, and modified)
assert_equal(stbb_db.info['databases'],databases)
assert_equal(stbb_db.info['dir'],db_path)
assert_equal(stbb_db.info['modified'],true)
# Maintenance (test check)
stbb_db.maintenance_internal({:check_db => true})
# Test structure
['indices','status_info'].map { |d| assert_equal(Dir.exist?(File.join(db_path,d)),true) }
# Test update
databases.map { |d| assert_equal(Dir.exist?(File.join(db_path,'indices',d,'ref')),true) }
# Test info
assert_equal(stbb_db.info['indexed_databases'],databases)
assert_equal(stbb_db.info['installed_databases'],databases)
assert_equal(stbb_db.info['obsolete_databases'],Array.new)
# Build the expected per-database info hash for one random database.
random_database = databases.sample
random_info = {}
random_info['path'] = File.join(db_path,'fastas',random_database)
random_info['index'] = File.join(db_path,'indices',random_database)
random_info['update_error_file'] = File.join(db_path,'status_info','update_stderror_'+random_database+'.txt')
random_info['name'] = random_database
random_info['fastas'] = Dir[File.join(random_info['path'],"*.fasta*")].sort
random_info['list'] = random_info['fastas'].map { |fasta| File.basename(fasta).sub(/\Wfasta(\Wgz)?/,'').sub(/_/,' ') }
random_info['size'] = random_info['fastas'].map { |file| File.size?(file) }.inject(:+)
random_info['index_size'] = Dir[File.join(random_info['index'],'ref',"*/*/*")].map { |file| File.size?(file) }.inject(:+)
assert_equal(stbb_db.info[random_database],random_info)
# Test Save
# NOTE(review): the line continuation below assigns save_json's return
# value back to +json+; if only the side effect was intended, the
# dangling "json =" looks like leftover editing — confirm.
json =
stbb_db.save_json(stbb_db.info,json)
old_info = stbb_db.info
old_info.delete('modified')
# Reinit internal support and test changes
stbb_db = DatabasesSupportHandler.new({:workers => 1},db_path,bbtools)
stbb_db.init_internal({:databases_action => 'replace', :databases_list => Array.new})
stbb_db.maintenance_internal({:check_db => true})
assert_equal(stbb_db.info,old_info)
# Exercise replace / add / remove / 'default' list actions.
stbb_db = DatabasesSupportHandler.new({:workers => 1},db_path,bbtools)
arr_sample = databases.sample(4)
stbb_db.init_internal({:databases_action => 'replace', :databases_list => arr_sample})
assert_equal(stbb_db.info['databases'].sort,arr_sample.sort)
stbb_db.init_internal({:databases_action => 'add', :databases_list => databases - arr_sample})
assert_equal(stbb_db.info['databases'].sort,databases.sort)
stbb_db.init_internal({:databases_action => 'remove', :databases_list => databases - arr_sample})
assert_equal(stbb_db.info['databases'].sort,arr_sample.sort)
stbb_db.init_internal({:databases_action => 'replace', :databases_list => ['default']})
assert_equal(stbb_db.info['databases'].sort,databases.sort)
# Init external support
## build two external databases(One folder, one file)
sample_file =random_info['fastas'].sample
ext_dbs = [File.join(db_path,'external_database'),File.join(db_path,File.basename(sample_file))]
FileUtils.cp sample_file,db_path
Dir.mkdir(ext_dbs[0])
FileUtils.cp Dir[File.join(db_path,'fastas',databases.sample,"*.fasta*")],ext_dbs[0]
# Set the external databases
stbb_db.set_external(ext_dbs)
assert_equal(stbb_db.external_db_info['databases'],ext_dbs)
# Maintenance external
stbb_db.maintenance_external(ext_dbs)
assert_equal(stbb_db.external_db_info['indexed_databases'],ext_dbs)
assert_equal(stbb_db.external_db_info['obsolete_databases'],Array.new)
# Directory-based external database: index lives inside the folder.
folder_database = ext_dbs[0]
folder_info = {}
folder_info['name'] = File.basename(ext_dbs[0]).gsub(/\Wfasta(\Wgz)?/,'')
folder_info['path'] = folder_database
folder_info['index'] = File.join(folder_database,'index')
folder_info['update_error_file'] = File.join(folder_info['index'],'update_stderror_'+folder_info['name']+'.txt')
folder_info['fastas'] = Dir[File.join(folder_info['path'],"*.fasta*")].sort
folder_info['list'] = folder_info['fastas'].map { |fasta| File.basename(fasta).sub(/\Wfasta(\Wgz)?/,'').sub(/_/,' ') }
folder_info['size'] = folder_info['fastas'].map { |file| File.size?(file) }.inject(:+)
folder_info['index_size'] = Dir[File.join(folder_info['index'],'ref',"*/*/*")].map { |file| File.size?(file) }.inject(:+)
assert_equal(stbb_db.external_db_info[folder_database],folder_info)
# Single-file external database: index lives next to the file.
file_database = ext_dbs[1]
file_info = {}
file_info['name'] = File.basename(ext_dbs[1]).gsub(/\Wfasta(\Wgz)?/,'')
file_info['path'] = file_database
file_info['index'] = File.join(File.dirname(file_database),'index')
file_info['update_error_file'] = File.join(file_info['index'],'update_stderror_'+file_info['name']+'.txt')
file_info['fastas'] = [file_database]
file_info['list'] = file_info['fastas'].map { |fasta| File.basename(fasta).sub(/\Wfasta(\Wgz)?/,'').sub(/_/,' ') }
file_info['size'] = file_info['fastas'].map { |file| File.size?(file) }.inject(:+)
file_info['index_size'] = Dir[File.join(file_info['index'],'ref',"*/*/*")].map { |file| File.size?(file) }.inject(:+)
assert_equal(stbb_db.external_db_info[file_database],file_info)
# A second maintenance pass must be a no-op (idempotent).
stbb_db.maintenance_external(ext_dbs)
#CLEAN UP
clean_up
end
end
| 62.478261 | 138 | 0.578427 |
6103d66b6240cca9a421b40dd3c6d69af2e9ebfc | 190 | require 'cmor/tags/configuration'
require 'cmor/tags/engine'
module Cmor
module Tags
# Configuration accessors are mixed in from Cmor::Tags::Configuration
# (required above).
extend Configuration
end
end
# Register the Tags module in the central Cmor configuration registry.
Cmor.configure { |c| c.register_configuration(:tags, Cmor::Tags) }
792505d0c6b3b2a2db8a6f412b608c66e803b17f | 1,355 | #
# Be sure to run `pod lib lint XibView.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'XibView'
s.version = '0.5.0'
s.summary = 'Live-rendering views from .xib files.'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
# NOTE(review): the description below is still the CocoaPods template
# placeholder — replace it before publishing (pod lib lint warns on it).
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/hot3eed/XibView'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'Apache 2.0', :file => 'LICENSE' }
s.author = { 'Abdelrahman Eid' => '[email protected]' }
s.source = { :git => 'https://github.com/hot3eed/XibView.git', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
s.source_files = 'XibView/Classes/**/*'
s.swift_versions = '5.0', '4.2', '4.0'
end
| 37.638889 | 99 | 0.63321 |
# Uploader for image attachments: local filesystem storage, MiniMagick
# processing, and random token filenames generated in #filename.
class ImageFileUploader < CarrierWave::Uploader::Base
  include CarrierWave::MiniMagick

  # Choose what kind of storage to use for this uploader:
  storage :file
  # storage :fog

  # All uploads share one directory; filename uniqueness comes from the
  # random token below.
  def store_dir
    "uploads/images"
  end

  # Records image dimensions on the model (mixed in elsewhere —
  # presumably by a CarrierWave dimensions module; confirm).
  process :store_dimensions

  # Thumbnail geometries come from the IceBlog configuration.
  version :thumb do
    process resize_to_fit_png_scale: [IceBlog::config(:thumb_width),
                                      IceBlog::config(:thumb_height)]
  end

  version :large_thumb do
    process resize_to_fit_png_scale: [IceBlog::config(:large_thumb_width),
                                      IceBlog::config(:large_thumb_height)]
  end

  # White list of extensions which are allowed to be uploaded.
  def extension_whitelist
    %w(jpg jpeg gif png)
  end

  # Store files under a random token so original (possibly colliding or
  # unsafe) filenames never reach the filesystem.
  # Avoid using model.id or version_name here, see uploader/store.rb for details.
  def filename
    "#{secure_token(12)}.#{file.extension}" if original_filename.present?
  end

  protected

  # Returns the model's cached token; otherwise generates a hex token of
  # +length+ characters that is unique among ImageFile names and
  # memoizes it on the model.
  def secure_token(length = 16)
    return model.name if model.name

    token = ''
    loop do
      token = SecureRandom.hex(length / 2)
      break if ImageFile.where(name: token).count == 0
    end
    model.name = token
  end
end
| 29.471429 | 112 | 0.667475 |
79daa2993cd8f655e9a829549eae45f33271b4d2 | 182 | require 'test_helper'
# Smoke test: the aggregate show endpoint responds with HTTP success.
class AggregateControllerTest < ActionDispatch::IntegrationTest
test "should get show" do
get aggregate_show_url
assert_response :success
end
end
| 18.2 | 63 | 0.785714 |
61e04faca8d6377c6ba5add8a8cd2030d4c6e816 | 3,163 | # -*- encoding: us-ascii -*-
# Shared examples for String#succ / String#next (@method is supplied by
# the including spec file).
describe :string_succ, shared: true do
it "returns an empty string for empty strings" do
"".send(@method).should == ""
end
it "returns the successor by increasing the rightmost alphanumeric (digit => digit, letter => letter with same case)" do
"abcd".send(@method).should == "abce"
"THX1138".send(@method).should == "THX1139"
"<<koala>>".send(@method).should == "<<koalb>>"
"==A??".send(@method).should == "==B??"
end
it "increases non-alphanumerics (via ascii rules) if there are no alphanumerics" do
"***".send(@method).should == "**+"
"**`".send(@method).should == "**a"
end
it "increases the next best alphanumeric (jumping over non-alphanumerics) if there is a carry" do
"dz".send(@method).should == "ea"
"HZ".send(@method).should == "IA"
"49".send(@method).should == "50"
"izz".send(@method).should == "jaa"
"IZZ".send(@method).should == "JAA"
"699".send(@method).should == "700"
"6Z99z99Z".send(@method).should == "7A00a00A"
"1999zzz".send(@method).should == "2000aaa"
"NZ/[]ZZZ9999".send(@method).should == "OA/[]AAA0000"
end
it "increases the next best character if there is a carry for non-alphanumerics" do
"(\xFF".send(@method).should == ")\x00"
"`\xFF".send(@method).should == "a\x00"
"<\xFF\xFF".send(@method).should == "=\x00\x00"
end
it "adds an additional character (just left to the last increased one) if there is a carry and no character left to increase" do
"z".send(@method).should == "aa"
"Z".send(@method).should == "AA"
"9".send(@method).should == "10"
"zz".send(@method).should == "aaa"
"ZZ".send(@method).should == "AAA"
"99".send(@method).should == "100"
"9Z99z99Z".send(@method).should == "10A00a00A"
"ZZZ9999".send(@method).should == "AAAA0000"
"/[]9999".send(@method).should == "/[]10000"
"/[]ZZZ9999".send(@method).should == "/[]AAAA0000"
"Z/[]ZZZ9999".send(@method).should == "AA/[]AAA0000"
# non-alphanumeric cases
"\xFF".send(@method).should == "\x01\x00"
"\xFF\xFF".send(@method).should == "\x01\x00\x00"
end
it "returns subclass instances when called on a subclass" do
StringSpecs::MyString.new("").send(@method).should be_an_instance_of(StringSpecs::MyString)
StringSpecs::MyString.new("a").send(@method).should be_an_instance_of(StringSpecs::MyString)
StringSpecs::MyString.new("z").send(@method).should be_an_instance_of(StringSpecs::MyString)
end
# NOTE(review): Object#taint was a no-op from Ruby 2.7 and removed in
# 3.2 — this example only applies to older rubies; guard accordingly.
it "taints the result if self is tainted" do
["", "a", "z", "Z", "9", "\xFF", "\xFF\xFF"].each do |s|
s.taint.send(@method).tainted?.should == true
end
end
end
# Shared examples for the in-place variants String#succ! / String#next!.
describe :string_succ_bang, shared: true do
it "is equivalent to succ, but modifies self in place (still returns self)" do
["", "abcd", "THX1138"].each do |s|
r = s.dup.send(@method)
s.send(@method).should equal(s)
s.should == r
end
end
it "raises a RuntimeError if self is frozen" do
lambda { "".freeze.send(@method) }.should raise_error(RuntimeError)
lambda { "abcd".freeze.send(@method) }.should raise_error(RuntimeError)
end
end
| 35.539326 | 130 | 0.628201 |
ab825c6c63fb5a1643b8f1cb5d46244cf1575853 | 333 | # frozen_string_literal: true
require 'test_helper'
# Exercises JobApi#fetch against a recorded HTTP interaction (VCR).
class JobApiTest < ActiveSupport::TestCase
it 'should get the jobs for a given project_id' do
# Requests are matched on :path only, so host/query differences do
# not invalidate the recorded 'project_jobs' cassette.
VCR.use_cassette('project_jobs', match_requests_on: [:path]) do
api = JobApi.new(id: 1, page: 1).fetch
# presumably the API pages at 20 entries per page — TODO confirm
assert JSON.parse(api)['entries'].length < 20
end
end
end
| 25.615385 | 67 | 0.705706 |
acfca8394671a35f90f47356d3d266e85201d0ac | 11,251 | # encoding: UTF-8
# This file contains data derived from the IANA Time Zone Database
# (http://www.iana.org/time-zones).
module TZInfo
module Data
module Definitions
module Africa
module Ceuta
include TimezoneDefinition
# Machine-generated from the IANA Time Zone Database — do not edit
# by hand; regenerate from upstream tzdata instead.
# tz.offset args: identifier, base UTC offset (s), DST offset (s), abbrev.
# tz.transition args: year, month, offset id, timestamp (extra integer
# pair, where present, encodes the same instant as a Rational).
timezone 'Africa/Ceuta' do |tz|
tz.offset :o0, -1276, 0, :LMT
tz.offset :o1, 0, 0, :WET
tz.offset :o2, 0, 3600, :WEST
tz.offset :o3, 3600, 0, :CET
tz.offset :o4, 3600, 3600, :CEST
tz.transition 1901, 1, :o1, -2177452800, 4830771, 2
tz.transition 1918, 5, :o2, -1630112400, 58121291, 24
tz.transition 1918, 10, :o1, -1616810400, 29062493, 12
tz.transition 1924, 4, :o2, -1442451600, 58173419, 24
tz.transition 1924, 10, :o1, -1427673600, 4848127, 2
tz.transition 1926, 4, :o2, -1379293200, 58190963, 24
tz.transition 1926, 10, :o1, -1364774400, 4849583, 2
tz.transition 1927, 4, :o2, -1348448400, 58199531, 24
tz.transition 1927, 10, :o1, -1333324800, 4850311, 2
tz.transition 1928, 4, :o2, -1316390400, 4850703, 2
tz.transition 1928, 10, :o1, -1301270400, 4851053, 2
tz.transition 1967, 6, :o2, -81432000, 2439645, 1
tz.transition 1967, 9, :o1, -71110800, 58554347, 24
tz.transition 1974, 6, :o2, 141264000
tz.transition 1974, 8, :o1, 147222000
tz.transition 1976, 5, :o2, 199756800
tz.transition 1976, 7, :o1, 207702000
tz.transition 1977, 5, :o2, 231292800
tz.transition 1977, 9, :o1, 244249200
tz.transition 1978, 6, :o2, 265507200
tz.transition 1978, 8, :o1, 271033200
tz.transition 1984, 3, :o3, 448243200
tz.transition 1986, 3, :o4, 512528400
tz.transition 1986, 9, :o3, 528253200
tz.transition 1987, 3, :o4, 543978000
tz.transition 1987, 9, :o3, 559702800
tz.transition 1988, 3, :o4, 575427600
tz.transition 1988, 9, :o3, 591152400
tz.transition 1989, 3, :o4, 606877200
tz.transition 1989, 9, :o3, 622602000
tz.transition 1990, 3, :o4, 638326800
tz.transition 1990, 9, :o3, 654656400
tz.transition 1991, 3, :o4, 670381200
tz.transition 1991, 9, :o3, 686106000
tz.transition 1992, 3, :o4, 701830800
tz.transition 1992, 9, :o3, 717555600
tz.transition 1993, 3, :o4, 733280400
tz.transition 1993, 9, :o3, 749005200
tz.transition 1994, 3, :o4, 764730000
tz.transition 1994, 9, :o3, 780454800
tz.transition 1995, 3, :o4, 796179600
tz.transition 1995, 9, :o3, 811904400
tz.transition 1996, 3, :o4, 828234000
tz.transition 1996, 10, :o3, 846378000
tz.transition 1997, 3, :o4, 859683600
tz.transition 1997, 10, :o3, 877827600
tz.transition 1998, 3, :o4, 891133200
tz.transition 1998, 10, :o3, 909277200
tz.transition 1999, 3, :o4, 922582800
tz.transition 1999, 10, :o3, 941331600
tz.transition 2000, 3, :o4, 954032400
tz.transition 2000, 10, :o3, 972781200
tz.transition 2001, 3, :o4, 985482000
tz.transition 2001, 10, :o3, 1004230800
tz.transition 2002, 3, :o4, 1017536400
tz.transition 2002, 10, :o3, 1035680400
tz.transition 2003, 3, :o4, 1048986000
tz.transition 2003, 10, :o3, 1067130000
tz.transition 2004, 3, :o4, 1080435600
tz.transition 2004, 10, :o3, 1099184400
tz.transition 2005, 3, :o4, 1111885200
tz.transition 2005, 10, :o3, 1130634000
tz.transition 2006, 3, :o4, 1143334800
tz.transition 2006, 10, :o3, 1162083600
tz.transition 2007, 3, :o4, 1174784400
tz.transition 2007, 10, :o3, 1193533200
tz.transition 2008, 3, :o4, 1206838800
tz.transition 2008, 10, :o3, 1224982800
tz.transition 2009, 3, :o4, 1238288400
tz.transition 2009, 10, :o3, 1256432400
tz.transition 2010, 3, :o4, 1269738000
tz.transition 2010, 10, :o3, 1288486800
tz.transition 2011, 3, :o4, 1301187600
tz.transition 2011, 10, :o3, 1319936400
tz.transition 2012, 3, :o4, 1332637200
tz.transition 2012, 10, :o3, 1351386000
tz.transition 2013, 3, :o4, 1364691600
tz.transition 2013, 10, :o3, 1382835600
tz.transition 2014, 3, :o4, 1396141200
tz.transition 2014, 10, :o3, 1414285200
tz.transition 2015, 3, :o4, 1427590800
tz.transition 2015, 10, :o3, 1445734800
tz.transition 2016, 3, :o4, 1459040400
tz.transition 2016, 10, :o3, 1477789200
tz.transition 2017, 3, :o4, 1490490000
tz.transition 2017, 10, :o3, 1509238800
tz.transition 2018, 3, :o4, 1521939600
tz.transition 2018, 10, :o3, 1540688400
tz.transition 2019, 3, :o4, 1553994000
tz.transition 2019, 10, :o3, 1572138000
tz.transition 2020, 3, :o4, 1585443600
tz.transition 2020, 10, :o3, 1603587600
tz.transition 2021, 3, :o4, 1616893200
tz.transition 2021, 10, :o3, 1635642000
tz.transition 2022, 3, :o4, 1648342800
tz.transition 2022, 10, :o3, 1667091600
tz.transition 2023, 3, :o4, 1679792400
tz.transition 2023, 10, :o3, 1698541200
tz.transition 2024, 3, :o4, 1711846800
tz.transition 2024, 10, :o3, 1729990800
tz.transition 2025, 3, :o4, 1743296400
tz.transition 2025, 10, :o3, 1761440400
tz.transition 2026, 3, :o4, 1774746000
tz.transition 2026, 10, :o3, 1792890000
tz.transition 2027, 3, :o4, 1806195600
tz.transition 2027, 10, :o3, 1824944400
tz.transition 2028, 3, :o4, 1837645200
tz.transition 2028, 10, :o3, 1856394000
tz.transition 2029, 3, :o4, 1869094800
tz.transition 2029, 10, :o3, 1887843600
tz.transition 2030, 3, :o4, 1901149200
tz.transition 2030, 10, :o3, 1919293200
tz.transition 2031, 3, :o4, 1932598800
tz.transition 2031, 10, :o3, 1950742800
tz.transition 2032, 3, :o4, 1964048400
tz.transition 2032, 10, :o3, 1982797200
tz.transition 2033, 3, :o4, 1995498000
tz.transition 2033, 10, :o3, 2014246800
tz.transition 2034, 3, :o4, 2026947600
tz.transition 2034, 10, :o3, 2045696400
tz.transition 2035, 3, :o4, 2058397200
tz.transition 2035, 10, :o3, 2077146000
tz.transition 2036, 3, :o4, 2090451600
tz.transition 2036, 10, :o3, 2108595600
tz.transition 2037, 3, :o4, 2121901200
tz.transition 2037, 10, :o3, 2140045200
tz.transition 2038, 3, :o4, 2153350800, 59172253, 24
tz.transition 2038, 10, :o3, 2172099600, 59177461, 24
tz.transition 2039, 3, :o4, 2184800400, 59180989, 24
tz.transition 2039, 10, :o3, 2203549200, 59186197, 24
tz.transition 2040, 3, :o4, 2216250000, 59189725, 24
tz.transition 2040, 10, :o3, 2234998800, 59194933, 24
tz.transition 2041, 3, :o4, 2248304400, 59198629, 24
tz.transition 2041, 10, :o3, 2266448400, 59203669, 24
tz.transition 2042, 3, :o4, 2279754000, 59207365, 24
tz.transition 2042, 10, :o3, 2297898000, 59212405, 24
tz.transition 2043, 3, :o4, 2311203600, 59216101, 24
tz.transition 2043, 10, :o3, 2329347600, 59221141, 24
tz.transition 2044, 3, :o4, 2342653200, 59224837, 24
tz.transition 2044, 10, :o3, 2361402000, 59230045, 24
tz.transition 2045, 3, :o4, 2374102800, 59233573, 24
tz.transition 2045, 10, :o3, 2392851600, 59238781, 24
tz.transition 2046, 3, :o4, 2405552400, 59242309, 24
tz.transition 2046, 10, :o3, 2424301200, 59247517, 24
tz.transition 2047, 3, :o4, 2437606800, 59251213, 24
tz.transition 2047, 10, :o3, 2455750800, 59256253, 24
tz.transition 2048, 3, :o4, 2469056400, 59259949, 24
tz.transition 2048, 10, :o3, 2487200400, 59264989, 24
tz.transition 2049, 3, :o4, 2500506000, 59268685, 24
tz.transition 2049, 10, :o3, 2519254800, 59273893, 24
tz.transition 2050, 3, :o4, 2531955600, 59277421, 24
tz.transition 2050, 10, :o3, 2550704400, 59282629, 24
tz.transition 2051, 3, :o4, 2563405200, 59286157, 24
tz.transition 2051, 10, :o3, 2582154000, 59291365, 24
tz.transition 2052, 3, :o4, 2595459600, 59295061, 24
tz.transition 2052, 10, :o3, 2613603600, 59300101, 24
tz.transition 2053, 3, :o4, 2626909200, 59303797, 24
tz.transition 2053, 10, :o3, 2645053200, 59308837, 24
tz.transition 2054, 3, :o4, 2658358800, 59312533, 24
tz.transition 2054, 10, :o3, 2676502800, 59317573, 24
tz.transition 2055, 3, :o4, 2689808400, 59321269, 24
tz.transition 2055, 10, :o3, 2708557200, 59326477, 24
tz.transition 2056, 3, :o4, 2721258000, 59330005, 24
tz.transition 2056, 10, :o3, 2740006800, 59335213, 24
tz.transition 2057, 3, :o4, 2752707600, 59338741, 24
tz.transition 2057, 10, :o3, 2771456400, 59343949, 24
tz.transition 2058, 3, :o4, 2784762000, 59347645, 24
tz.transition 2058, 10, :o3, 2802906000, 59352685, 24
tz.transition 2059, 3, :o4, 2816211600, 59356381, 24
tz.transition 2059, 10, :o3, 2834355600, 59361421, 24
tz.transition 2060, 3, :o4, 2847661200, 59365117, 24
tz.transition 2060, 10, :o3, 2866410000, 59370325, 24
tz.transition 2061, 3, :o4, 2879110800, 59373853, 24
tz.transition 2061, 10, :o3, 2897859600, 59379061, 24
tz.transition 2062, 3, :o4, 2910560400, 59382589, 24
tz.transition 2062, 10, :o3, 2929309200, 59387797, 24
tz.transition 2063, 3, :o4, 2942010000, 59391325, 24
tz.transition 2063, 10, :o3, 2960758800, 59396533, 24
tz.transition 2064, 3, :o4, 2974064400, 59400229, 24
tz.transition 2064, 10, :o3, 2992208400, 59405269, 24
tz.transition 2065, 3, :o4, 3005514000, 59408965, 24
tz.transition 2065, 10, :o3, 3023658000, 59414005, 24
tz.transition 2066, 3, :o4, 3036963600, 59417701, 24
tz.transition 2066, 10, :o3, 3055712400, 59422909, 24
tz.transition 2067, 3, :o4, 3068413200, 59426437, 24
tz.transition 2067, 10, :o3, 3087162000, 59431645, 24
tz.transition 2068, 3, :o4, 3099862800, 59435173, 24
tz.transition 2068, 10, :o3, 3118611600, 59440381, 24
end
end
end
end
end
end
| 52.574766 | 66 | 0.580482 |
# Upload storage: in production, CarrierWave stores files on S3 via fog;
# other environments keep the uploader's default (local) storage.
if Rails.env.production?
  CarrierWave.configure do |config|
    config.fog_credentials = {
      :provider              => 'AWS',
      :region                => ENV['S3_REGION'],
      :aws_access_key_id     => ENV['S3_ACCESS_KEY'],
      :aws_secret_access_key => ENV['S3_SECRET_KEY']
    }
    config.fog_directory = ENV['S3_BUCKET']
  end
end
| 27.25 | 50 | 0.626911 |
# Adds the Paperclip attachment columns for :snippet to the quotes table.
class AddAttachmentSnippetToQuotes < ActiveRecord::Migration
  def self.up
    change_table :quotes do |t|
      t.attachment :snippet
    end
  end

  def self.down
    remove_attachment :quotes, :snippet
  end
end
| 18 | 60 | 0.717593 |
1a44c9a94009898b1744c735a6b19af1eb72b208 | 288 |
# Migration adding the mandatory executor reference to execution journals.
# The superclass is picked at load time: plain ActiveRecord::Migration on
# Rails < 5.1, the versioned form (required from 5.1 on) otherwise.
class RedcasePatchJournal < (Rails.version < '5.1' ? ActiveRecord::Migration : ActiveRecord::Migration[4.2])
  def self.up
    add_column :execution_journals, :executor_id, :integer, null: false
  end

  def self.down
    remove_column :execution_journals, :executor_id
  end
end
| 22.153846 | 106 | 0.722222 |
2197733b2b2a04682413de54cb6bffe918c9b97e | 3,922 | module GitlabCtl
class Backup
attr_reader :etc_backup_path, :etc_path, :backup_keep_time, :remove_timestamp
def initialize(options = {})
backup_path = options[:backup_path].nil? ? '/etc/gitlab/config_backup' : options[:backup_path]
@etc_backup_path = File.expand_path(backup_path)
@etc_path = '/etc/gitlab'
@backup_keep_time = node_attributes.dig('gitlab', 'gitlab-rails', 'backup_keep_time').to_i
@remove_timestamp = Time.now - @backup_keep_time
@delete_old_backups = options[:delete_old_backups]
@removable_archives = []
end
# attribute methods
def archive_path
@archive_path = File.join(etc_backup_path, archive_name)
end
def archive_name
@archive_name ||= "gitlab_config_#{Time.now.strftime('%s_%Y_%m_%d')}.tar"
end
def node_attributes
@node_attributes ||= GitlabCtl::Util.get_node_attributes
rescue GitlabCtl::Errors::NodeError => e
warn(e.message)
warn("Defaulting to keeping all backups")
{}
end
def wants_pruned
@delete_old_backups.nil? ? true : @delete_old_backups
end
# Lazily computes (and caches in @removable_archives) the list of backup
# archives old enough to delete under the current retention policy.
def removable_archives
return @removable_archives unless @removable_archives.empty?
Dir.chdir(@etc_backup_path) do
Dir.glob("gitlab_config_*.tar").map do |file_name|
# Only consider files matching the expected archive name pattern:
# gitlab_config_<10-digit unix timestamp>_<yyyy_mm_dd>.tar
next unless file_name =~ %r{gitlab_config_(\d{10})_(\d{4}_\d{2}_\d{2}).tar}
file_timestamp = Regexp.last_match(1).to_i
# A backup_keep_time of 0 means "keep forever" — nothing is removable.
next if @backup_keep_time.zero?
# Keep anything newer than the retention cutoff.
next if Time.at(file_timestamp) >= @remove_timestamp
file_path = File.expand_path(file_name, @etc_backup_path)
@removable_archives.push(file_path)
end
end
@removable_archives
end
# class methods
# Convenience entry point: build a backup job from the given options,
# create the configuration archive, then prune older archives per policy.
def self.perform(options = {})
  new(options).tap(&:perform).prune
end
def prune
if wants_pruned && backup_keep_time.positive?
remove_backups
else
puts "Keeping all older configuration backups"
end
end
def perform(options = {})
abort "Could not find '#{etc_path}' directory. Is your package installed correctly?" unless File.exist?(etc_path)
unless File.exist?(etc_backup_path)
puts "Could not find '#{etc_backup_path}' directory. Creating."
FileUtils.mkdir(etc_backup_path, mode: 0700)
begin
FileUtils.chown('root', 'root', etc_backup_path)
rescue Errno::EPERM
warn("Warning: Could not change owner of #{etc_backup_path} to 'root:root'. As a result your " \
'backups may be accessible to some non-root users.')
end
end
warn("WARNING: #{etc_backup_path} may be read by non-root users") unless secure?(etc_backup_path)
puts "Running configuration backup\nCreating configuration backup archive: #{archive_name}"
command = %W(tar --absolute-names --dereference --verbose --create --file #{archive_path}
--exclude #{etc_backup_path} -- #{etc_path})
status = system(*command)
FileUtils.chmod(0600, archive_path) if File.exist?(archive_path)
exit!(1) unless status
puts "Configuration backup archive complete: #{archive_path}"
end
def remove_backups
# delete old backups
removed_count = 0
puts "Removing configuration backups older than #{@remove_timestamp} ..."
removable_archives.each do |archive_file|
FileUtils.rm(archive_file)
puts " Removed #{archive_file}"
removed_count += 1
rescue StandardError => e
warn("WARNING: Deleting file #{archive_file} failed: #{e.message}")
end
puts "done. Removed #{removed_count} older configuration backups."
end
# True when the path is owned by root and is not world-readable.
# File::Stat#world_readable? returns the permission bits (truthy) when
# anyone may read the file and nil otherwise, so nil is the secure answer.
def secure?(path)
  info = File.stat(path)
  info.uid.zero? && info.world_readable?.nil?
end
end
end
| 31.376 | 119 | 0.657318 |
283eca72b605dbcaa4c1c780e3e6c2130728c7db | 534 | group 'docker' do
action :create
end
# Install and start the Docker daemon (docker cookbook resource).
docker_service 'default' do
action [:create, :start]
end
# Download docker-compose 1.24.1, mark it executable and sanity-check it;
# guarded so the download only happens when the binary is absent.
execute 'install_docker_compose' do
command 'curl -L "https://github.com/docker/compose/releases/download/1.24.1/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose && chmod +x /usr/local/bin/docker-compose && /usr/local/bin/docker-compose version'
live_stream true
not_if { ::File.exist?('/usr/local/bin/docker-compose') }
end
# Grant the preludian user access to the Docker socket via group membership.
# NOTE(review): unguarded, so this runs on every converge; usermod -aG is
# idempotent in effect, but consider a not_if for cleaner runs.
execute 'docker_post_actions' do
command 'usermod -aG docker preludian'
end
6a4f50acaf7b03b4991c2e4c1a8cf2cfa7a2d90a | 823 | require "seqtrim_action"
########################################################
# Author: Almudena Bocinos Rioboo
#
# Defines the main methods that are necessary to execute PluginPairedReads
# Inherit: Plugin
########################################################
class ActionPairedReads < SeqtrimAction
  # start_pos/end_pos delimit the matched linker region within the read.
  def initialize(start_pos, end_pos)
    super(start_pos, end_pos)
    @cut = true # the matched region is cut out of the sequence
  end

  # Splits the read's fasta string around the matched region: the bases
  # before @start_pos and the bases after @end_pos form the two paired
  # subsequences. Returns the right-hand fragment (last expression).
  def apply_to(seq)
    $LOG.debug "Applying #{self.class}"
    fasta = seq.seq_fasta
    left_fragment = fasta[0, @start_pos - 1]
    right_fragment = fasta[@end_pos + 1, fasta.length - 1]
  end
end
| 28.37931 | 127 | 0.523694 |
39927212588690c37a7f3cd1ced7ba316eada0dc | 2,341 | class Expense < ApplicationRecord
belongs_to :user
belongs_to :category
validates :exp_amount, presence: true
validates :exp_amount, numericality: true
validates :category_name, presence: true
validates :payee, presence: true
validate :exp_date_valid
# Custom validation: rejects a blank exp_date or one that is not a real
# calendar date; a single combined message covers both failure modes.
def exp_date_valid
if exp_date.nil? || exp_date_format_not_valid?
errors.add(:exp_date, "Cannot be blank and must be provided in yyyy-mm-dd format and must be a valid date.")
end
end
# Returns true when exp_date does not name a real calendar date in
# yyyy-mm-dd form, false when it does, and nil (falsy) when exp_date is
# blank — the caller treats blank and malformed the same way.
def exp_date_format_not_valid?
  return unless exp_date

  year, month, day = exp_date.to_s.split('-')
  !Date.valid_date?(year.to_i, month.to_i, day.to_i)
end
#used in expense creation
def category_name=(name)
self.category = Category.find_or_create_by(name: name)
self.save
end
def category_name
self.category ? self.category.name : nil
end
#used in expense creation form
def category_new=(new_category)
empty = new_category[:name].nil? || new_category[:name].blank?
if !empty
category = Category.find_or_create_by(name: new_category[:name])
self.category = category
end
end
#filter expenses by category
def self.by_category(category_id)
where(category: category_id).order("exp_date DESC")
end
#filter expenses to just today's
def self.from_today
where("exp_date =?", Date.today)
end
#filter results to just this month's
def self.from_this_month
month_number = Date.today.month
month_beginning = Date.new(Date.today.year, month_number)
month_ending = month_beginning.end_of_month
where("exp_date <= ? AND exp_date >= ?", month_ending, month_beginning).order("exp_date DESC")
end
#sum all provided expenses
def self.sum_total
self.sum("exp_amount")
end
def self.last_five
self.all.order("exp_date DESC").limit(5)
end
def self.largest
self.order("exp_amount DESC").limit(1)
end
# Sums expense amounts grouped by category.
#
# With a category_id, returns the total for that one category (nil when
# it has no expenses). Without an argument, returns a hash of
# { category_id => sum } pairs for all categories.
#
# BUG FIX: this was previously two definitions with the same name; the
# later zero-argument version silently overwrote the one taking a
# category_id, so any call passing an id raised ArgumentError. Merged
# into a single method with an optional parameter so both call forms work.
def self.sum_by_specific_category(category_id = nil)
  if category_id
    where("category_id = ?", category_id).group("category_id").sum("exp_amount").values[0]
  else
    group("category_id").sum("exp_amount")
  end
end
def self.category_w_largest_expense_sum
#returns hash of category id with sum of expenses
group("category_id").sum("exp_amount").max_by{|k,v| v}
end
end
| 25.445652 | 115 | 0.710807 |
03a78da86dcbed33a502e37d587183acc0e17cb5 | 2,568 | # coding:utf-8
require_relative '../test_helper'
require 'ostruct'
module SmartAnswer
class DateQuestionTest < ActiveSupport::TestCase
def setup
@initial_state = State.new(:example)
end
test "dates are parsed from hash form before being saved" do
q = Question::Date.new(:example) do
save_input_as :date
next_node :done
end
new_state = q.transition(@initial_state, {year: "2011", month: '2', day: '1'})
assert_equal '2011-02-01', new_state.date
end
test "incomplete dates raise an error" do
q = Question::Date.new(:example) do
save_input_as :date
next_node :done
end
assert_raise SmartAnswer::InvalidResponse do
q.transition(@initial_state, {year: "", month: '2', day: '1'})
end
end
test "define allowable range of dates" do
q = Question::Date.new(:example) do
save_input_as :date
next_node :done
from { Date.parse('2011-01-01') }
to { Date.parse('2011-01-03') }
end
assert_equal ::Date.parse('2011-01-01')..::Date.parse('2011-01-03'), q.range
end
test "define default date" do
q = Question::Date.new(:example) do
default { Date.today }
end
assert_equal Date.today, q.default
end
test "define default day" do
q = Question::Date.new(:example) do
default_day 11
end
assert_equal 11, q.default_day
end
test "define default month" do
q = Question::Date.new(:example) do
default_month 2
end
assert_equal 2, q.default_month
end
test "define default year" do
q = Question::Date.new(:example) do
default_year 2013
end
assert_equal 2013, q.default_year
end
test "incomplete dates are accepted if appropriate defaults are defined" do
q = Question::Date.new(:example) do
default_day 11
default_month 2
default_year 2013
save_input_as :date
next_node :done
end
new_state = q.transition(@initial_state, {year: "", month: "", day: ""})
assert_equal '2013-02-11', new_state.date
end
test "default the day to the last in the month of an incomplete date" do
q = Question::Date.new(:example) do
default_day -1
save_input_as :date
next_node :done
end
incomplete_date = {year: "2013", month: "2", day: ""}
new_state = q.transition(@initial_state, incomplete_date)
assert_equal '2013-02-28', new_state.date
end
end
end
| 26.474227 | 84 | 0.616044 |
91b6c2335f96c172877a7d9956fcae18c0283592 | 7,778 | =begin
#Datadog API V1 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
This product includes software developed at Datadog (https://www.datadoghq.com/).
Copyright 2020-Present Datadog, Inc.
=end
require 'date'
require 'time'
module DatadogAPIClient::V1
# Response with Host information from Datadog.
class HostListResponse
# whether the object has unparsed attributes
attr_accessor :_unparsed
# Array of hosts.
attr_accessor :host_list
# Number of host matching the query.
attr_accessor :total_matching
# Number of host returned.
attr_accessor :total_returned
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'host_list' => :'host_list',
:'total_matching' => :'total_matching',
:'total_returned' => :'total_returned'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'host_list' => :'Array<Host>',
:'total_matching' => :'Integer',
:'total_returned' => :'Integer'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V1::HostListResponse` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `DatadogAPIClient::V1::HostListResponse`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'host_list')
if (value = attributes[:'host_list']).is_a?(Array)
self.host_list = value
end
end
if attributes.key?(:'total_matching')
self.total_matching = attributes[:'total_matching']
end
if attributes.key?(:'total_returned')
self.total_returned = attributes[:'total_returned']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
host_list == o.host_list &&
total_matching == o.total_matching &&
total_returned == o.total_returned
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[host_list, total_matching, total_returned].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when :Array
# generic array, return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# models (e.g. Pet) or oneOf
klass = DatadogAPIClient::V1.const_get(type)
res = klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
if res.instance_of? DatadogAPIClient::V1::UnparsedObject
self._unparsed = true
end
res
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
# Recursively converts a value for hash serialization: arrays are walked
# element by element (nil entries dropped), hashes are rebuilt with
# converted values, objects exposing to_hash are flattened, and anything
# else passes through unchanged.
def _to_hash(value)
  case value
  when Array
    value.compact.map { |element| _to_hash(element) }
  when Hash
    value.each_with_object({}) do |(key, element), result|
      result[key] = _to_hash(element)
    end
  else
    value.respond_to?(:to_hash) ? value.to_hash : value
  end
end
end
end
| 30.382813 | 216 | 0.631911 |
1d06ef1cc7bbb6daae1ff7a1fb260b59f0c4acbd | 251 | class CreateSmsDetails < ActiveRecord::Migration[5.2]
# Creates the sms_details table, which records the delivery outcome of
# SMS messages tied to attendance entries.
def change
create_table :sms_details do |t|
# Delivery status; failure_message presumably holds the provider error
# when delivery fails — confirm against the sending code.
t.string :status
t.text :failure_message
# Each SMS detail belongs to one attendance entry (FK enforced).
t.references :attendance_entry, foreign_key: true
t.timestamps
end
end
end
| 20.916667 | 55 | 0.693227 |
1adeb1bd05eeecea14895751062a118defd3bf3f | 1,081 | cask 'omnigraffle' do
if MacOS.version <= :snow_leopard
version '5.4.4'
sha256 '7bcc64093f46bd4808b1a4cb86cf90c0380a5c5ffffd55ce8f742712818558df'
url "http://www.omnigroup.com/ftp1/pub/software/MacOSX/10.6/OmniGraffle-#{version}.dmg"
elsif MacOS.version <= :mavericks
version '6.0.5'
sha256 'a2eff19909d1ba38a4f01b2beecbde2f31f4af43d30e06d2c6921ae8880f85bc'
url "http://www.omnigroup.com/ftp1/pub/software/MacOSX/10.8/OmniGraffle-#{version}.dmg"
elsif MacOS.version <= :yosemite
version '6.6.1'
sha256 '7671d46ccd0b53a5917b0ccba5971fe1f1d7990b2d636f25c941b11b03c6e23c'
url "http://www.omnigroup.com/ftp1/pub/software/MacOSX/10.10/OmniGraffle-#{version}.dmg"
else
version '7.3.1'
sha256 '2f67d17fbc83ba4b66617f0d367685fbbb2f7d939c6a2a5d677de9a73bbf4534'
url "http://www.omnigroup.com/ftp1/pub/software/MacOSX/10.11/OmniGraffle-#{version}.dmg"
end
name 'OmniGraffle'
homepage 'https://www.omnigroup.com/omnigraffle/'
app 'OmniGraffle.app'
zap delete: '~/Library/Application Support/The Omni Group/OmniGraffle'
end
| 40.037037 | 92 | 0.760407 |
6297b07433e2efbe246af5c05a9bebb9b1fa8d1a | 4,648 | require 'puppet/application'
require 'puppet/util'
class Puppet::Application::Queue < Puppet::Application
should_parse_config
attr_accessor :daemon
def preinit
require 'puppet/daemon'
@daemon = Puppet::Daemon.new
@daemon.argv = ARGV.dup
# Do an initial trap, so that cancels don't get a stack trace.
# This exits with exit code 1
Signal.trap(:INT) do
$stderr.puts "Caught SIGINT; shutting down"
exit(1)
end
# This is a normal shutdown, so code 0
Signal.trap(:TERM) do
$stderr.puts "Caught SIGTERM; shutting down"
exit(0)
end
{
:verbose => false,
:debug => false
}.each do |opt,val|
options[opt] = val
end
end
option("--debug","-d")
option("--verbose","-v")
def help
<<-HELP
puppet-queue(8) -- Queuing daemon for asynchronous storeconfigs
========
SYNOPSIS
--------
Retrieves serialized storeconfigs records from a queue and processes
them in order.
USAGE
-----
puppet queue [-d|--debug] [-v|--verbose]
DESCRIPTION
-----------
This application runs as a daemon and processes storeconfigs data,
retrieving the data from a stomp server message queue and writing it to
a database.
For more information, including instructions for properly setting up
your puppet master and message queue, see the documentation on setting
up asynchronous storeconfigs at:
http://projects.puppetlabs.com/projects/1/wiki/Using_Stored_Configuration
OPTIONS
-------
Note that any configuration parameter that's valid in the configuration
file is also a valid long argument. For example, 'server' is a valid
configuration parameter, so you can specify '--server <servername>' as
an argument.
See the configuration file documentation at
http://docs.puppetlabs.com/references/stable/configuration.html for the
full list of acceptable parameters. A commented list of all
configuration options can also be generated by running puppet queue with
'--genconfig'.
* --debug:
Enable full debugging.
* --help:
Print this help message
* --verbose:
Turn on verbose reporting.
* --version:
Print the puppet version number and exit.
EXAMPLE
-------
$ puppet queue
AUTHOR
------
Luke Kanies
COPYRIGHT
---------
Copyright (c) 2011 Puppet Labs, LLC Licensed under the Apache 2.0 License
HELP
end
# Registers the --logdest/-l option: adds the given log destination and
# remembers that an explicit destination was set (so setup_logs skips the
# syslog default). BUG FIX: this identical option block was registered
# twice in a row; the redundant duplicate has been removed.
option("--logdest DEST", "-l DEST") do |arg|
  begin
    Puppet::Util::Log.newdestination(arg)
    options[:setdest] = true
  rescue => detail
    Puppet.log_exception(detail)
  end
end
def main
require 'puppet/indirector/catalog/queue' # provides Puppet::Indirector::Queue.subscribe
Puppet.notice "Starting puppetqd #{Puppet.version}"
Puppet::Resource::Catalog::Queue.subscribe do |catalog|
# Once you have a Puppet::Resource::Catalog instance, passing it to save should suffice
# to put it through to the database via its active_record indirector (which is determined
# by the terminus_class = :active_record setting above)
Puppet::Util.benchmark(:notice, "Processing queued catalog for #{catalog.name}") do
begin
Puppet::Resource::Catalog.indirection.save(catalog)
rescue => detail
Puppet.log_exception(detail, "Could not save queued catalog for #{catalog.name}: #{detail}")
end
end
end
Thread.list.each { |thread| thread.join }
end
# Handle the logging settings.
def setup_logs
if options[:debug] or options[:verbose]
Puppet::Util::Log.newdestination(:console)
if options[:debug]
Puppet::Util::Log.level = :debug
else
Puppet::Util::Log.level = :info
end
end
Puppet::Util::Log.newdestination(:syslog) unless options[:setdest]
end
def setup
unless Puppet.features.stomp?
raise ArgumentError, "Could not load the 'stomp' library, which must be present for queueing to work. You must install the required library."
end
setup_logs
exit(Puppet.settings.print_configs ? 0 : 1) if Puppet.settings.print_configs?
require 'puppet/resource/catalog'
Puppet::Resource::Catalog.indirection.terminus_class = :store_configs
daemon.daemonize if Puppet[:daemonize]
# We want to make sure that we don't have a cache
# class set up, because if storeconfigs is enabled,
# we'll get a loop of continually caching the catalog
# for storage again.
Puppet::Resource::Catalog.indirection.cache_class = nil
end
end
| 25.538462 | 148 | 0.69062 |
1d56ca3e13711fdf0038c2a2e1151247ac864eb1 | 1,626 | # frozen_string_literal: true
# encoding: utf-8
# Copyright (C) 2014-2020 MongoDB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Mongo
class Error
# Raised if there are no servers available matching the preference.
#
# @since 2.0.0
class NoServerAvailable < Error
# Instantiate the new exception.
#
# @example Instantiate the exception.
# Mongo::Error::NoServerAvailable.new(server_selector)
#
# @param [ Hash ] server_selector The server preference that could not be
# satisfied.
# @param [ Cluster ] cluster The cluster that server selection was
# performed on. (added in 2.7.0)
#
# @since 2.0.0
def initialize(server_selector, cluster=nil, msg=nil)
unless msg
msg = "No #{server_selector.name} server is available"
if cluster
msg += " in cluster: #{cluster.summary}"
end
msg += " with timeout=#{server_selector.server_selection_timeout}, " +
"LT=#{server_selector.local_threshold}"
end
super(msg)
end
end
end
end
| 31.269231 | 80 | 0.663592 |
f8f39811d40c4ffc8391baa80d7bb579cf1cfa90 | 746 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "google/cloud/functions" unless defined? Google::Cloud::Functions::VERSION
| 37.3 | 82 | 0.768097 |
62e0a6ff60f6bf3d8e51fb514dd8fe6d1fc30b71 | 1,427 | module SessionsHelper
# Logs in the given user.
def log_in(user)
session[:user_id] = user.id
end
# Remembers a user in a persistant Session
def remember(user)
# Generates a fresh remember token and stores its digest on the user.
user.remember
# Persist the signed user id and the raw token in permanent cookies; the
# signed id prevents tampering with the stored user id.
cookies.permanent.signed[:user_id] = user.id
cookies.permanent[:remember_token] = user.remember_token
end
# Returns the current logged-in user (if any)
def current_user
# Prefer the short-lived session id; memoize so the lookup runs at most
# once per request on this branch.
if (user_id = session[:user_id])
@current_user ||= User.find_by(id: user_id)
elsif (user_id = cookies.signed[:user_id])
# Fall back to the persistent "remember me" cookies: the signed cookie
# yields the user id, and the raw remember_token must authenticate
# against the stored digest before the user is logged in and trusted.
user = User.find_by(id: user_id)
if user && user.authenticated?(:remember, cookies[:remember_token])
log_in user
@current_user = user
end
end
end
def current_user?(user)
user == current_user
end
# Returns true if the user is logged in, otherwise false
def logged_in?
!current_user.nil?
end
# Forgets a persistent user.
def forget(user)
user.forget
cookies.delete(:user_id)
cookies.delete(:remember_token)
end
# Logs out the current user
# Logs out the current user and clears session state.
# Guarded so a double logout (e.g. from a second browser tab) does not
# call forget(nil) and raise NoMethodError when nobody is logged in.
def log_out
  forget(current_user) if logged_in?
  session.delete(:user_id)
  @current_user = nil
end
# Redirects to stored location (or to the default)
def redirect_back_or(default)
redirect_to(session[:forwarding_url] || default)
session.delete(:forwarding_url)
end
def store_location
session[:forwarding_url] = request.original_url if request.get?
end
end
| 23.393443 | 73 | 0.680448 |
1833ffd9cb8586a750ea48e64a0eeb22f6c38486 | 323 | Aws.add_service(:IAM, {
api: "#{Aws::API_DIR}/iam/2010-05-08/api-2.json",
docs: "#{Aws::API_DIR}/iam/2010-05-08/docs-2.json",
paginators: "#{Aws::API_DIR}/iam/2010-05-08/paginators-1.json",
resources: "#{Aws::API_DIR}/iam/2010-05-08/resources-1.json",
waiters: "#{Aws::API_DIR}/iam/2010-05-08/waiters-2.json",
})
| 40.375 | 65 | 0.659443 |
b99cdb6d4c4b827a25b05201a4c3f61caaf4982f | 2,807 | # frozen_string_literal: true
RSpec.describe RuboCop::Cop::Layout::AssignmentIndentation, :config do
let(:config) do
RuboCop::Config.new('Layout/AssignmentIndentation' => {
'IndentationWidth' => cop_indent
},
'Layout/IndentationWidth' => { 'Width' => 2 })
end
let(:cop_indent) { nil } # use indentation with from Layout/IndentationWidth
it 'registers an offense for incorrectly indented rhs' do
expect_offense(<<~RUBY)
a =
if b ; end
^^^^^^^^^^ Indent the first line of the right-hand-side of a multi-line assignment.
RUBY
expect_correction(<<~RUBY)
a =
if b ; end
RUBY
end
it 'allows assignments that do not start on a newline' do
expect_no_offenses(<<~RUBY)
a = if b
foo
end
RUBY
end
it 'allows a properly indented rhs' do
expect_no_offenses(<<~RUBY)
a =
if b ; end
RUBY
end
it 'allows a properly indented rhs with fullwidth characters' do
expect_no_offenses(<<~RUBY)
f 'Ruby', a =
b
RUBY
end
it 'registers an offense for multi-lhs' do
expect_offense(<<~RUBY)
a,
b =
if b ; end
^^^^^^^^^^ Indent the first line of the right-hand-side of a multi-line assignment.
RUBY
expect_correction(<<~RUBY)
a,
b =
if b ; end
RUBY
end
it 'ignores comparison operators' do
expect_no_offenses(<<~RUBY)
a ===
if b ; end
RUBY
end
context 'when indentation width is overridden for this cop only' do
let(:cop_indent) { 7 }
it 'allows a properly indented rhs' do
expect_no_offenses(<<~RUBY)
a =
if b ; end
RUBY
end
it 'auto-corrects indentation' do
expect_offense(<<~RUBY)
a =
if b ; end
^^^^^^^^^^ Indent the first line of the right-hand-side of a multi-line assignment.
RUBY
expect_correction(<<~RUBY)
a =
if b ; end
RUBY
end
end
it 'registers an offense for incorrectly indented rhs when multiple assignment' do
expect_offense(<<~RUBY)
foo = bar =
baz = ''
^^^^^^^^ Indent the first line of the right-hand-side of a multi-line assignment.
RUBY
expect_correction(<<~RUBY)
foo = bar =
baz = ''
RUBY
end
it 'registers an offense for incorrectly indented rhs when' \
'multiple assignment with line breaks on each line' do
expect_offense(<<~RUBY)
foo =
bar =
baz = 42
^^^^^^^^ Indent the first line of the right-hand-side of a multi-line assignment.
RUBY
expect_correction(<<~RUBY)
foo =
bar =
baz = 42
RUBY
end
end
| 23.008197 | 93 | 0.56751 |
acd4ed1283ef03904f17c0230309f52c9cbc1438 | 5,887 | # encoding: UTF-8
require File.expand_path('../test_helper', __FILE__)
require 'stringio'
class TestXml < Minitest::Test
# ----- Constants ------
def test_lib_versions
assert(XML.check_lib_versions)
end
def test_debug_entities
original = XML.debug_entities
XML.debug_entities = false
refute(XML.debug_entities)
XML.debug_entities = true
assert(XML.debug_entities)
XML.debug_entities = false
refute(XML.debug_entities)
XML.debug_entities = original
end
def test_default_compression
return unless XML.default_compression
original = XML.default_compression
0.upto(9) do |i|
XML.default_compression = i
assert_equal(i, XML.default_compression)
end
9.downto(0) do |i|
assert_equal(i, XML.default_compression = i)
assert_equal(i, XML.default_compression)
end
0.downto(-10) do |i|
assert_equal(i, XML.default_compression = i)
assert_equal(0, XML.default_compression)
end
10.upto(20) do |i|
assert_equal(i, XML.default_compression = i)
assert_equal(9, XML.default_compression)
end
XML.default_compression = original
end
def test_default_keep_blanks
original = XML.default_keep_blanks
XML.default_keep_blanks = false
refute(XML.default_keep_blanks)
assert_equal(XML::Parser::Options::NOBLANKS, XML.default_options)
XML.default_keep_blanks = true
assert(XML.default_keep_blanks)
assert_equal(0, XML.default_options)
XML.default_keep_blanks = original
end
def test_default_line_numbers
original = XML.default_line_numbers
XML.default_line_numbers = false
refute(XML.default_line_numbers)
XML.default_line_numbers = true
assert(XML.default_line_numbers)
XML.default_line_numbers = false
refute(XML.default_line_numbers)
XML.default_line_numbers = original
end
def test_default_substitute_entities
original = XML.default_substitute_entities
XML.default_substitute_entities = false
refute(XML.default_substitute_entities)
assert_equal(0, XML.default_options)
XML.default_substitute_entities = true
assert(XML.default_substitute_entities)
assert_equal(XML::Parser::Options::NOENT, XML.default_options)
XML.default_substitute_entities = false
refute(XML.default_substitute_entities)
XML.default_substitute_entities = original
end
def test_default_tree_indent_string
original = XML.default_tree_indent_string
s = XML.default_tree_indent_string
assert_instance_of(String, s)
assert_equal(' ', s)
XML.default_tree_indent_string = 'uga'
s = XML.default_tree_indent_string
assert_instance_of(String, s)
assert_equal('uga', s)
XML.default_tree_indent_string = ' '
s = XML.default_tree_indent_string
assert_instance_of(String, s)
assert_equal(' ', s)
XML.default_tree_indent_string = original
end
def test_default_validity_checking
original = XML.default_validity_checking
XML.default_validity_checking = false
refute(XML.default_validity_checking)
assert_equal(0, XML.default_options)
XML.default_validity_checking = true
assert(XML.default_validity_checking)
assert_equal(XML::Parser::Options::DTDVALID, XML.default_options)
XML.default_validity_checking = false
refute(XML.default_validity_checking)
XML.default_validity_checking = original
end
def test_default_warnings
original = XML.default_warnings
XML.default_warnings = false
refute(XML.default_warnings)
assert_equal(XML::Parser::Options::NOWARNING, XML.default_options)
XML.default_warnings = true
assert(XML.default_warnings)
assert_equal(0, XML.default_options)
XML.default_warnings = false
refute(XML.default_warnings)
XML.default_warnings = original
end
def test_enabled_automata
assert(XML.enabled_automata?)
end
def test_enabled_c14n
assert(XML.enabled_c14n?)
end
def test_enabled_catalog
assert(XML.enabled_catalog?)
end
def test_enabled_debug
assert(XML.enabled_debug?)
end
def test_enabled_docbook
assert(XML.enabled_docbook?)
end
def test_enabled_ftp
assert(XML.enabled_ftp?)
end
def test_enabled_http
assert(XML.enabled_http?)
end
def test_enabled_html
assert(XML.enabled_html?)
end
def test_enabled_iconv
assert(XML.enabled_iconv?)
end
def test_enabled_memory_debug
assert_equal(false, XML.enabled_memory_debug?)
end
def test_enabled_regexp
assert(XML.enabled_regexp?)
end
def test_enabled_schemas
assert(XML.enabled_schemas?)
end
def test_enabled_thread
assert(XML.enabled_thread?)
end
def test_enabled_unicode
assert(XML.enabled_unicode?)
end
def test_enabled_xinclude
assert(XML.enabled_xinclude?)
end
def test_enabled_xpath
assert(XML.enabled_xpath?)
end
def test_enabled_xpointer
assert(XML.enabled_xpointer?)
end
def test_enabled_zlib
assert(XML.enabled_zlib?.is_a?(TrueClass) || XML.enabled_zlib?.is_a?(FalseClass))
end
def test_intent_tree_output
assert(TrueClass, XML.indent_tree_output)
XML.indent_tree_output = false
assert(FalseClass, XML.indent_tree_output)
XML.indent_tree_output = true
assert(TrueClass, XML.indent_tree_output)
end
def test_version
assert_instance_of(String, XML::VERSION)
end
def test_vernum
assert_instance_of(Integer, XML::VERNUM)
end
def test_libxml_parser_features
assert_instance_of(Array, XML.features)
end
def test_default_options
assert_equal(0, XML.default_options)
end
end | 23.548 | 86 | 0.711738 |
18ea58fffe7d2be5d4e2fa6e46b272f4c315969c | 155 | module G2crowd
class Product < Base
has_many :survey_responses
has_many :categories
has_one :main_category, class_name: 'Category'
end
end
| 19.375 | 50 | 0.741935 |
f70abaceeaa8be966b5e43f82a9d96b46841bddf | 378 | maintainer "VMware, Inc"
# Chef cookbook metadata: identity, licensing, long description sourced
# from the bundled README, and the cookbook dependency list.
maintainer_email "[email protected]"
license "Apache 2.0"
description "Allow nodes to discover the location for a given service at runtime, adapting when new services register."
long_description IO.read(File.join(File.dirname(__FILE__), 'README.textile'))
version "0.9"
depends "partial_search"
| 47.25 | 125 | 0.706349 |
e915df042c91d100dff848b2954d4f5291142f7a | 1,553 | require 'spec_helper'
# Specs for the Activity feed model: validity rules plus the expectation
# that badge awards and game lifecycle events each produce an Activity
# record with a specific message.
describe Activity do
  subject do
    FactoryGirl.build(:activity)
  end
  it "creates a valid object given valid attributes" do
    subject.save
    subject.should be_persisted
  end
  it "does not create a valid object given invalid attributes" do
    # A blank message must fail validation.
    subject.message = ""
    subject.save
    subject.should_not be_persisted
  end
  it "creates an activity for an awarded badge" do
    award = FactoryGirl.build(:award)
    award.badge = FactoryGirl.create(:badge)
    award.player = FactoryGirl.create(:player)
    awarded_badge = "#{award.player.name} was awarded the #{award.badge.name} badge."
    Activity.should_receive(:create).with({:message => awarded_badge}).at_least(1).times.and_return(true)
    award.save!
  end
  it "creates an activity for a new game" do
    game_attributes = FactoryGirl.attributes_for(:game)
    new_game = "#{game_attributes[:challenger].name} challenged #{game_attributes[:challenged].name}."
    Activity.should_receive(:create).with({:message => new_game}).at_least(1).times.and_return(true)
    Game.create(game_attributes)
  end
  it "creates an activity for a completed game" do
    game = FactoryGirl.build(:game)
    # NOTE(review): complete! runs BEFORE the message expectation below is
    # installed; if complete! itself creates the activity, the mock would
    # miss it. Here the explicit Activity.completed_game call at the end is
    # what is expected to trigger create — confirm that is the intent.
    game.complete!({
      :challenger_score => 21,
      :challenged_score => 10
    })
    completed_game = "#{game.challenger.name} completed their game against #{game.challenged.name} and won! (#{game.score})"
    Activity.should_receive(:create).with({:message => completed_game}).at_least(1).times.and_return(true)
    Activity.completed_game(game)
  end
end
| 33.76087 | 124 | 0.716033 |
6a075b06e8034ebc95e7b509f408681724fae3c1 | 1,351 | class Cglm < Formula
desc "Optimized OpenGL/Graphics Math (glm) for C"
homepage "https://github.com/recp/cglm"
url "https://github.com/recp/cglm/archive/v0.6.2.tar.gz"
sha256 "6d097f16fecd55d301bda2a3ac51df1ce514195a1671dfab84a8a2d0170ea7ac"
bottle do
cellar :any
sha256 "d03dbd49ca7c4eaf65c79d8baf9ba9bdba80b09282022ad7677cc4d80cc07cb3" => :catalina
sha256 "eae499b98f846f8e48609ee12055b69297eef0d84baecb1baca0683211652d5a" => :mojave
sha256 "6184f901f7835a1fa00228d7e48951b12f8ae3c6be7c910c0786690134999778" => :high_sierra
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
def install
system "autoreconf", "-fiv"
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}"
system "make", "install"
end
test do
(testpath/"test.c").write <<~EOS
#include <cglm/cglm.h>
#include <assert.h>
int main() {
vec3 x = {1.0f, 0.0f, 0.0f},
y = {0.0f, 1.0f, 0.0f},
z = {0.0f, 0.0f, 1.0f};
vec3 r;
glm_cross(x, y, r);
assert(glm_vec3_eqv_eps(r, z));
return 0;
}
EOS
system ENV.cc, "-I#{include}", testpath/"test.c", "-o", "test"
system "./test"
end
end
| 29.369565 | 93 | 0.621021 |
012e804dc823ab0a2fd75338f073528dd05a0857 | 2,652 | module EventStore
class EventRepository
def initialize(adapter: ::Event)
@adapter = adapter
end
attr_reader :adapter
def create(event, stream_name)
data = event.to_h.merge!(stream: stream_name)
adapter.create(data)
# Notify observers of new event
event.emit if event.respond_to?(:emit)
event
end
def delete_stream(stream_name)
condition = {stream: stream_name}
adapter.destroy_all condition
end
def has_event?(event_id)
adapter.exists?(event_id: event_id)
end
def last_stream_event(stream_name)
build_event_entity(adapter.where(stream: stream_name).last)
end
def read_events_forward(stream_name, start_event_id, count)
stream = adapter.where(stream: stream_name)
unless start_event_id.equal?(:head)
starting_event = adapter.find_by(event_id: start_event_id)
stream = stream.where('id > ?', starting_event)
end
stream.limit(count)
.map(&method(:build_event_entity))
end
def read_events_backward(stream_name, start_event_id, count)
stream = adapter.where(stream: stream_name)
unless start_event_id.equal?(:head)
starting_event = adapter.find_by(event_id: start_event_id)
stream = stream.where('id < ?', starting_event)
end
stream.order('id DESC').limit(count)
.map(&method(:build_event_entity))
end
def read_stream_events_forward(stream_name)
adapter.where(stream: stream_name)
.map(&method(:build_event_entity))
end
def read_stream_events_backward(stream_name)
adapter.where(stream: stream_name).order('id DESC')
.map(&method(:build_event_entity))
end
def read_all_streams_forward(start_event_id, count)
stream = adapter
unless start_event_id.equal?(:head)
starting_event = adapter.find_by(event_id: start_event_id)
stream = stream.where('id > ?', starting_event)
end
stream.limit(count)
.map(&method(:build_event_entity))
end
def read_all_streams_backward(start_event_id, count)
stream = adapter
unless start_event_id.equal?(:head)
starting_event = adapter.find_by(event_id: start_event_id)
stream = stream.where('id < ?', starting_event)
end
stream.order('id DESC').limit(count)
.map(&method(:build_event_entity))
end
private
def build_event_entity(record)
return nil unless record
record.event_type.constantize.new(
event_id: record.event_id,
metadata: record.metadata,
data: record.data
)
end
end
end
| 27.340206 | 66 | 0.670437 |
b934743c0a785a1ba097d5e0deb9b225adcf53c3 | 377 | class Pd::FitWeekendRegistrationMailer < ActionMailer::Base
default from: 'Sarah Fairweather <[email protected]>'
default bcc: MailerConstants::PLC_EMAIL_LOG
def confirmation(registration)
@registration = registration
mail(
to: registration.pd_application.user.email,
subject: "We've received your FiT weekend registration form"
)
end
end
| 26.928571 | 66 | 0.748011 |
ed4698fa7109011c6a3423a6c14a714f67c6181d | 1,434 | module Inch
module Language
module Ruby
module Evaluation
module Role
# Roles assigned to class variables
module ClassVariable
class WithDoc < Object::WithDoc
applicable_if :has_doc?
end
class WithoutDoc < Object::WithoutDoc
applicable_unless :has_doc?
end
class TaggedAsNodoc < Object::TaggedAsNodoc
applicable_if :nodoc?
end
class InRoot < Object::InRoot
applicable_if :in_root?
end
class Public < Object::Public
applicable_if :public?
priority(-1)
end
class Private < Object::Private
applicable_if :private?
priority(-3)
end
class WithCodeExample < Object::WithCodeExample
applicable_if do |o|
o.has_code_example? && !o.has_multiple_code_examples?
end
end
class WithMultipleCodeExamples < Object::WithMultipleCodeExamples
applicable_if :has_multiple_code_examples?
end
class WithoutCodeExample < Object::WithoutCodeExample
applicable_unless :has_code_example?
def suggestion
nil
end
end
end
end
end
end
end
end
| 26.555556 | 77 | 0.527894 |
ac295e672a046aa2904a736e49d5e03182c153e7 | 2,323 | class Blueprint < ApplicationRecord
include PgSearch::Model
extend FriendlyId
acts_as_votable
acts_as_taggable_on :tags
paginates_per 32
# Pictures
include PictureUploader::Attachment(:cover_picture)
has_many :additional_pictures, dependent: :destroy, class_name: "Picture"
accepts_nested_attributes_for :additional_pictures, allow_destroy: true
belongs_to :collection
belongs_to :mod
has_one :user, through: :collection
has_rich_text :description
friendly_id :title, use: :slugged
after_save :decode_blueprint
validates :title, presence: true
validates :encoded_blueprint, presence: true
validates :tag_list, length: { minimum: 1, maximum: 10, message: "needs at least one tag, maximum 10." }
validates :cover_picture, presence: true
validates :additional_pictures, length: { maximum: 4, message: "Too many pictures. Please make sure you don't have too many pictures attached." }
validate :encoded_blueprint_parsable
pg_search_scope :search_by_title,
against: [:title],
using: {
tsearch: { prefix: true }
}
default_scope { with_rich_text_description.includes(:tags, :mod, :user, :additional_pictures) }
def formatted_mod_version
"#{mod.name} - #{mod_version}"
end
def is_mod_version_latest?
mod_version >= mod.latest
end
def mod_compatibility_range
# Handle retro compatibility only for <= 2.0.6
if mod_version <= "2.0.6"
[
mod.compatibility_range_for(mod_version).first,
mod.compatibility_range_for(mod.latest).last
]
else
mod.compatibility_range_for(mod_version)
end
end
private
def decode_blueprint
if saved_change_to_attribute?(:encoded_blueprint)
BlueprintParserJob.perform_now(self.id)
end
end
def encoded_blueprint_parsable
if self.mod.name == "MultiBuildBeta"
valid = Parsers::MultibuildBetaBlueprint.new(self).validate
elsif self.mod.name == "MultiBuild"
valid = Parsers::MultibuildBetaBlueprint.new(self).validate
elsif self.mod.name == "Dyson Sphere Program"
valid = Parsers::DysonSphereProgramBlueprint.new(self).validate
else
valid = true
end
if !valid
errors.add(:encoded_blueprint, "Wrong blueprint format for mod version: #{self.mod.name} - #{self.mod_version}")
end
end
end
| 28.329268 | 147 | 0.726647 |
1882d34279034328af47bde7714be2837501a792 | 388 | Rails.application.routes.draw do
# For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
namespace :api do
namespace :v1 do
resources :posts
resources :users, only: [:create]
post '/login', to: 'auth#create'
get '/profile', to: 'users#profile'
get '/autologin', to: 'auth#auto_login'
end
end
end
| 29.846154 | 102 | 0.657216 |
334db09eab83ec60c11660a677205a3bc33a17cf | 1,723 | # Manages settings in OpenSSH's sshd_config file
#
# Copyright (c) 2012 Raphaël Pinson
# Licensed under the Apache License, Version 2.0
# Custom Puppet resource type managing individual sshd_config settings.
Puppet::Type.newtype(:sshd_config) do
  @doc = "Manages settings in an OpenSSH sshd_config file.
The resource name is used for the setting name, but if the `condition` is
given, then the name can be something else and the `key` given as the name
of the setting.
Subsystem entries are not managed by this type. There is a specific `sshd_config_subsystem` type to manage these entries."
  ensurable
  newparam(:name) do
    desc "The name of the setting, or a unique string if `condition` given."
    isnamevar
  end
  # Optional real setting name, used when :name is a disambiguating label.
  newparam(:key) do
    desc "Overrides setting name to prevent resource conflicts if `condition` is
given."
  end
  # The managed value; a handful of sshd keywords accept multiple values.
  newproperty(:value, :array_matching => :all) do
    desc "Value to change the setting to. The follow parameters take an array of values:
- MACs;
- AcceptEnv;
- AllowGroups;
- AllowUsers;
- DenyGroups;
- DenyUsers.
All other parameters take a string. When passing an array to other parameters, only the first value in the array will be considered."
  end
  newparam(:target) do
    desc "The file in which to store the settings, defaults to
`/etc/ssh/sshd_config`."
  end
  # Scopes the setting to an sshd `Match` block.
  newparam(:condition) do
    desc "Match group condition for the entry,
in the format:
    sshd_config { 'PermitRootLogin':
      value     => 'without-password',
      condition => 'Host example.net',
    }
The value can contain multiple conditions, concatenated together with
whitespace.  This is used if the `Match` block has multiple criteria.
    condition => 'Host example.net User root'
      "
  end
  # Ensure the target file resource (if declared) is managed first.
  autorequire(:file) do
    self[:target]
  end
end
| 26.507692 | 133 | 0.719095 |
38082cadfbf51055c8223b394e318dd1090b8b95 | 914 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
# Gem packaging for the Sinatra-based sample Manifold provider.
Gem::Specification.new do |spec|
  spec.name          = "sinatra_sample_provider"
  spec.version       = "0.0.1"
  spec.authors       = ["James Bowes"]
  spec.email         = ["[email protected]"]
  spec.summary       = "Sample Manifold provider app using Sinatra"
  spec.homepage      = "https://github.com/manifoldco/ruby-sinatra-sample-provider"
  # Package everything tracked by git except test/spec/feature files.
  spec.files         = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]
  spec.add_dependency 'sinatra', '~> 1.4.8'
  spec.add_dependency 'sinatra-contrib', '~> 1.4.7'
  spec.add_dependency 'oauth2', '~> 1.3.1'
  spec.add_dependency 'manifoldco_signature', '~> 0.1.4'
end
| 35.153846 | 83 | 0.633479 |
f8e7b8b5d42fc6a73a8cb5e7b0c71af017fe8e61 | 610 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }])
# Mayor.create(name: 'Emanuel', city: cities.first)
# Seed data: ten users, each with five articles, all Faker-generated.
# NOTE(review): `create` (not `create!`) silently drops records that fail
# validation (e.g. duplicate Faker emails) — confirm that is acceptable here.
10.times do
  user = User.create(username: Faker::Name.first_name, email: Faker::Internet.email, password: Faker::Internet.password)
  5.times do
    article = Article.create(title: Faker::Lorem.sentence, text: Faker::Lorem.paragraph)
    user.articles << article
  end
end | 43.571429 | 119 | 0.727869 |
21016ec5677a233d71401ce6c294b6f2a15c589f | 151 | class CreateTeams < ActiveRecord::Migration[5.1]
def change
create_table :teams do |t|
t.string :name
t.timestamps
end
end
end
| 16.777778 | 48 | 0.655629 |
4aacc84518abdeb378d7fef55422ce23432a3856 | 1,416 | module Panda
module Api
class UsersController < BaseController
skip_before_action :authenticate_user!, only: :create
def create
user = Panda::User.create! params.require(:user).permit(:name, :nickname, :email, :mobile, :password)
render json: user, methods: :auth_token
end
# TODO: Fixme
def upload_avatar
current_user.update({avatar: params[:user][:avatar]})
current_user.reload
my_info
end
def my_info
render json: current_user
end
# 绑定苹果device token
def bind_ios_device_token
token = params[:token]
if Panda::Device.where(user: current_user, token: token).count < 1
Panda::Device.create!(user: current_user, token: token)
end
render json: {msg: "success"}
end
def update_my_info
current_user.update params.require(:user).permit!
render json: current_user
end
# 上传个人照片
def append_photo
u = current_user
u.append_images params[:user][:photos] if params[:user][:photos]
render json: current_user, include: [:photos]
end
# put 删除个人顶部滚动照片
def delete_photos
u = current_user
u.remove_photos!
u.save
render json: u, include: [:photos]
end
def delete_photo
u = current_user
u.delete_photo(params[:file_name])
u.save
render json: u, include: [ :photos]
end
end
end
end
| 21.134328 | 107 | 0.639124 |
795c8e901ee66bb011545340b428914f3c13a39f | 15,166 | require "os/linux/glibc"
class Llvm < Formula
desc "Next-gen compiler infrastructure"
homepage "https://llvm.org/"
# The LLVM Project is under the Apache License v2.0 with LLVM Exceptions
license "Apache-2.0"
stable do
url "https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/llvm-10.0.1.src.tar.xz"
sha256 "c5d8e30b57cbded7128d78e5e8dad811bff97a8d471896812f57fa99ee82cdf3"
resource "clang" do
url "https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/clang-10.0.1.src.tar.xz"
sha256 "f99afc382b88e622c689b6d96cadfa6241ef55dca90e87fc170352e12ddb2b24"
unless OS.mac?
patch do
url "https://gist.githubusercontent.com/iMichka/9ac8e228679a85210e11e59d029217c1/raw/e50e47df860201589e6f43e9f8e9a4fc8d8a972b/clang9?full_index=1"
sha256 "65cf0dd9fdce510e74648e5c230de3e253492b8f6793a89534becdb13e488d0c"
end
end
end
resource "clang-tools-extra" do
url "https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/clang-tools-extra-10.0.1.src.tar.xz"
sha256 "d093782bcfcd0c3f496b67a5c2c997ab4b85816b62a7dd5b27026634ccf5c11a"
end
resource "compiler-rt" do
url "https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/compiler-rt-10.0.1.src.tar.xz"
sha256 "d90dc8e121ca0271f0fd3d639d135bfaa4b6ed41e67bd6eb77808f72629658fa"
end
if OS.mac?
resource "libcxx" do
url "https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/libcxx-10.0.1.src.tar.xz"
sha256 "def674535f22f83131353b3c382ccebfef4ba6a35c488bdb76f10b68b25be86c"
end
end
resource "libunwind" do
url "https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/libunwind-10.0.1.src.tar.xz"
sha256 "741903ec1ebff2253ff19d803629d88dc7612598758b6e48bea2da168de95e27"
end
resource "lld" do
url "https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/lld-10.0.1.src.tar.xz"
sha256 "591449e0aa623a6318d5ce2371860401653c48bb540982ccdd933992cb88df7a"
end
resource "lldb" do
url "https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/lldb-10.0.1.src.tar.xz"
sha256 "07abe87c25876aa306e73127330f5f37d270b6b082d50cc679e31b4fc02a3714"
end
resource "openmp" do
url "https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/openmp-10.0.1.src.tar.xz"
sha256 "d19f728c8e04fb1e94566c8d76aef50ec926cd2f95ef3bf1e0a5de4909b28b44"
end
resource "polly" do
url "https://github.com/llvm/llvm-project/releases/download/llvmorg-10.0.1/polly-10.0.1.src.tar.xz"
sha256 "d2fb0bb86b21db1f52402ba231da7c119c35c21dfb843c9496fe901f2d6aa25a"
end
end
unless OS.mac?
patch :p2 do
url "https://github.com/llvm/llvm-project/commit/7f5fe30a150e7e87d3fbe4da4ab0e76ec38b40b9.patch?full_index=1"
sha256 "9ed85d2b00d0b70c628a5d1256d87808d944532fe8c592516577a4f8906a042c"
end
# Needed for crystal
patch :p2, :DATA
end
livecheck do
url :homepage
regex(/LLVM (\d+.\d+.\d+)/i)
end
bottle do
cellar :any
sha256 "e3ec9fda84756750ac0b5620ff34da04ba5035c8276af1bebfe76e012bb0b14a" => :catalina
sha256 "bb8ede510e2a5664761281f1e7e6f2c01758229bdc49e19a9557ced5e4cb7717" => :mojave
sha256 "d39ebc8d856f0b5ef3709625cfdd3dc02299d2648431852d50577d5d839fd6aa" => :high_sierra
sha256 "13ab6fed69933d1128f02a8695defef2f35a489ac204f6852699dde2354713b8" => :x86_64_linux
end
# Clang cannot find system headers if Xcode CLT is not installed
pour_bottle? do
reason "The bottle needs the Xcode CLT to be installed."
satisfy { !OS.mac? || MacOS::CLT.installed? }
end
head do
url "https://github.com/llvm/llvm-project.git"
unless OS.mac?
patch do
url "https://gist.githubusercontent.com/iMichka/9ac8e228679a85210e11e59d029217c1/raw/e50e47df860201589e6f43e9f8e9a4fc8d8a972b/clang9?full_index=1"
sha256 "65cf0dd9fdce510e74648e5c230de3e253492b8f6793a89534becdb13e488d0c"
directory "clang"
end
end
end
keg_only :provided_by_macos
# https://llvm.org/docs/GettingStarted.html#requirement
# We intentionally use Make instead of Ninja.
# See: Homebrew/homebrew-core/issues/35513
depends_on "cmake" => :build
depends_on "[email protected]" => :build
depends_on "libffi"
uses_from_macos "libedit"
uses_from_macos "libxml2"
uses_from_macos "ncurses"
uses_from_macos "zlib"
unless OS.mac?
depends_on "pkg-config" => :build
depends_on "gcc" # needed for libstdc++
if Formula["glibc"].any_version_installed? || OS::Linux::Glibc.system_version < Formula["glibc"].version
depends_on "glibc"
end
depends_on "binutils" # needed for gold and strip
depends_on "libelf" # openmp requires <gelf.h>
conflicts_with "clang-format", because: "both install `clang-format` binaries"
end
def install
projects = %w[
clang
clang-tools-extra
lld
lldb
polly
]
# OpenMP currently fails to build on ARM
# https://github.com/Homebrew/brew/issues/7857#issuecomment-661484670
projects << "openmp" unless Hardware::CPU.arm?
runtimes = %w[
compiler-rt
libunwind
]
args << "libcxx" if OS.mac?
# Can likely be added to the base runtimes array when 11.0.0 is released.
runtimes << "libcxxabi" if build.head?
llvmpath = buildpath/"llvm"
unless build.head?
llvmpath.install buildpath.children - [buildpath/".brew_home"]
(projects + runtimes).each { |p| resource(p).stage(buildpath/p) }
end
py_ver = "3.8"
# Apple's libstdc++ is too old to build LLVM
ENV.libcxx if ENV.compiler == :clang
# compiler-rt has some iOS simulator features that require i386 symbols
# I'm assuming the rest of clang needs support too for 32-bit compilation
# to work correctly, but if not, perhaps universal binaries could be
# limited to compiler-rt. llvm makes this somewhat easier because compiler-rt
# can almost be treated as an entirely different build from llvm.
ENV.permit_arch_flags
unless OS.mac?
# see https://llvm.org/docs/HowToCrossCompileBuiltinsOnArm.html#the-cmake-try-compile-stage-fails
# Basically, the stage1 clang will try to locate a gcc toolchain and often
# get the default from /usr/local, which might contains an old version of
# gcc that can't build compiler-rt. This fixes the problem and, unlike
# setting the main project's cmake option -DGCC_INSTALL_PREFIX, avoid
# hardcoding the gcc path into the binary
inreplace "compiler-rt/CMakeLists.txt", /(cmake_minimum_required.*\n)/,
"\\1add_compile_options(\"--gcc-toolchain=#{Formula["gcc"].opt_prefix}\")"
end
args = %W[
-DLLVM_ENABLE_PROJECTS=#{projects.join(";")}
-DLLVM_ENABLE_RUNTIMES=#{runtimes.join(";")}
-DLLVM_POLLY_LINK_INTO_TOOLS=ON
-DLLVM_BUILD_EXTERNAL_COMPILER_RT=ON
-DLLVM_LINK_LLVM_DYLIB=ON
-DLLVM_ENABLE_EH=ON
-DLLVM_ENABLE_FFI=ON
-DLLVM_ENABLE_RTTI=ON
-DLLVM_INCLUDE_DOCS=OFF
-DLLVM_INCLUDE_TESTS=OFF
-DLLVM_INSTALL_UTILS=ON
-DLLVM_ENABLE_Z3_SOLVER=OFF
-DLLVM_OPTIMIZED_TABLEGEN=ON
-DLLVM_TARGETS_TO_BUILD=all
-DFFI_INCLUDE_DIR=#{Formula["libffi"].opt_lib}/libffi-#{Formula["libffi"].version}/include
-DFFI_LIBRARY_DIR=#{Formula["libffi"].opt_lib}
-DLLDB_USE_SYSTEM_DEBUGSERVER=ON
-DLLDB_ENABLE_PYTHON=OFF
-DLLDB_ENABLE_LUA=OFF
-DLLDB_ENABLE_LZMA=OFF
-DLIBOMP_INSTALL_ALIASES=OFF
-DCLANG_PYTHON_BINDINGS_VERSIONS=#{py_ver}
]
if OS.mac?
args << "-DLLVM_BUILD_LLVM_C_DYLIB=ON"
args << "-DLLVM_ENABLE_LIBCXX=ON"
args << "-DLLVM_CREATE_XCODE_TOOLCHAIN=#{MacOS::Xcode.installed? ? "ON" : "OFF"}"
else
args << "-DLLVM_BUILD_LLVM_C_DYLIB=OFF"
args << "-DLLVM_ENABLE_LIBCXX=OFF"
args << "-DLLVM_CREATE_XCODE_TOOLCHAIN=OFF"
args << "-DCLANG_DEFAULT_CXX_STDLIB=libstdc++"
end
sdk = MacOS.sdk_path_if_needed
args << "-DDEFAULT_SYSROOT=#{sdk}" if sdk
# Enable llvm gold plugin for LTO
args << "-DLLVM_BINUTILS_INCDIR=#{Formula["binutils"].opt_include}" unless OS.mac?
if OS.mac? & MacOS.version == :mojave && MacOS::CLT.installed?
# Mojave CLT linker via software update is older than Xcode.
# Use it to retain compatibility.
args << "-DCMAKE_LINKER=/Library/Developer/CommandLineTools/usr/bin/ld"
end
mkdir llvmpath/"build" do
system "cmake", "-G", "Unix Makefiles", "..", *(std_cmake_args + args)
system "make"
system "make", "install"
system "make", "install-xcode-toolchain" if OS.mac? && MacOS::Xcode.installed?
end
unless OS.mac?
# Strip executables/libraries/object files to reduce their size
system("strip", "--strip-unneeded", "--preserve-dates", *(Dir[bin/"**/*", lib/"**/*"]).select do |f|
f = Pathname.new(f)
f.file? && (f.elf? || f.extname == ".a")
end)
end
# Install LLVM Python bindings
# Clang Python bindings are installed by CMake
(lib/"python#{py_ver}/site-packages").install llvmpath/"bindings/python/llvm"
# Install Emacs modes
elisp.install Dir[llvmpath/"utils/emacs/*.el"] + Dir[share/"clang/*.el"]
end
def caveats
<<~EOS
To use the bundled libc++ please add the following LDFLAGS:
LDFLAGS="-L#{opt_lib} -Wl,-rpath,#{opt_lib}"
EOS
end
test do
assert_equal prefix.to_s, shell_output("#{bin}/llvm-config --prefix").chomp
(testpath/"omptest.c").write <<~EOS
#include <stdlib.h>
#include <stdio.h>
#include <omp.h>
int main() {
#pragma omp parallel num_threads(4)
{
printf("Hello from thread %d, nthreads %d\\n", omp_get_thread_num(), omp_get_num_threads());
}
return EXIT_SUCCESS;
}
EOS
clean_version = version.to_s[/(\d+\.?)+/]
system "#{bin}/clang", "-L#{lib}", "-fopenmp", "-nobuiltininc",
"-I#{lib}/clang/#{clean_version}/include",
"omptest.c", "-o", "omptest", *ENV["LDFLAGS"].split
testresult = shell_output("./omptest")
sorted_testresult = testresult.split("\n").sort.join("\n")
expected_result = <<~EOS
Hello from thread 0, nthreads 4
Hello from thread 1, nthreads 4
Hello from thread 2, nthreads 4
Hello from thread 3, nthreads 4
EOS
assert_equal expected_result.strip, sorted_testresult.strip
(testpath/"test.c").write <<~EOS
#include <stdio.h>
int main()
{
printf("Hello World!\\n");
return 0;
}
EOS
(testpath/"test.cpp").write <<~EOS
#include <iostream>
int main()
{
std::cout << "Hello World!" << std::endl;
return 0;
}
EOS
unless OS.mac?
system "#{bin}/clang++", "-v", "test.cpp", "-o", "test"
assert_equal "Hello World!", shell_output("./test").chomp
end
# Testing Command Line Tools
if OS.mac? && MacOS::CLT.installed?
libclangclt = Dir[
"/Library/Developer/CommandLineTools/usr/lib/clang/#{MacOS::CLT.version.to_i}*"
].last { |f| File.directory? f }
system "#{bin}/clang++", "-v", "-nostdinc",
"-I/Library/Developer/CommandLineTools/usr/include/c++/v1",
"-I#{libclangclt}/include",
"-I/usr/include",
# need it because /Library/.../usr/include/c++/v1/iosfwd refers to <wchar.h>,
# which CLT installs to /usr/include
"test.cpp", "-o", "testCLT++"
# Testing default toolchain and SDK location.
system "#{bin}/clang++", "-v",
"-std=c++11", "test.cpp", "-o", "test++"
assert_includes MachO::Tools.dylibs("test++"), "/usr/lib/libc++.1.dylib"
assert_equal "Hello World!", shell_output("./test++").chomp
system "#{bin}/clang", "-v", "test.c", "-o", "test"
assert_equal "Hello World!", shell_output("./test").chomp
toolchain_path = "/Library/Developer/CommandLineTools"
system "#{bin}/clang++", "-v",
"-isysroot", MacOS::CLT.sdk_path,
"-isystem", "#{toolchain_path}/usr/include/c++/v1",
"-isystem", "#{toolchain_path}/usr/include",
"-isystem", "#{MacOS::CLT.sdk_path}/usr/include",
"-std=c++11", "test.cpp", "-o", "testCLT++"
assert_includes MachO::Tools.dylibs("testCLT++"), "/usr/lib/libc++.1.dylib"
assert_equal "Hello World!", shell_output("./testCLT++").chomp
system "#{bin}/clang", "-v", "test.c", "-o", "testCLT"
assert_equal "Hello World!", shell_output("./testCLT").chomp
end
# Testing Xcode
if OS.mac? && MacOS::Xcode.installed?
system "#{bin}/clang++", "-v",
"-isysroot", MacOS::Xcode.sdk_path,
"-isystem", "#{MacOS::Xcode.toolchain_path}/usr/include/c++/v1",
"-isystem", "#{MacOS::Xcode.toolchain_path}/usr/include",
"-isystem", "#{MacOS::Xcode.sdk_path}/usr/include",
"-std=c++11", "test.cpp", "-o", "testXC++"
assert_includes MachO::Tools.dylibs("testXC++"), "/usr/lib/libc++.1.dylib"
assert_equal "Hello World!", shell_output("./testXC++").chomp
system "#{bin}/clang", "-v",
"-isysroot", MacOS.sdk_path,
"test.c", "-o", "testXC"
assert_equal "Hello World!", shell_output("./testXC").chomp
end
# link against installed libc++
# related to https://github.com/Homebrew/legacy-homebrew/issues/47149
if OS.mac?
system "#{bin}/clang++", "-v",
"-isystem", "#{opt_include}/c++/v1",
"-std=c++11", "-stdlib=libc++", "test.cpp", "-o", "testlibc++",
"-L#{opt_lib}", "-Wl,-rpath,#{opt_lib}"
assert_includes MachO::Tools.dylibs("testlibc++"), "#{opt_lib}/libc++.1.dylib"
assert_equal "Hello World!", shell_output("./testlibc++").chomp
(testpath/"scanbuildtest.cpp").write <<~EOS
#include <iostream>
int main() {
int *i = new int;
*i = 1;
delete i;
std::cout << *i << std::endl;
return 0;
}
EOS
assert_includes shell_output("#{bin}/scan-build clang++ scanbuildtest.cpp 2>&1"),
"warning: Use of memory after it is freed"
(testpath/"clangformattest.c").write <<~EOS
int main() {
printf("Hello world!"); }
EOS
assert_equal "int main() { printf(\"Hello world!\"); }\n",
shell_output("#{bin}/clang-format -style=google clangformattest.c")
end
end
end
__END__
diff --git a/llvm/include/llvm/BinaryFormat/Dwarf.h b/llvm/include/llvm/BinaryFormat/Dwarf.h
index 2ad201831..8bb9c6bed 100644
--- a/llvm/include/llvm/BinaryFormat/Dwarf.h
+++ b/llvm/include/llvm/BinaryFormat/Dwarf.h
@@ -232,6 +232,8 @@ inline bool isCPlusPlus(SourceLanguage S) {
case DW_LANG_hi_user:
return false;
}
+ if (S >= DW_LANG_lo_user && S <= DW_LANG_hi_user)
+ return false;
llvm_unreachable("Invalid source language");
}
| 36.900243 | 156 | 0.652051 |
6ae76cf0befc3b29c7cf07d8da15cb755368069b | 293 | class Hpuload < Cask
# Download location for the HPULoad disk image (served via the vendor wiki's
# fetch endpoint, hence the query-string heavy URL).
url 'http://www.fernlightning.com/lib/exe/fetch.php?id=software%3Ahpuload%3Astart&cache=cache&media=software:hpuload:hpuload.dmg'
homepage 'http://www.fernlightning.com/doku.php?id=software:hpuload:start'
# Upstream publishes no versioned releases, so the artifact cannot be pinned
# and the checksum is intentionally unverified.
version 'latest'
sha256 :no_check
# NOTE(review): `link` is the legacy Cask stanza for installing an app bundle;
# modern Cask DSL uses `app` — confirm against the targeted Homebrew version.
link 'HPULoad.app'
end
| 36.625 | 131 | 0.771331 |
7af7f2b3d6617d2ae2f4bf1143de02c7e1f01902 | 30,638 | class Ansible < Formula
include Language::Python::Virtualenv
desc "Automate deployment, configuration, and upgrading"
homepage "https://www.ansible.com/"
url "https://releases.ansible.com/ansible/ansible-2.9.0.tar.gz"
sha256 "9f9a9ace1d63cd8a4692ab6ee6ed04823743f1bd339a6ef188860c02cf7f46f1"
head "https://github.com/ansible/ansible.git", :branch => "devel"
bottle do
cellar :any
sha256 "33bf9433221f6818839e9a2c660c6d46298d195533870276607377a1e896c797" => :catalina
sha256 "92df82bb9a6e9da39c9676d6a7fc5230b9614d007258c3a2cc7c1cc7570a173e" => :mojave
sha256 "e55981b1724d0010b662be7244608a189cb22275dff37a00edaa8d6127c1413f" => :high_sierra
end
depends_on "pkg-config" => :build
depends_on "libyaml"
depends_on "[email protected]"
depends_on "python"
# Collect requirements from:
# ansible
# docker-py
# python-neutronclient (OpenStack)
# shade (OpenStack)
# pywinrm (Windows)
# kerberos (Windows)
# xmltodict (Windows)
# boto (AWS)
# boto3 (AWS)
# botocore (AWS)
# apache-libcloud (Google GCE)
# python-keyczar (Accelerated Mode)
# passlib (htpasswd core module)
# zabbix-api (Zabbix extras module)
# junos-eznc (Juniper device support)
# jxmlease (Juniper device support)
# dnspython (DNS Lookup - dig)
# pysphere (VMware vSphere support)
# python-consul (Consul support)
# requests-credssp (CredSSP support for windows hosts)
# openshift (k8s module support)
### setup_requires dependencies
resource "pbr" do
url "https://files.pythonhosted.org/packages/99/f1/7807d3409c79905a907f1c616d910c921b2a8e73c17b2969930318f44777/pbr-5.4.3.tar.gz"
sha256 "2c8e420cd4ed4cec4e7999ee47409e876af575d4c35a45840d59e8b5f3155ab8"
end
resource "pytz" do
url "https://files.pythonhosted.org/packages/82/c3/534ddba230bd4fbbd3b7a3d35f3341d014cca213f369a9940925e7e5f691/pytz-2019.3.tar.gz"
sha256 "b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be"
end
### end
### extras for requests[security]
resource "cryptography" do
url "https://files.pythonhosted.org/packages/be/60/da377e1bed002716fb2d5d1d1cab720f298cb33ecff7bf7adea72788e4e4/cryptography-2.8.tar.gz"
sha256 "3cda1f0ed8747339bbdf71b9f38ca74c7b592f24f65cdb3ab3765e4b02871651"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"
sha256 "c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"
end
resource "pyOpenSSL" do
url "https://files.pythonhosted.org/packages/40/d0/8efd61531f338a89b4efa48fcf1972d870d2b67a7aea9dcf70783c8464dc/pyOpenSSL-19.0.0.tar.gz"
sha256 "aeca66338f6de19d1aa46ed634c3b9ae519a64b458f8468aec688e7e3c20f200"
end
### end
# The rest of this list should always be sorted by:
# pip install homebrew-pypi-poet && poet_lint $(brew formula ansible)
resource "Babel" do
url "https://files.pythonhosted.org/packages/bd/78/9fb975cbb3f4b136de2cd4b5e5ce4a3341169ebf4c6c03630996d05428f1/Babel-2.7.0.tar.gz"
sha256 "e86135ae101e31e2c8ec20a4e0c5220f4eed12487d5cf3f78be7e98d3a57fc28"
end
resource "Jinja2" do
url "https://files.pythonhosted.org/packages/7b/db/1d037ccd626d05a7a47a1b81ea73775614af83c2b3e53d86a0bb41d8d799/Jinja2-2.10.3.tar.gz"
sha256 "9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de"
end
resource "MarkupSafe" do
url "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz"
sha256 "29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"
end
resource "PrettyTable" do
url "https://files.pythonhosted.org/packages/ef/30/4b0746848746ed5941f052479e7c23d2b56d174b82f4fd34a25e389831f5/prettytable-0.7.2.tar.bz2"
sha256 "853c116513625c738dc3ce1aee148b5b5757a86727e67eff6502c7ca59d43c36"
end
resource "PyNaCl" do
url "https://files.pythonhosted.org/packages/61/ab/2ac6dea8489fa713e2b4c6c5b549cc962dd4a842b5998d9e80cf8440b7cd/PyNaCl-1.3.0.tar.gz"
sha256 "0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c"
end
resource "PyYAML" do
url "https://files.pythonhosted.org/packages/e3/e8/b3212641ee2718d556df0f23f78de8303f068fe29cdaa7a91018849582fe/PyYAML-5.1.2.tar.gz"
sha256 "01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4"
end
resource "apache-libcloud" do
url "https://files.pythonhosted.org/packages/08/c7/951755bbfecf5eab30448062ba706e71a1f2e8b51413916c2dcea8f2ad41/apache-libcloud-2.6.0.tar.gz"
sha256 "201751f738109f25d58dcdfb5804e17216e0dc8f68b522e9e26ac16e0b9ff2ea"
end
resource "appdirs" do
url "https://files.pythonhosted.org/packages/48/69/d87c60746b393309ca30761f8e2b49473d43450b150cb08f3c6df5c11be5/appdirs-1.4.3.tar.gz"
sha256 "9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92"
end
resource "asn1crypto" do
url "https://files.pythonhosted.org/packages/c1/a9/86bfedaf41ca590747b4c9075bc470d0b2ec44fb5db5d378bc61447b3b6b/asn1crypto-1.2.0.tar.gz"
sha256 "87620880a477123e01177a1f73d0f327210b43a3cdbd714efcd2fa49a8d7b384"
end
resource "backports.ssl_match_hostname" do
url "https://files.pythonhosted.org/packages/ff/2b/8265224812912bc5b7a607c44bf7b027554e1b9775e9ee0de8032e3de4b2/backports.ssl_match_hostname-3.7.0.1.tar.gz"
sha256 "bb82e60f9fbf4c080eabd957c39f0641f0fc247d9a16e31e26d594d8f42b9fd2"
end
resource "bcrypt" do
url "https://files.pythonhosted.org/packages/fa/aa/025a3ab62469b5167bc397837c9ffc486c42a97ef12ceaa6699d8f5a5416/bcrypt-3.1.7.tar.gz"
sha256 "0b0069c752ec14172c5f78208f1863d7ad6755a6fae6fe76ec2c80d13be41e42"
end
resource "boto" do
url "https://files.pythonhosted.org/packages/c8/af/54a920ff4255664f5d238b5aebd8eedf7a07c7a5e71e27afcfe840b82f51/boto-2.49.0.tar.gz"
sha256 "ea0d3b40a2d852767be77ca343b58a9e3a4b00d9db440efb8da74b4e58025e5a"
end
resource "boto3" do
url "https://files.pythonhosted.org/packages/ec/d8/023e616dd474dbeb8c1f09315081c49de9340d96923a0f6d5f7e64a79033/boto3-1.10.8.tar.gz"
sha256 "593f67dcf6417b8c39641236761604cf47134bc27c25ef4d7c3d22da824e2c05"
end
resource "botocore" do
url "https://files.pythonhosted.org/packages/b8/c8/ce8469577f289f640684d664d2297d143295e930ec82225ef72dc45c1625/botocore-1.13.8.tar.gz"
sha256 "5cc7e0bfa41eb2789674485fdda62ce44ff5e3fee8accb4e0ef317ebdf1a319a"
end
resource "cachetools" do
url "https://files.pythonhosted.org/packages/ae/37/7fd45996b19200e0cb2027a0b6bef4636951c4ea111bfad36c71287247f6/cachetools-3.1.1.tar.gz"
sha256 "8ea2d3ce97850f31e4a08b0e2b5e6c34997d7216a9d2c98e0f3978630d4da69a"
end
resource "certifi" do
url "https://files.pythonhosted.org/packages/62/85/7585750fd65599e88df0fed59c74f5075d4ea2fe611deceb95dd1c2fb25b/certifi-2019.9.11.tar.gz"
sha256 "e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/2d/bf/960e5a422db3ac1a5e612cb35ca436c3fc985ed4b7ed13a1b4879006f450/cffi-1.13.2.tar.gz"
sha256 "599a1e8ff057ac530c9ad1778293c665cb81a791421f46922d80a86473c13346"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
end
resource "cliff" do
url "https://files.pythonhosted.org/packages/3d/78/40a180430ff54afabe613f16d8f2658a24b1675c45dd806620511219af89/cliff-2.16.0.tar.gz"
sha256 "622e777b8ac2eb479708fe53893c37b2fd5469ce2c6c5b794a658246f05c6b81"
end
resource "cmd2" do
url "https://files.pythonhosted.org/packages/9c/2b/263241eb120357090fcb2acc5245cf528207a73f3c41eb75655f0862f5cd/cmd2-0.9.19.tar.gz"
sha256 "c81284083d993af18b8fef57d89d854d49d051d4c2c8a8e12d0281e369ac3682"
end
resource "contextlib2" do
url "https://files.pythonhosted.org/packages/02/54/669207eb72e3d8ae8b38aa1f0703ee87a0e9f88f30d3c0a47bebdb6de242/contextlib2-0.6.0.post1.tar.gz"
sha256 "01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e"
end
resource "debtcollector" do
url "https://files.pythonhosted.org/packages/c5/2a/686898a291a9f4d40aaf121c0e686498032dda78367d7d3e56079b05abaf/debtcollector-1.22.0.tar.gz"
sha256 "d1756440d25a50e3eca2fc399c9e5f1ca2f9e6b837570a80b9450999f4290525"
end
resource "decorator" do
url "https://files.pythonhosted.org/packages/dc/c3/9d378af09f5737cfd524b844cd2fbb0d2263a35c11d712043daab290144d/decorator-4.4.1.tar.gz"
sha256 "54c38050039232e1db4ad7375cfce6748d7b41c29e95a081c8a6d2c30364a2ce"
end
resource "deprecation" do
url "https://files.pythonhosted.org/packages/cd/94/8d9d6303f5ddcbf40959fc2b287479bd9a201ea9483373d9b0882ae7c3ad/deprecation-2.0.7.tar.gz"
sha256 "c0392f676a6146f0238db5744d73e786a43510d54033f80994ef2f4c9df192ed"
end
resource "dictdiffer" do
url "https://files.pythonhosted.org/packages/ba/ed/cee2a41eefad60860a8b64513d2be7b15cbc5a4e3ecaa4c9921b11732629/dictdiffer-0.8.0.tar.gz"
sha256 "b3ad476fc9cca60302b52c50e1839342d2092aeaba586d69cbf9249f87f52463"
end
resource "dnspython" do
url "https://files.pythonhosted.org/packages/ec/c5/14bcd63cb6d06092a004793399ec395405edf97c2301dfdc146dfbd5beed/dnspython-1.16.0.zip"
sha256 "36c5e8e38d4369a08b6780b7f27d790a292b2b08eea01607865bf0936c558e01"
end
resource "docker-py" do
url "https://files.pythonhosted.org/packages/fa/2d/906afc44a833901fc6fed1a89c228e5c88fbfc6bd2f3d2f0497fdfb9c525/docker-py-1.10.6.tar.gz"
sha256 "4c2a75875764d38d67f87bc7d03f7443a3895704efc57962bdf6500b8d4bc415"
end
resource "docker-pycreds" do
url "https://files.pythonhosted.org/packages/c5/e6/d1f6c00b7221e2d7c4b470132c931325c8b22c51ca62417e300f5ce16009/docker-pycreds-0.4.0.tar.gz"
sha256 "6ce3270bcaf404cc4c3e27e4b6c70d3521deae82fb508767870fdbf772d584d4"
end
resource "docutils" do
url "https://files.pythonhosted.org/packages/93/22/953e071b589b0b1fee420ab06a0d15e5aa0c7470eb9966d60393ce58ad61/docutils-0.15.2.tar.gz"
sha256 "a2aeea129088da402665e92e0b25b04b073c04b2dce4ab65caaa38b7ce2e1a99"
end
resource "dogpile.cache" do
url "https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz"
sha256 "b348835825c9dcd251d9aad1f89f257277ac198a3e35a61980ab4cb28c75216b"
end
resource "funcsigs" do
url "https://files.pythonhosted.org/packages/94/4a/db842e7a0545de1cdb0439bb80e6e42dfe82aaeaadd4072f2263a4fbed23/funcsigs-1.0.2.tar.gz"
sha256 "a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
end
resource "google-auth" do
url "https://files.pythonhosted.org/packages/ef/77/eb1d3288dbe2ba6f4fe50b9bb41770bac514cd2eb91466b56d44a99e2f8d/google-auth-1.6.3.tar.gz"
sha256 "0f7c6a64927d34c1a474da92cfc59e552a5d3b940d3266606c6a28b72888b9e4"
end
resource "ipaddress" do
url "https://files.pythonhosted.org/packages/b9/9a/3e9da40ea28b8210dd6504d3fe9fe7e013b62bf45902b458d1cdc3c34ed9/ipaddress-1.0.23.tar.gz"
sha256 "b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2"
end
resource "iso8601" do
url "https://files.pythonhosted.org/packages/45/13/3db24895497345fb44c4248c08b16da34a9eb02643cea2754b21b5ed08b0/iso8601-0.1.12.tar.gz"
sha256 "49c4b20e1f38aa5cf109ddcd39647ac419f928512c869dc01d5c7098eddede82"
end
resource "jmespath" do
url "https://files.pythonhosted.org/packages/2c/30/f0162d3d83e398c7a3b70c91eef61d409dea205fb4dc2b47d335f429de32/jmespath-0.9.4.tar.gz"
sha256 "bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"
end
resource "jsonpatch" do
url "https://files.pythonhosted.org/packages/30/ac/9b6478a560627e4310130a9e35c31a9f4d650704bbd03946e77c73abcf6c/jsonpatch-1.24.tar.gz"
sha256 "cbb72f8bf35260628aea6b508a107245f757d1ec839a19c34349985e2c05645a"
end
resource "jsonpointer" do
url "https://files.pythonhosted.org/packages/52/e7/246d9ef2366d430f0ce7bdc494ea2df8b49d7a2a41ba51f5655f68cfe85f/jsonpointer-2.0.tar.gz"
sha256 "c192ba86648e05fdae4f08a17ec25180a9aef5008d973407b581798a83975362"
end
resource "junos-eznc" do
url "https://files.pythonhosted.org/packages/d5/73/81ecb7b98e86f6d41d9b4deba72a76f5600c63b425bb4308532b38e728bc/junos-eznc-2.3.0.tar.gz"
sha256 "c0f853cdad12256ae8c33a80ff6c31a3ce867c481f805b085d554fbb5b5b084f"
end
resource "jxmlease" do
url "https://files.pythonhosted.org/packages/80/b3/a1ffc5ea763c84780a9acfaa4f69a98f6c974eaf297e20d9d3648ef7d95b/jxmlease-1.0.1.tar.gz"
sha256 "fb04cfd54d8d7e4cc533108750047e9ccf43139c3c0220f8a082274b19564e98"
end
resource "kerberos" do
url "https://files.pythonhosted.org/packages/34/18/9c86fdfdb27e0f7437b7d5a9e22975dcc382637b2a68baac07843be512fc/kerberos-1.3.0.tar.gz"
sha256 "f039b7dd4746df56f6102097b3dc250fe0078be75130b9dc4211a85a3b1ec6a4"
end
resource "keystoneauth1" do
url "https://files.pythonhosted.org/packages/9e/36/afc5ff283a790784941c7f180398193b84694a1f5172c396cc69dc42c29e/keystoneauth1-3.18.0.tar.gz"
sha256 "3ae67c6542ed66a37cddcd26a35457c1ff5cd14b20f5490973273cf9eb555a52"
end
resource "kubernetes" do
url "https://files.pythonhosted.org/packages/db/4e/af5af9e1cf3d6c9d001f0fcf1a0efc29a02c078da97a5fc9d7b0d17e631e/kubernetes-10.0.1.tar.gz"
sha256 "3770a496663396ad1def665eeadb947b3f45217a08b64b10c01a57e981ac8592"
end
resource "lxml" do
url "https://files.pythonhosted.org/packages/c4/43/3f1e7d742e2a7925be180b6af5e0f67d38de2f37560365ac1a0b9a04c015/lxml-4.4.1.tar.gz"
sha256 "c81cb40bff373ab7a7446d6bbca0190bccc5be3448b47b51d729e37799bb5692"
end
resource "monotonic" do
url "https://files.pythonhosted.org/packages/19/c1/27f722aaaaf98786a1b338b78cf60960d9fe4849825b071f4e300da29589/monotonic-1.5.tar.gz"
sha256 "23953d55076df038541e648a53676fb24980f7a1be290cdda21300b3bc21dfb0"
end
resource "msgpack" do
url "https://files.pythonhosted.org/packages/74/0a/de673c1c987f5779b65ef69052331ec0b0ebd22958bda77a8284be831964/msgpack-0.6.2.tar.gz"
sha256 "ea3c2f859346fcd55fc46e96885301d9c2f7a36d453f5d8f2967840efa1e1830"
end
resource "munch" do
url "https://files.pythonhosted.org/packages/43/a1/ec48010724eedfe2add68eb7592a0d238590e14e08b95a4ffb3c7b2f0808/munch-2.5.0.tar.gz"
sha256 "2d735f6f24d4dba3417fa448cae40c6e896ec1fdab6cdb5e6510999758a4dbd2"
end
resource "ncclient" do
url "https://files.pythonhosted.org/packages/36/b1/1f909193588dfd35726e8ae35e01463386bbcf670b90ed61c4930ce447db/ncclient-0.6.6.tar.gz"
sha256 "2b367354d1cd25b79b8798a0b4c1949590d890057f2a252e6e970a9ab744e009"
end
resource "netaddr" do
url "https://files.pythonhosted.org/packages/0c/13/7cbb180b52201c07c796243eeff4c256b053656da5cfe3916c3f5b57b3a0/netaddr-0.7.19.tar.gz"
sha256 "38aeec7cdd035081d3a4c306394b19d677623bf76fa0913f6695127c7753aefd"
end
resource "netifaces" do
url "https://files.pythonhosted.org/packages/0d/18/fd6e9c71a35b67a73160ec80a49da63d1eed2d2055054cc2995714949132/netifaces-0.10.9.tar.gz"
sha256 "2dee9ffdd16292878336a58d04a20f0ffe95555465fee7c9bd23b3490ef2abf3"
end
resource "ntlm-auth" do
url "https://files.pythonhosted.org/packages/12/8c/b7b286360a0876c1a4d0a1651eb3c57f471661e126f5c5f097fbad735f40/ntlm-auth-1.4.0.tar.gz"
sha256 "350f2389c8ee5517f47db55a36ac2f8efc9742a60a678d6e2caa92385bdcaa9a"
end
resource "oauthlib" do
url "https://files.pythonhosted.org/packages/fc/c7/829c73c64d3749da7811c06319458e47f3461944da9d98bb4df1cb1598c2/oauthlib-3.1.0.tar.gz"
sha256 "bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889"
end
resource "openshift" do
url "https://files.pythonhosted.org/packages/fd/df/ac9cfd29499fdb5c73e28590af4dca5ad3e03f8fd8e84612d9ff2fc40e26/openshift-0.10.0.tar.gz"
sha256 "8af7e1e1fd85af79e363b1a8a323f091764b3d9f2e9640bf32cbd11e52d4324f"
end
resource "openstacksdk" do
url "https://files.pythonhosted.org/packages/4b/ed/a0dbdad01b826d1502488e5dad5ea2575afeda5d1644307c1a2d21e65191/openstacksdk-0.37.0.tar.gz"
sha256 "0428b210ff187d911ef67cb8d2deea1ec440aee374aa09f74df3fd9c55c8da60"
end
resource "os-client-config" do
url "https://files.pythonhosted.org/packages/8d/1d/1a194b95aca009da5e94a3368e70bc1b0353bb0dcc54323a19fb0a2410ac/os-client-config-1.33.0.tar.gz"
sha256 "1237837355179e149ba0c6bfce169f243f634b6c6d6000984c90e4ffa45c2baf"
end
resource "os-service-types" do
url "https://files.pythonhosted.org/packages/58/3f/09e93eb484b69d2a0d31361962fb667591a850630c8ce47bb177324910ec/os-service-types-1.7.0.tar.gz"
sha256 "31800299a82239363995b91f1ebf9106ac7758542a1e4ef6dc737a5932878c6c"
end
resource "osc-lib" do
url "https://files.pythonhosted.org/packages/ab/ab/2d534f2d4821a012e9f09d8d2790bb3551a78e0868a37a2be17fea7702c1/osc-lib-1.14.1.tar.gz"
sha256 "3467a1edf62946f1b67724fa7f9c699b5e31d80b111ed9e4c7aec21633a3e30d"
end
resource "oslo.config" do
url "https://files.pythonhosted.org/packages/3e/d2/b2c35afb40a00d6e34254a48f4318b6c069882d8e648753c8dbe84648ae9/oslo.config-6.11.1.tar.gz"
sha256 "e99964a95e40b443b08e21f15fad5170bcc3a213f11ae5d187ce9046344bea91"
end
resource "oslo.context" do
url "https://files.pythonhosted.org/packages/ca/52/2cecd0122a1c47e1bf0a4c3bc8e89eb746e650fa5c33d8bd923b96e094c6/oslo.context-2.23.0.tar.gz"
sha256 "b346f000a93aa5893b616c837322dccf14ca0b16db5d705c3ede9cc123bc133a"
end
resource "oslo.i18n" do
url "https://files.pythonhosted.org/packages/05/92/af5abe2f9e4315f89408c5632241e6776eeec793d3d7f084d411668682d7/oslo.i18n-3.24.0.tar.gz"
sha256 "2fe3d0ace3c1d6769675b94d446818799d3022886deea3b219beb7325c86f4fe"
end
resource "oslo.log" do
url "https://files.pythonhosted.org/packages/c6/45/2d6a13d70349f9b092daaf502f6680cfb8f592fc16fbecb0803abd130b75/oslo.log-3.44.1.tar.gz"
sha256 "5e11e18b1ba20b8f660711a14f1e49ebedd904fb88c29b3ba7f64d5441b91fe2"
end
resource "oslo.serialization" do
url "https://files.pythonhosted.org/packages/0d/7b/3405bd8b9c7d8246ed60a31fa2c7b41f4891aff67d0db424833043de9723/oslo.serialization-2.29.2.tar.gz"
sha256 "f1571f477be946fbb5ee80a939db51a9fb8f1da7902a7dfe0c8b3b2657e8469e"
end
resource "oslo.utils" do
url "https://files.pythonhosted.org/packages/50/7f/ef68b0a2cc136969a34577e61a595dd12a8bce222121c6e3aa3938eddd26/oslo.utils-3.41.2.tar.gz"
sha256 "5fc7724f020643d3da6e2ef8c071f85f004746c13619f71531c4501c1b332886"
end
resource "packaging" do
url "https://files.pythonhosted.org/packages/5a/2f/449ded84226d0e2fda8da9252e5ee7731bdf14cd338f622dfcd9934e0377/packaging-19.2.tar.gz"
sha256 "28b924174df7a2fa32c1953825ff29c61e2f5e082343165438812f00d3a7fc47"
end
resource "paramiko" do
url "https://files.pythonhosted.org/packages/54/68/dde7919279d4ecdd1607a7eb425a2874ccd49a73a5a71f8aa4f0102d3eb8/paramiko-2.6.0.tar.gz"
sha256 "f4b2edfa0d226b70bd4ca31ea7e389325990283da23465d572ed1f70a7583041"
end
resource "passlib" do
url "https://files.pythonhosted.org/packages/25/4b/6fbfc66aabb3017cd8c3bd97b37f769d7503ead2899bf76e570eb91270de/passlib-1.7.1.tar.gz"
sha256 "3d948f64138c25633613f303bcc471126eae67c04d5e3f6b7b8ce6242f8653e0"
end
resource "pyasn1" do
url "https://files.pythonhosted.org/packages/ca/f8/2a60a2c88a97558bdd289b6dc9eb75b00bd90ff34155d681ba6dbbcb46b2/pyasn1-0.4.7.tar.gz"
sha256 "a9495356ca1d66ed197a0f72b41eb1823cf7ea8b5bd07191673e8147aecf8604"
end
resource "pyasn1-modules" do
url "https://files.pythonhosted.org/packages/75/93/c51104ea6a74252957c341ccd110b65efecc18edfd386b666637d67d4d10/pyasn1-modules-0.2.7.tar.gz"
sha256 "0c35a52e00b672f832e5846826f1fb7507907f7d52fba6faa9e3c4cbe874fe4b"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz"
sha256 "a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
end
resource "pycrypto" do
url "https://files.pythonhosted.org/packages/60/db/645aa9af249f059cc3a368b118de33889219e0362141e75d4eaf6f80f163/pycrypto-2.6.1.tar.gz"
sha256 "f2ce1e989b272cfcb677616763e0a2e7ec659effa67a88aa92b3a65528f60a3c"
end
resource "pyparsing" do
url "https://files.pythonhosted.org/packages/7e/24/eaa8d7003aee23eda270099eeec754d7bf4399f75c6a011ef948304f66a2/pyparsing-2.4.2.tar.gz"
sha256 "6f98a7b9397e206d78cc01df10131398f1c8b8510a2f4d97d9abd82e1aacdd80"
end
resource "pyperclip" do
url "https://files.pythonhosted.org/packages/2d/0f/4eda562dffd085945d57c2d9a5da745cfb5228c02bc90f2c74bbac746243/pyperclip-1.7.0.tar.gz"
sha256 "979325468ccf682104d5dcaf753f869868100631301d3e72f47babdea5700d1c"
end
resource "pyserial" do
url "https://files.pythonhosted.org/packages/cc/74/11b04703ec416717b247d789103277269d567db575d2fd88f25d9767fe3d/pyserial-3.4.tar.gz"
sha256 "6e2d401fdee0eab996cf734e67773a0143b932772ca8b42451440cfed942c627"
end
resource "pysphere" do
url "https://files.pythonhosted.org/packages/a3/53/582ad19aae059b777f1105e6c7f6fa96f2ab6e7f018d94497fbe1518548d/pysphere-0.1.7.zip"
sha256 "cef3cb3a6836f1cf092caf4613123d084f36b0e96fa48a27708c0e868df8a1ea"
end
resource "python-consul" do
url "https://files.pythonhosted.org/packages/7f/06/c12ff73cb1059c453603ba5378521e079c3f0ab0f0660c410627daca64b7/python-consul-1.1.0.tar.gz"
sha256 "168f1fa53948047effe4f14d53fc1dab50192e2a2cf7855703f126f469ea11f4"
end
resource "python-dateutil" do
url "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz"
sha256 "73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"
end
resource "python-keyczar" do
url "https://files.pythonhosted.org/packages/c8/14/3ffb68671fef927fa5b60f21c43a04a4a007acbe939a26ba08b197fea6b3/python-keyczar-0.716.tar.gz"
sha256 "f9b614112dc8248af3d03b989da4aeca70e747d32fe7e6fce9512945365e3f83"
end
resource "python-keystoneclient" do
url "https://files.pythonhosted.org/packages/f8/f6/c54a3e0ce02dac89f23b35ef73f17f803dda02051030f95b2cfa77a9b134/python-keystoneclient-3.22.0.tar.gz"
sha256 "6e2b6d2a5ae5d7aa26d4e52d1c682e08417d2c5d73ccc54cb65c54903a868cb4"
end
resource "python-neutronclient" do
url "https://files.pythonhosted.org/packages/c0/47/ec107ea24d54a0ca3ad8f4e04b933f0da95ed6c4719d41fcc282c91a66b0/python-neutronclient-6.14.0.tar.gz"
sha256 "d4a09f1fe74236563f84b958d9088b932be9a1978c6dd0e49134a1ad79b7e68e"
end
resource "python-string-utils" do
url "https://files.pythonhosted.org/packages/5d/13/216f2d4a71307f5a4e5782f1f59e6e8e5d6d6c00eaadf9f92aeccfbb900c/python-string-utils-0.6.0.tar.gz"
sha256 "05d24a8d884b629b534af992dc1f35dc4de4c73678ffdffa0efcbe667058af1f"
end
resource "pywinrm" do
url "https://files.pythonhosted.org/packages/fc/88/be0ea1af44c3bcc54e4c41e4056986743551693c77dfe50b48a3f4ba1bf7/pywinrm-0.4.1.tar.gz"
sha256 "4ede5c6c85b53780ad0dbf9abef2fa2ea58f44c82256a84a63eae5f1205cea81"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz"
sha256 "11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4"
end
resource "requests-credssp" do
url "https://files.pythonhosted.org/packages/24/a6/a8a1a9238497c00013b6b5f343781034f3d56faa1d3b4b18558bdc6426a6/requests-credssp-1.1.0.tar.gz"
sha256 "02244edb2fb7e747eb645294b14eaaf6cdd7540b86d858d78ac72daf4c5c866a"
end
resource "requests-oauthlib" do
url "https://files.pythonhosted.org/packages/de/a2/f55312dfe2f7a344d0d4044fdfae12ac8a24169dc668bd55f72b27090c32/requests-oauthlib-1.2.0.tar.gz"
sha256 "bd6533330e8748e94bf0b214775fed487d309b8b8fe823dc45641ebcd9a32f57"
end
resource "requests_ntlm" do
url "https://files.pythonhosted.org/packages/3e/02/6b31dfc8334caeea446a2ac3aea5b8e197710e0b8ad3c3035f7c79e792a8/requests_ntlm-1.1.0.tar.gz"
sha256 "9189c92e8c61ae91402a64b972c4802b2457ce6a799d658256ebf084d5c7eb71"
end
resource "requestsexceptions" do
url "https://files.pythonhosted.org/packages/82/ed/61b9652d3256503c99b0b8f145d9c8aa24c514caff6efc229989505937c1/requestsexceptions-1.4.0.tar.gz"
sha256 "b095cbc77618f066d459a02b137b020c37da9f46d9b057704019c9f77dba3065"
end
resource "rfc3986" do
url "https://files.pythonhosted.org/packages/34/c9/bcba83f13f628e947e23a0e54e18d0a6f13e5d03ca4ec04def0105c81bfc/rfc3986-1.3.2.tar.gz"
sha256 "0344d0bd428126ce554e7ca2b61787b6a28d2bbd19fc70ed2dd85efe31176405"
end
resource "rsa" do
url "https://files.pythonhosted.org/packages/cb/d0/8f99b91432a60ca4b1cd478fd0bdf28c1901c58e3a9f14f4ba3dba86b57f/rsa-4.0.tar.gz"
sha256 "1a836406405730121ae9823e19c6e806c62bbad73f890574fff50efa4122c487"
end
resource "ruamel.yaml" do
url "https://files.pythonhosted.org/packages/de/76/cf97d739365eff258e2af0457a150bf2818f3eaa460328610eafeed0894a/ruamel.yaml-0.16.5.tar.gz"
sha256 "412a6f5cfdc0525dee6a27c08f5415c7fd832a7afcb7a0ed7319628aed23d408"
end
resource "ruamel.yaml.clib" do
url "https://files.pythonhosted.org/packages/92/28/612085de3fae9f82d62d80255d9f4cf05b1b341db1e180adcf28c1bf748d/ruamel.yaml.clib-0.2.0.tar.gz"
sha256 "b66832ea8077d9b3f6e311c4a53d06273db5dc2db6e8a908550f3c14d67e718c"
end
resource "s3transfer" do
url "https://files.pythonhosted.org/packages/39/12/150cd55c606ebca6725683642a8e7068cd6af10f837ce5419a9f16b7fb55/s3transfer-0.2.1.tar.gz"
sha256 "6efc926738a3cd576c2a79725fed9afde92378aa5c6a957e3af010cb019fac9d"
end
resource "scp" do
url "https://files.pythonhosted.org/packages/05/e0/ac4169e773e12a08d941ca3c006cb8c91bee9d6d80328a15af850b5e7480/scp-0.13.2.tar.gz"
sha256 "ef9d6e67c0331485d3db146bf9ee9baff8a48f3eb0e6c08276a8584b13bf34b3"
end
resource "shade" do
url "https://files.pythonhosted.org/packages/50/c2/33fc3e4f18cddb09df33138981632f88bcab9988c43659bc3a4ef08734f7/shade-1.32.0.tar.gz"
sha256 "514326efb926cea5b77efe06a69bcdc0bdb996c1bc95bd2349638cc21b772430"
end
resource "simplejson" do
url "https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz"
sha256 "b1f329139ba647a9548aa05fb95d046b4a677643070dc2afc05fa2e975d09ca5"
end
resource "six" do
url "https://files.pythonhosted.org/packages/dd/bf/4138e7bfb757de47d1f4b6994648ec67a51efe58fa907c1e11e350cddfca/six-1.12.0.tar.gz"
sha256 "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
end
resource "stevedore" do
url "https://files.pythonhosted.org/packages/05/79/516aa1c427da04f818242ec147a2f417dd0e8f4abbc9302ed07459fb152d/stevedore-1.31.0.tar.gz"
sha256 "e0739f9739a681c7a1fda76a102b65295e96a144ccdb552f2ae03c5f0abe8a14"
end
resource "subprocess32" do
url "https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz"
sha256 "eb2937c80497978d181efa1b839ec2d9622cf9600a039a79d0e108d1f9aec79d"
end
resource "unicodecsv" do
url "https://files.pythonhosted.org/packages/6f/a4/691ab63b17505a26096608cc309960b5a6bdf39e4ba1a793d5f9b1a53270/unicodecsv-0.14.1.tar.gz"
sha256 "018c08037d48649a0412063ff4eda26eaa81eff1546dbffa51fa5293276ff7fc"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/ff/44/29655168da441dff66de03952880c6e2d17b252836ff1aa4421fba556424/urllib3-1.25.6.tar.gz"
sha256 "9a107b99a5393caf59c7aa3c1249c16e6879447533d0887f4336dde834c7be86"
end
resource "wcwidth" do
url "https://files.pythonhosted.org/packages/55/11/e4a2bb08bb450fdbd42cc709dd40de4ed2c472cf0ccb9e64af22279c5495/wcwidth-0.1.7.tar.gz"
sha256 "3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e"
end
resource "websocket_client" do
url "https://files.pythonhosted.org/packages/c5/01/8c9c7de6c46f88e70b5a3276c791a2be82ae83d8e0d0cc030525ee2866fd/websocket_client-0.56.0.tar.gz"
sha256 "1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a"
end
resource "wrapt" do
url "https://files.pythonhosted.org/packages/23/84/323c2415280bc4fc880ac5050dddfb3c8062c2552b34c2e512eb4aa68f79/wrapt-1.11.2.tar.gz"
sha256 "565a021fd19419476b9362b05eeaa094178de64f8361e44468f9e9d7843901e1"
end
resource "xmltodict" do
url "https://files.pythonhosted.org/packages/58/40/0d783e14112e064127063fbf5d1fe1351723e5dfe9d6daad346a305f6c49/xmltodict-0.12.0.tar.gz"
sha256 "50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"
end
resource "zabbix-api" do
url "https://files.pythonhosted.org/packages/e3/ed/2092731880f0de5b07067fc446dc0fc5166f2ee98018b6d524cd3e28a69d/zabbix-api-0.5.4.tar.gz"
sha256 "2d6c62001cb79a7de6fe286424967276edaca09d3833b72fb04f7863f29fce4b"
end
# Builds ansible and all vendored Python resources into a virtualenv under
# libexec, then installs the man pages. All code below is Homebrew formula
# DSL; the ENV tweaks must run before virtualenv_install_with_resources.
def install
  # Ensure the brewed python3 shims are found first on PATH.
  ENV.prepend_path "PATH", Formula["python"].opt_libexec/"bin"
  # Fix "ld: file not found: /usr/lib/system/libsystem_darwin.dylib" for lxml
  ENV["SDKROOT"] = MacOS.sdk_path if MacOS.version == :sierra
  # Work around Xcode 11 clang bug
  # https://code.videolan.org/videolan/libbluray/issues/20
  ENV.append_to_cflags "-fno-stack-check" if DevelopmentTools.clang_build_version >= 1010
  # libffi headers live inside the SDK, not /usr/include:
  # https://github.com/Homebrew/homebrew-core/issues/7197
  ENV.prepend "CPPFLAGS", "-I#{MacOS.sdk_path}/usr/include/ffi"
  # Creates the venv in libexec and pip-installs every `resource` block.
  virtualenv_install_with_resources
  # prettytable 0.7.2 has file permissions 600 for some files.
  # We need to add read permissions in order to be able to use it as a
  # different user than the one installing it.
  # See: https://github.com/Homebrew/homebrew-core/issues/6975
  # Also: https://github.com/Homebrew/brew/pull/1709
  Pathname.glob(libexec/"lib/python*/site-packages/prettytable-0.7.2-py*.egg-info").each do |prettytable_path|
    chmod_R("a+r", prettytable_path)
  end
  man1.install Dir["docs/man/man1/*.1"]
end
# Formula smoke test: runs a trivial ping playbook against localhost over a
# local connection, then verifies the requests[security] extras import and
# can complete a TLS handshake (exercises pyOpenSSL/cryptography).
test do
  ENV["ANSIBLE_REMOTE_TEMP"] = testpath/"tmp"
  (testpath/"playbook.yml").write <<~EOS
    ---
    - hosts: all
      gather_facts: False
      tasks:
        - name: ping
          ping:
  EOS
  (testpath/"hosts.ini").write "localhost ansible_connection=local\n"
  system bin/"ansible-playbook", testpath/"playbook.yml", "-i", testpath/"hosts.ini"
  # Ensure requests[security] is activated
  script = "import requests as r; r.get('https://mozilla-modern.badssl.com')"
  system libexec/"bin/python3", "-c", script
end
end
| 48.172956 | 160 | 0.81709 |
7a41d582bca1685984f8a0c1d770f4085ae9e390 | 1,472 | module Discretion
class << self
# Whether +viewer+ may read +record+.
#
# Records that do not include Discretion::DiscreetModel are always visible,
# as is everything while acting as the omniscient or omnipotent viewer;
# otherwise the record's own (possibly private) can_see? hook decides.
def can_see_record?(viewer, record)
  return true unless record.is_a?(Discretion::DiscreetModel)
  return true if Discretion.currently_acting_as?(Discretion::OMNISCIENT_VIEWER)
  return true if Discretion.currently_acting_as?(Discretion::OMNIPOTENT_VIEWER)

  # send is used because can_see? may be a private hook on the model.
  record.send(:can_see?, viewer)
end
# Convenience form of can_see_record? that checks against the ambient
# current viewer tracked by Discretion.
def current_viewer_can_see_record?(record)
  can_see_record?(Discretion.current_viewer, record)
end
# Whether +viewer+ may write +record+ with the pending +changes+.
#
# Non-discreet models and the omnipotent viewer are always allowed. If the
# model defines a can_write? hook (public or private — hence the `true`
# argument to respond_to?) it is consulted; otherwise write permission
# falls back to read permission.
def can_write_record?(viewer, record, changes, new_record)
  return true unless record.is_a?(Discretion::DiscreetModel)
  return true if Discretion.currently_acting_as?(Discretion::OMNIPOTENT_VIEWER)

  if record.respond_to?(:can_write?, true)
    record.send(:can_write?, viewer, changes, new_record)
  else
    can_see_record?(viewer, record)
  end
end
# Convenience form of can_write_record? that checks against the ambient
# current viewer tracked by Discretion.
def current_viewer_can_write_record?(record, changes, new_record)
  can_write_record?(Discretion.current_viewer, record, changes, new_record)
end
# Whether +viewer+ may destroy +record+.
#
# Non-discreet models and the omnipotent viewer are always allowed. If the
# model defines a can_destroy? hook (public or private) it is consulted;
# otherwise destruction falls back to write permission with no changes.
def can_destroy_record?(viewer, record)
  return true unless record.is_a?(Discretion::DiscreetModel)
  return true if Discretion.currently_acting_as?(Discretion::OMNIPOTENT_VIEWER)

  if record.respond_to?(:can_destroy?, true)
    record.send(:can_destroy?, viewer)
  else
    can_write_record?(viewer, record, {}, false)
  end
end
# Convenience form of can_destroy_record? that checks against the ambient
# current viewer tracked by Discretion.
def current_viewer_can_destroy_record?(record)
  can_destroy_record?(Discretion.current_viewer, record)
end
end
end
| 35.902439 | 89 | 0.726223 |
d58e27a066d729297f5a32f95e4a68bba3dec2c6 | 321 | class CreateUsers < ActiveRecord::Migration[5.1]
# Creates the users table: a display name, a password hash, and a pair of
# expiring tokens (a short-lived auth token plus a refresh token, each with
# its own expiration timestamp).
def change
  create_table :users do |t|
    t.string :name
    t.string :password_hash
    t.string :token
    t.datetime :token_expiration_date
    t.string :refresh_token
    t.datetime :refresh_token_expiration_date
    t.timestamps
  end
end
end
| 22.928571 | 48 | 0.682243 |
035ad3f4d8426cd789ec482453c8569622ced19b | 95 | # frozen_string_literal: true
module Facter
  # Gem version string. The defined? guard avoids an "already initialized
  # constant" warning if this file is loaded more than once.
  VERSION = '4.0.39' unless defined?(VERSION)
end
| 15.833333 | 45 | 0.747368 |
62b2faf505be7730c62c3c5c16dd6d5b2cfd7aae | 585 | module RoutingHelper
# URL for the registration page. When the request already carries a
# subdomain, a relative path suffices; otherwise build an absolute URL
# pinned to the 'it' subdomain.
def registration_url
  if request.subdomain.present?
    new_users_registrations_path
  else
    new_users_registrations_url(subdomain: 'it')
  end
end
# URL for the passwordless sign-in page for the 'user' scope. Same subdomain
# policy as registration_url: relative path on a subdomain, absolute URL on
# the 'it' subdomain otherwise.
def session_url
  return new_passwordless_sessions_path('user') if request.subdomain.present?

  new_passwordless_sessions_url('user', subdomain: 'it')
end
# Public CV URL: a path when already on a subdomain, otherwise an
# absolute URL whose subdomain is the CV's locale.
def home_page_cv_url(cv_obj)
  return cv_section_path(cv_obj.user_subdomain) if request.subdomain.present?

  cv_section_url(cv_obj.user_subdomain, subdomain: cv_obj.locale)
end
end
| 25.434783 | 108 | 0.700855 |
1a6c3d14fdab90ab11759a9a382c81d1dde7b08e | 2,548 | class RecipeIngredientsUnitsController < ApplicationController
before_action :set_recipe_ingredients_unit, only: [:show, :edit, :update, :destroy]
# GET /recipe_ingredients_units
# GET /recipe_ingredients_units.json
# Loads every RecipeIngredientsUnit for the index views (HTML and JSON).
def index
  @recipe_ingredients_units = RecipeIngredientsUnit.all
end
# GET /recipe_ingredients_units/1
# GET /recipe_ingredients_units/1.json
# Record is loaded by the :set_recipe_ingredients_unit before_action;
# this action only renders the default template.
def show
end
# GET /recipe_ingredients_units/new
# Builds an unsaved record to back the new-unit form.
def new
  @recipe_ingredients_unit = RecipeIngredientsUnit.new
end
# GET /recipe_ingredients_units/1/edit
# Record is loaded by the before_action; renders the edit form.
def edit
end
# POST /recipe_ingredients_units
# POST /recipe_ingredients_units.json
# Builds a unit from the whitelisted params and persists it, answering
# HTML (redirect or re-rendered form) and JSON.
def create
  @recipe_ingredients_unit = RecipeIngredientsUnit.new(recipe_ingredients_unit_params)
  saved = @recipe_ingredients_unit.save

  respond_to do |format|
    if saved
      format.html { redirect_to recipe_ingredients_units_path, notice: 'Recipe ingredients unit was successfully created.' }
      format.json { render :show, status: :created, location: @recipe_ingredients_unit }
    else
      format.html { render :new }
      format.json { render json: @recipe_ingredients_unit.errors, status: :unprocessable_entity }
    end
  end
end
# PATCH/PUT /recipe_ingredients_units/1
# PATCH/PUT /recipe_ingredients_units/1.json
# Applies the whitelisted params to the loaded record, answering HTML
# (redirect or re-rendered form) and JSON.
def update
  updated = @recipe_ingredients_unit.update(recipe_ingredients_unit_params)

  respond_to do |format|
    if updated
      format.html { redirect_to recipe_ingredients_units_path, notice: 'Recipe ingredients unit was successfully updated.' }
      format.json { render :show, status: :ok, location: @recipe_ingredients_unit }
    else
      format.html { render :edit }
      format.json { render json: @recipe_ingredients_unit.errors, status: :unprocessable_entity }
    end
  end
end
# DELETE /recipe_ingredients_units/1
# DELETE /recipe_ingredients_units/1.json
# Deletes the loaded record, then redirects (HTML) or returns 204 (JSON).
def destroy
  @recipe_ingredients_unit.destroy

  respond_to do |format|
    format.html do
      redirect_to recipe_ingredients_units_url, notice: 'Recipe ingredients unit was successfully destroyed.'
    end
    format.json { head :no_content }
  end
end
private
# Use callbacks to share common setup or constraints between actions.
# Loads the record for member actions; ActiveRecord::RecordNotFound is
# raised (and rendered as 404) for unknown ids.
def set_recipe_ingredients_unit
  @recipe_ingredients_unit = RecipeIngredientsUnit.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
# Strong parameters: only :ingredient_unit_name may be mass-assigned.
def recipe_ingredients_unit_params
  params.require(:recipe_ingredients_unit).permit(:ingredient_unit_name)
end
end
| 33.973333 | 126 | 0.746075 |
ff3bbde90912bfcaa85a6320e39df1c201c5e1d5 | 880 | # frozen_string_literal: true
require "utils/repology"
describe Repology do
  describe "formula_data" do
    it "returns nil for invalid Homebrew Formula" do
      expect(described_class.formula_data("invalidName")).to be_nil
    end
  end

  describe "single_package_query" do
    it "returns nil for non-existent package" do
      response = described_class.single_package_query("invalidName")
      expect(response).to be_nil
    end

    it "returns a hash for existing package" do
      response = described_class.single_package_query("openclonk")
      expect(response).not_to be_nil
      expect(response).to be_a(Hash)
    end
  end

  describe "parse_api_response" do
    # Memoized per example instead of being executed at example-group
    # definition time: the original called the API while the spec file
    # was merely being loaded, even when no example here was selected.
    let(:limit) { 1 }
    let(:response) { described_class.parse_api_response(limit) }

    it "returns a hash of data" do
      expect(response).not_to be_nil
      expect(response).to be_a(Hash)
    end
  end
end
| 23.783784 | 68 | 0.714773 |
084af8e486e6484153ce042d6b9cbd4aebaa156a | 603 | def sort_itself(array)
result = []
for i in 1...array.size
current = array[i]
j = i - 1
while j >= 0 && current < array[j]
array[j + 1] = array[j]
j -= 1
end
array[j + 1] = current
# print
result << array.clone
end
result
end
describe 'sort_itself' do
  let(:array) { [1, 4, 3, 6, 9, 2] }
  it 'returns and array of arrays => as snapshots of the insertion progress' do
    # One snapshot per outer iteration of the insertion sort, i.e.
    # array.size - 1 entries; the last snapshot is fully sorted.
    expect(sort_itself array).to match [
      [1, 4, 3, 6, 9, 2],
      [1, 3, 4, 6, 9, 2],
      [1, 3, 4, 6, 9, 2],
      [1, 3, 4, 6, 9, 2],
      [1, 2, 3, 4, 6, 9]
    ]
  end
end | 21.535714 | 79 | 0.500829 |
08d83da9f6a606006d1b37fb194cd5c95dccded3 | 1,075 | class Exempi < Formula
desc "Library to parse XMP metadata"
homepage "https://wiki.freedesktop.org/libopenraw/Exempi/"
url "https://libopenraw.freedesktop.org/download/exempi-2.5.2.tar.bz2"
sha256 "52f54314aefd45945d47a6ecf4bd21f362e6467fa5d0538b0d45a06bc6eaaed5"
livecheck do
url "https://libopenraw.freedesktop.org/exempi/"
regex(/href=.*?exempi[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
cellar :any
sha256 "3ef58fd5cbd177ac785cfab9b58f813ce24320a507243d9d7b6c940fd463564f" => :catalina
sha256 "189bb3c57e78845c33678353cb877ad7cdedd665087c0a4525397f32088abc39" => :mojave
sha256 "0843f9bc589fd3c9ed0f5dfd724ba60eea4832410a0b6ff831bdb22c6563eafd" => :high_sierra
sha256 "178e85b9647be2f5a484c03075c98982a0a3695fab4486f0d1f08750cd406c8c" => :x86_64_linux
end
depends_on "boost"
uses_from_macos "expat"
# Standard autotools build; configure is pointed at Homebrew's boost.
def install
  system "./configure", "--disable-dependency-tracking",
                        "--prefix=#{prefix}",
                        "--with-boost=#{HOMEBREW_PREFIX}"
  system "make", "install"
end
end
| 34.677419 | 94 | 0.71907 |
e9737a4e0cadf48e16e7a936220998c52f24c2d6 | 248 | class CreateEvidences < ActiveRecord::Migration[5.2]
# Creates the evidences table. Both this table and the referenced
# defects table use UUID primary keys.
def change
  create_table :evidences, id: :uuid do |t|
    t.text :description
    t.belongs_to :defect, foreign_key: true, index: true, type: :uuid
    t.timestamps
  end
end
end
| 22.545455 | 71 | 0.673387 |
874b31c3c15bd1af5eeebb98d44972d6db9be6bf | 2,881 | require 'coveralls'
Coveralls.wear!

# Configure Rails Environment
ENV['RAILS_ENV'] = 'test'

require File.expand_path('../dummy/config/environment.rb', __FILE__)
require 'rspec/rails'
require 'database_cleaner'
require 'factory_girl'
FactoryGirl.find_definitions
require 'ffaker'
require 'shoulda-matchers'

# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[File.join(File.dirname(__FILE__), 'support/**/*.rb')].each { |f| require f }

require 'spree/testing_support/authorization_helpers'
require 'spree/testing_support/capybara_ext'
require 'spree/testing_support/controller_requests'
require 'spree/testing_support/factories'
require 'spree/testing_support/preferences'
require 'spree/testing_support/url_helpers'

require 'spree_drop_ship/factories'

RSpec.configure do |config|
  config.include FactoryGirl::Syntax::Methods
  config.include IntegrationHelpers
  config.include Spree::TestingSupport::Preferences

  # == URL Helpers
  #
  # Allows access to Spree's routes in specs:
  #
  #   visit spree.admin_path
  #   current_path.should eql(spree.products_path)
  #
  # (UrlHelpers and ControllerRequests were previously included twice;
  # once is enough.)
  config.include Spree::TestingSupport::UrlHelpers
  config.include Spree::TestingSupport::ControllerRequests

  # Capybara javascript drivers require transactional fixtures set to false, and we use DatabaseCleaner
  # to cleanup after each test instead. Without transactional fixtures set to false the records created
  # to setup a test will be unavailable to the browser, which runs under a seperate server instance.
  config.use_transactional_fixtures = false

  # Ensure Suite is set to use transactions for speed.
  config.before :suite do
    DatabaseCleaner.strategy = :transaction
    DatabaseCleaner.clean_with :truncation
  end

  # Before each spec check if it is a Javascript test and switch between using database transactions or not where necessary.
  config.before :each do
    DatabaseCleaner.strategy = RSpec.current_example.metadata[:js] ? :truncation : :transaction
    DatabaseCleaner.start
    reset_spree_preferences
  end

  # After each spec clean the database.
  config.after :each do
    DatabaseCleaner.clean
  end

  # If true, the base class of anonymous controllers will be inferred
  # automatically. This will be the default behavior in future versions of
  # rspec-rails.
  config.infer_base_class_for_anonymous_controllers = false

  # Infer spec type (model, controller, feature, ...) from file location.
  # (This was previously called twice; the duplicate is removed.)
  config.infer_spec_type_from_file_location!

  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  #     --seed 1234
  # config.order = "random"

  config.color = true
end
| 35.134146 | 124 | 0.782714 |
188d7c6c834801bf9bdce6f60608fb406f5a2621 | 615 | module Rsync
class Change
  # Maps rsync's raw file-type letter to a symbol, with custom handling
  # for entries reported as directories.
  def file_type
    kind =
      case raw_file_type
      when 'f' then :file
      when 'd' then :directory
      when 'L' then :symlink
      when 'D' then :device
      when 'S' then :special
      end
    # custom: only directory entries need further disambiguation.
    return kind unless kind == :directory
    return kind if filename.to_s.end_with?('/')
    return :file if update_type == :message && message == 'deleting'

    kind
  end
end
class Result
  # Raw exit code of the rsync process.
  def status_code
    @exitcode
  end

  # Summary hash combining the success flag, exit code and error message.
  def status
    { success: success?, code: @exitcode, message: error }
  end
end
end
| 18.088235 | 70 | 0.500813 |
bb8380bfda01db214dd986e7fb5dcaed0eae23eb | 1,938 | module Importing
class CsvUserParser
  class << self
    # Columns that must appear — and the only columns allowed — in the
    # CSV header row.
    REQUIRED_USER_HEADERS = %w(
      kw_id first_name last_name email phone
    )

    # Parses a ';'-separated CSV file (with a header row) into
    # Importing::User objects, each given a random short password.
    #
    # Returns Success.new(parsed_data: [...]) on success, or a Failure
    # when the file is empty, the header row has missing/extra columns,
    # or any data line fails validation (reported by 1-based line number).
    def parse(file:)
      parsed_data = []
      invalid_lines = {}

      return Failure.new(:invalid, message: I18n.t('.empty_file')) if file.size.zero?

      # Block form guarantees the underlying file handle is closed even
      # when we bail out early; the previous non-block CSV.open leaked it.
      CSV.open(file.path, col_sep: ';', headers: true) do |csv|
        csv.each_with_index do |row, index|
          next if row.empty?

          if index == 0
            result = check_headers(row)
            return result if result.failure?
          end

          parsed_object = Importing::User.new(
            first_name: row['first_name'],
            last_name: row['last_name'],
            email: row['email'],
            phone: row['phone'],
            kw_id: row['kw_id'].to_i,
            password: Devise.friendly_token.first(4)
          )

          if parsed_object.invalid?
            invalid_lines[index + 1] = parsed_object.errors.messages
          else
            parsed_data << parsed_object
          end
        end
      end

      return failure_with_lines(invalid_lines) if invalid_lines.any?

      Success.new(parsed_data: parsed_data)
    end

    private

    # Builds a single Failure message listing each invalid line number
    # with the names of the fields that failed validation.
    def failure_with_lines(invalid_lines)
      errors = []
      invalid_lines.each do |key, value|
        errors << "Line #{key}: #{value.keys.join(', ')}"
      end
      Failure.new(:invalid, message: "Invalid fields: #{errors.join('; ')}")
    end

    # Validates the header row: all required columns present, no extras.
    def check_headers(row)
      missing_headers = REQUIRED_USER_HEADERS - row.headers
      if missing_headers.any?
        return Failure.new(:invalid, message: "missing columns: #{missing_headers.join(', ')}")
      end

      extra_headers = row.headers - REQUIRED_USER_HEADERS
      if extra_headers.any?
        return Failure.new(:invalid, message: "extra columns: #{extra_headers.join(', ')}")
      end

      Success.new
    end
  end
end
end
| 27.295775 | 97 | 0.576367 |
ff2b6fbb8eca368e24f449d10e42cdbf05857cb3 | 3,586 | # frozen_string_literal: true
require 'spec_helper'
describe MetricsDashboard do
include MetricsDashboardHelpers
describe 'GET #metrics_dashboard' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { project_with_dashboard('.gitlab/dashboards/test.yml') }
let_it_be(:environment) { create(:environment, project: project) }
before do
sign_in(user)
project.add_maintainer(user)
end
controller(::ApplicationController) do
include MetricsDashboard # rubocop:disable RSpec/DescribedClass
end
let(:json_response) do
routes.draw { get "metrics_dashboard" => "anonymous#metrics_dashboard" }
response = get :metrics_dashboard, format: :json
JSON.parse(response.parsed_body)
end
context 'when no parameters are provided' do
it 'returns an error json_response' do
expect(json_response['status']).to eq('error')
end
end
context 'when params are provided' do
let(:params) { { environment: environment } }
before do
allow(controller).to receive(:project).and_return(project)
allow(controller)
.to receive(:metrics_dashboard_params)
.and_return(params)
end
it 'returns the specified dashboard' do
expect(json_response['dashboard']['dashboard']).to eq('Environment metrics')
expect(json_response).not_to have_key('all_dashboards')
end
context 'when the params are in an alternate format' do
let(:params) { ActionController::Parameters.new({ environment: environment }).permit! }
it 'returns the specified dashboard' do
expect(json_response['dashboard']['dashboard']).to eq('Environment metrics')
expect(json_response).not_to have_key('all_dashboards')
end
end
context 'when parameters are provided and the list of all dashboards is required' do
before do
allow(controller).to receive(:include_all_dashboards?).and_return(true)
end
it 'returns a dashboard in addition to the list of dashboards' do
expect(json_response['dashboard']['dashboard']).to eq('Environment metrics')
expect(json_response).to have_key('all_dashboards')
end
context 'in all_dashboard list' do
let(:system_dashboard) { json_response['all_dashboards'].find { |dashboard| dashboard["system_dashboard"] == true } }
let(:project_dashboard) { json_response['all_dashboards'].find { |dashboard| dashboard["system_dashboard"] == false } }
it 'includes project_blob_path only for project dashboards' do
expect(system_dashboard['project_blob_path']).to be_nil
expect(project_dashboard['project_blob_path']).to eq("/#{project.namespace.path}/#{project.name}/blob/master/.gitlab/dashboards/test.yml")
end
describe 'project permissions' do
using RSpec::Parameterized::TableSyntax
where(:can_collaborate, :system_can_edit, :project_can_edit) do
false | false | false
true | false | true
end
with_them do
before do
allow(controller).to receive(:can_collaborate_with_project?).and_return(can_collaborate)
end
it "sets can_edit appropriately" do
expect(system_dashboard["can_edit"]).to eq(system_can_edit)
expect(project_dashboard["can_edit"]).to eq(project_can_edit)
end
end
end
end
end
end
end
end
| 35.156863 | 150 | 0.653653 |
4a00f36d57138f128a9937a81a98862fe6e9d220 | 1,118 | # Copyright (c) 2008-2013 Michael Dvorkin and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
require 'spec_helper'
describe "/campaigns/index" do
  include CampaignsHelper

  before do
    login_and_assign
  end

  it "should render [campaign] template with @campaigns collection if there are campaigns" do
    assign(:campaigns, [FactoryGirl.create(:campaign, id: 42)].paginate)

    render template: 'campaigns/index', formats: [:js]
    # JS response replaces the #campaigns container with the highlighted
    # row for the created campaign and re-renders pagination.
    expect(rendered).to include("$('#campaigns').html('<li class=\\'campaign highlight\\' id=\\'campaign_42\\'")
    expect(rendered).to include("#paginate")
  end

  it "should render [empty] template if @campaigns collection if there are no campaigns" do
    assign(:campaigns, [].paginate)

    render template: 'campaigns/index', formats: [:js]
    expect(rendered).to include("$('#campaigns').html('<div id=\\'empty\\'>")
    expect(rendered).to include("#paginate")
  end
end
| 33.878788 | 112 | 0.660107 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.