hexsha (stringlengths 40..40) | size (int64 2..1.01M) | content (stringlengths 2..1.01M) | avg_line_length (float64 1.5..100) | max_line_length (int64 2..1k) | alphanum_fraction (float64 0.25..1) |
---|---|---|---|---|---|
911ecc2884d41d23a55a4ce47b00e1a9848ce55e | 1,121 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v9/errors/string_format_error.proto
require 'google/api/annotations_pb'
require 'google/protobuf'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v9/errors/string_format_error.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v9.errors.StringFormatErrorEnum" do
end
add_enum "google.ads.googleads.v9.errors.StringFormatErrorEnum.StringFormatError" do
value :UNSPECIFIED, 0
value :UNKNOWN, 1
value :ILLEGAL_CHARS, 2
value :INVALID_FORMAT, 3
end
end
end
module Google
module Ads
module GoogleAds
module V9
module Errors
StringFormatErrorEnum = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v9.errors.StringFormatErrorEnum").msgclass
StringFormatErrorEnum::StringFormatError = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v9.errors.StringFormatErrorEnum.StringFormatError").enummodule
end
end
end
end
end
| 35.03125 | 194 | 0.752007 |
333cb9fd1e04c4d1e51512ddbe95d20320c81f50 | 893 | class Api::V0::TeamClaimsController < Api::V0::BaseController
before_action :require_team_claim, only: [:update]
def index
@team_claims = TeamClaim.where(accepted_at: nil, rejected_at: nil).order(:created_at).includes(:team, :person => :github_account)
end
def update
raise "already accepted/rejected" if @team_claim.accepted_at || @team_claim.rejected_at
if params[:accepted]
@team_claim.update_attributes!(accepted_at: Time.now)
@team_claim.team.claim_team(@team_claim.person)
elsif params[:rejected]
@team_claim.update_attributes(rejected_at: Time.now, rejected_notes: params[:rejected_notes])
@team_claim.person.send_email(:claim_team_rejected, team: @team_claim.team, team_claim: @team_claim)
end
render json: true
end
protected
def require_team_claim
@team_claim = TeamClaim.where(id: params[:id]).first!
end
end
| 29.766667 | 133 | 0.737962 |
ab45b7ebccfb296b755392a1234e154c16016b10 | 121 | # open custom monit web port
firewall_rule "http" do
port 2812
action :allow
notifies :enable, "firewall[ufw]"
end
| 17.285714 | 35 | 0.727273 |
79650e00deda4d1568003072c15202f7f2b89889 | 303 | # frozen_string_literal: true
module Milestoner
# Gem identity information.
module Identity
NAME = "milestoner"
LABEL = "Milestoner"
VERSION = "12.1.0"
VERSION_LABEL = "#{LABEL} #{VERSION}".freeze
SUMMARY = "A command line interface for crafting Git repository tags."
end
end
| 23.307692 | 74 | 0.693069 |
f85ad48dda2394920d43d1f1588652331f439c9e | 157 | # frozen_string_literal: true
require 'faker'
FactoryBot.define do
factory :answer do
association :question
value { Faker::Lorem.word }
end
end
| 15.7 | 31 | 0.726115 |
4a3c6afe4aa5bb093761b9ae4074f71674efa30b | 668 | class ApplicationController < ActionController::Base
# Prevent CSRF attacks by raising an exception.
# For APIs, you may want to use :null_session instead.
protect_from_forgery with: :exception
before_action :configure_permitted_parameters, if: :devise_controller?
protected
def configure_permitted_parameters
devise_parameter_sanitizer.for(:sign_up) << :name
devise_parameter_sanitizer.for(:account_update) << :name
end
rescue_from ActiveRecord::RecordNotFound do
flash[:warning] = 'Resource not found.'
redirect_back_or root_path
end
def redirect_back_or(path)
redirect_to request.referer || path
end
end
| 27.833333 | 72 | 0.755988 |
d56533f41da4b2e7a656bf6d39259c25753ff11c | 1,701 | module ActiveRecordSeek
module Scopes
class SeekScope < BaseScope
attr_reader(*%w[ seek_query active_record_query components components_hash ])
def query=(new_query)
@seek_query = new_query.to_seek_query
@active_record_query = @seek_query.to_active_record_query
@seek_query
end
def components_hash=(new_components_hash)
@components = new_components_hash.stringify_keys.map do |key, value|
Component.new(base_query: self, key: key, value: value)
end
end
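# Splits the requested components into those that filter the base query directly
# and those that target an association, applies the former to the seek query, and
# OR-groups an AssociationScope for each association carrying the latter.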
def apply
components_for_base_query = components.select(&:is_base_query_component?)
components_by_association = components.reject(&:is_base_query_component?).group_by(&:association)
self.query = seek_query.apply(components_for_base_query)
self.query = active_record_query.seek_or(self) do |this|
components_by_association.each do |association, association_components|
add_query do
AssociationScope.new(
base_query: to_seek_query,
association: association,
components: association_components,
).apply
end
end
end
seek_query
end
module ActiveRecordScopeConcern
extend ActiveSupport::Concern
class_methods do
def seek(components_hash = {}, &block)
raise(ArgumentError, "#{self}.seek does not accept a block") if block
SeekScope.new(
query: all,
components_hash: components_hash,
).apply.to_active_record_query
end
end
end
end
end
end
| 30.927273 | 105 | 0.631393 |
b90d6a00e3a0cac73938ee1574722418f1d42b9f | 839 | class SessionsController < ApplicationController
skip_before_action :require_login
def new
end
def create
if auth
@user = User.find_or_create_by(uid: auth['uid']) do |u|
u.name = auth['info']['name']
u.email = auth['info']['email']
end
session[:user_id] = @user.id
redirect_to home_path
else
@user = User.find_by(email: params[:session][:email])
if @user && @user.authenticate(params[:session][:password])
session[:user_id] = @user.id
redirect_to home_path
else
flash[:message] = "There was an error, please try again."
render :new
end
end
end
def destroy
if session[:user_id]
session.delete :user_id
end
redirect_to root_path
end
private
def auth
request.env['omniauth.auth']
end
end | 21.512821 | 65 | 0.617402 |
ab18d77877cc973de6a6682fce880acca9235210 | 2,779 | require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
# require "action_mailer/railtie"
require "active_resource/railtie"
# require "sprockets/railtie"
# require "rails/test_unit/railtie"
Bundler.require
require "esendex"
module Dummy
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
# config.active_record.whitelist_attributes = true
# Enable the asset pipeline
# config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
# config.assets.version = '1.0'
end
end
| 41.477612 | 100 | 0.737316 |
28447e861fcbf5169abf46782c59e367173d62cd | 2,942 | # frozen_string_literal: true
module GraphQL8
class Query
class SerialExecution
module ValueResolution
def self.resolve(parent_type, field_defn, field_type, value, selection, query_ctx)
if value.nil? || value.is_a?(GraphQL8::ExecutionError)
if field_type.kind.non_null?
if value.nil?
type_error = GraphQL8::InvalidNullError.new(parent_type, field_defn, value)
query_ctx.schema.type_error(type_error, query_ctx)
end
raise GraphQL8::Query::Executor::PropagateNull
else
nil
end
else
case field_type.kind
when GraphQL8::TypeKinds::SCALAR, GraphQL8::TypeKinds::ENUM
field_type.coerce_result(value, query_ctx)
when GraphQL8::TypeKinds::LIST
wrapped_type = field_type.of_type
result = []
i = 0
value.each do |inner_value|
inner_ctx = query_ctx.spawn_child(
key: i,
object: inner_value,
irep_node: selection,
)
result << resolve(
parent_type,
field_defn,
wrapped_type,
inner_value,
selection,
inner_ctx,
)
i += 1
end
result
when GraphQL8::TypeKinds::NON_NULL
wrapped_type = field_type.of_type
resolve(
parent_type,
field_defn,
wrapped_type,
value,
selection,
query_ctx,
)
when GraphQL8::TypeKinds::OBJECT
query_ctx.execution_strategy.selection_resolution.resolve(
value,
field_type,
selection,
query_ctx
)
when GraphQL8::TypeKinds::UNION, GraphQL8::TypeKinds::INTERFACE
query = query_ctx.query
resolved_type = query.resolve_type(value)
possible_types = query.possible_types(field_type)
if !possible_types.include?(resolved_type)
type_error = GraphQL8::UnresolvedTypeError.new(value, field_defn, parent_type, resolved_type, possible_types)
query.schema.type_error(type_error, query_ctx)
raise GraphQL8::Query::Executor::PropagateNull
else
resolve(
parent_type,
field_defn,
resolved_type,
value,
selection,
query_ctx,
)
end
else
raise("Unknown type kind: #{field_type.kind}")
end
end
end
end
end
end
end
| 33.431818 | 125 | 0.495921 |
1d53212ca24548b520eb658de6ca2c67b6d57367 | 1,935 | class Povray < Formula
desc "Persistence Of Vision RAYtracer (POVRAY)"
homepage "https://www.povray.org/"
url "https://github.com/POV-Ray/povray/archive/v3.7.0.8.tar.gz"
sha256 "53d11ebd2972fc452af168a00eb83aefb61387662c10784e81b63e44aa575de4"
revision 1
bottle do
rebuild 1
sha256 "eae4cf975215cdfdeadb665c53061c6ed2b4f9fa95121e7145222409b0e44c56" => :mojave
sha256 "4472bb00380eb26d3045dd5e67effa4f75934936263129009f9a80bbf5290633" => :high_sierra
sha256 "f21cb29c30c8367aa14f6a4485bf03377f23e30b2e7178be466d12bb84be26a9" => :sierra
sha256 "f2f0bf20fbe2d5b1ce91ecdf4eca52e4a544323910febae396d8b9fb1c0044ec" => :el_capitan
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "boost"
depends_on "jpeg"
depends_on "libpng"
depends_on "libtiff"
depends_on "openexr"
def install
ENV.cxx11
args = %W[
COMPILED_BY=homebrew
--disable-debug
--disable-dependency-tracking
--prefix=#{prefix}
--mandir=#{man}
--with-boost=#{Formula["boost"].opt_prefix}
--with-openexr=#{Formula["openexr"].opt_prefix}
--without-libsdl
--without-x
]
# Adjust some scripts to search for `etc` in HOMEBREW_PREFIX.
%w[allanim allscene portfolio].each do |script|
inreplace "unix/scripts/#{script}.sh",
/^DEFAULT_DIR=.*$/, "DEFAULT_DIR=#{HOMEBREW_PREFIX}"
end
cd "unix" do
system "./prebuild.sh"
end
system "./configure", *args
system "make", "install"
end
test do
# Condensed version of `share/povray-3.7/scripts/allscene.sh` that only
# renders variants of the famous Utah teapot as a quick smoke test.
scenes = Dir["#{share}/povray-3.7/scenes/advanced/teapot/*.pov"]
assert !scenes.empty?, "Failed to find test scenes."
scenes.each do |scene|
system "#{share}/povray-3.7/scripts/render_scene.sh", ".", scene
end
end
end
| 30.234375 | 93 | 0.69199 |
bbfa00f7faa8b8d6b8e054b71c57c7a4a1c5079b | 14,170 | require 'active_model'
require 'crud_methods'
require 'zoho_crm_utils'
class RubyZoho::Crm
class << self
attr_accessor :module_name
end
@module_name = 'Crm'
include CrudMethods
include ZohoCrmUtils
def initialize(object_attribute_hash = {})
@fields = object_attribute_hash == {} ? RubyZoho.configuration.api.fields(self.class.module_name) :
object_attribute_hash.keys
create_accessor(self.class, @fields)
create_accessor(self.class, [:module_name])
public_send(:module_name=, self.class.module_name)
update_or_create_attrs(object_attribute_hash)
self
end
def self.method_missing(meth, *args, &block)
if meth.to_s =~ /^find_by_(.+)$/
run_find_by_method($1, *args, &block)
else
super
end
end
#
# Save multiple objects using a single request
# @param objects [Array] List of objects to save
#
# @return [Hash] Zoho return hash
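#
# @example Illustrative sketch (class and field names are assumptions, not taken from this file)
#   leads = [RubyZoho::Crm::Lead.new(:first_name => 'Ada', :last_name => 'Lovelace'),
#            RubyZoho::Crm::Lead.new(:first_name => 'Alan', :last_name => 'Turing')]
#   RubyZoho::Crm::Lead.multi_save(leads) #=> hash of record details returned by Zoho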
def self.multi_save(objects)
request_url = RubyZoho.configuration.api.create_url(self.module_name, 'insertRecords')
request_document = REXML::Document.new
module_element = request_document.add_element self.module_name
groupped_by_url = {}
objects.each_with_index do |object, row_id|
fields_values_hash = {}
object.fields.each { |f| fields_values_hash.merge!({ f => object.send(f) }) }
fields_values_hash.delete_if { |k, v| v.nil? }
row = module_element.add_element 'row', { 'no' => row_id+1 }
fields_values_hash.each_pair { |k, v| RubyZoho.configuration.api.add_field(row, ApiUtils.symbol_to_string(k), v) }
end
request_result = RubyZoho.configuration.api.class.post(request_url, {
:query => {
:newFormat => 1,
:authtoken => RubyZoho.configuration.api_key,
:scope => 'crmapi', :xmlData => request_document
},
:headers => { 'Content-length' => '0'}
})
RubyZoho.configuration.api.check_for_errors(request_result)
x_r = REXML::Document.new(request_result.body).elements.to_a('//recorddetail')
return RubyZoho.configuration.api.to_hash(x_r, module_name)[0]
end
#
# Batch insert objects using a single request
# @param objects [Array] List of objects to insert
#
# @return [Hash] status
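#
# @example Minimal sketch (module, class and field names are assumptions)
#   contacts = [RubyZoho::Crm::Contact.new(:email => 'a@example.com'),
#               RubyZoho::Crm::Contact.new(:email => 'b@example.com')]
#   result = RubyZoho::Crm::Contact.batch_insert(contacts)
#   result[:success] #=> true, with per-row details in result[:request_result]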
def self.batch_insert(objects, wfTrigger=false, verbose=false)
request_url = RubyZoho.configuration.api.create_url(self.module_name, 'insertRecords')
request_document = REXML::Document.new
module_element = request_document.add_element self.module_name
objects.each_with_index do |object, index|
fields_values_hash = {}
object.fields.each { |f| fields_values_hash.merge!({ f => object.send(f) }) }
fields_values_hash.delete_if { |k, v| v.nil? }
row = module_element.add_element('row', { 'no' => index+1 })
fields_values_hash.each_pair { |k, v| RubyZoho.configuration.api.add_field(row, ApiUtils.symbol_to_string(k), v) }
puts "insert_field_values=#{fields_values_hash.to_json}" if verbose
end
puts "request_document=#{request_document}" if verbose
request_result = RubyZoho.configuration.api.class.post(request_url, {
:query => {
:wfTrigger=>wfTrigger,
:duplicateCheck=>'1',
:newFormat=>1,
:version=>4,
:authtoken => RubyZoho.configuration.api_key,
:scope => 'crmapi', :xmlData => request_document
},
:headers => { 'Content-length' => '0'}
})
unless request_result.code == 200
return {success: false, error_code: 'WEB_SERVICE_CALL_FAILED', error_message: "Web service call failed with #{request_result.code}", request_result:request_result}
end
puts "ws_request_result=#{request_result}" if verbose
begin
request_result_by_row = build_batch_request_result(objects, request_result, verbose)
return {success:true, request_result: request_result_by_row}
rescue => e
puts e.inspect
puts e.backtrace.join("\n")
return {success: false, error_code: 'INVALID_REQUEST_RESULT', error_message: "Web service call returned invalid/malformed request result", request_result:request_result}
end
end
#
# Batch update objects using a single request
# @param objects [Array] List of objects to update. Each object must have an :id field value
#
# @return [Hash] status
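#
# @example Minimal sketch (assumes records were fetched earlier, so :id is populated)
#   contacts = RubyZoho::Crm::Contact.find_by_email('a@example.com') || []
#   contacts.each { |c| c.email = 'updated@example.com' }
#   RubyZoho::Crm::Contact.batch_update(contacts)[:success] #=> true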
def self.batch_update(objects, wfTrigger=false, verbose=false)
request_url = RubyZoho.configuration.api.create_url(self.module_name, 'updateRecords')
request_document = REXML::Document.new
module_element = request_document.add_element self.module_name
invalid_objects = []
objects.each_with_index do |object, index|
fields_values_hash = {}
object.fields.each { |f| fields_values_hash.merge!({ f => object.send(f) })}
fields_values_hash.delete_if { |k, v| v.nil? }
puts "update_field_values=#{fields_values_hash.to_json}" if verbose
id = fields_values_hash[:id]
if id
puts "id=#{id}" if verbose
fields_values_hash.delete(:id)
row = module_element.add_element 'row', { 'no' => index+1 }
RubyZoho.configuration.api.add_id_field(row, id)
fields_values_hash.each_pair { |k, v| RubyZoho.configuration.api.add_field(row, ApiUtils.symbol_to_string(k), v) }
else
invalid_objects << {:error_message => 'id not found', :error_object =>object}
end
end
unless invalid_objects.empty?
return {success: false, error_code: 'INVALID_REQUEST_OBJECT', error_message: "Invalid request object(s)", invalid_objects: invalid_objects}
end
puts "request_document=#{request_document}" if verbose
#return {success: true, request_result: {}} # keep this for debug
# :version=>4 is required to execute in batch mode
request_result = RubyZoho.configuration.api.class.post(request_url, {
:query => {
:wfTrigger=>wfTrigger,
:newFormat=>1,
:duplicateCheck=>'1',
:version=>4,
:authtoken => RubyZoho.configuration.api_key,
:scope => 'crmapi', :xmlData => request_document
},
:headers => { 'Content-length' => '0'}
})
unless request_result.code == 200
return {success: false, error_code: 'WEB_SERVICE_CALL_FAILED', error_message: "Web service call failed with #{request_result.code}", request_result:request_result}
end
puts "ws_request_result=#{request_result}" if verbose
begin
request_result_by_row = build_batch_request_result(objects, request_result, verbose)
return {success:true, request_result: request_result_by_row}
rescue => e
puts e.inspect
puts e.backtrace.join("\n")
return {success: false, error_code: 'INVALID_REQUEST_RESULT', error_message: "Web service call returned invalid/malformed request result", request_result:request_result}
end
end
#
# convertLead API wrapper
# @param lead_id [String] ID of the Lead to convert
# @param potential_name [String] name of the Potential to create
# @param potential_stage [String] stage of the Potential to create
# @param closing_date [Date, Time] closing date of the Potential to create
# @param verbose [Boolean] when true, prints request/response details
# @return [Hash] status
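#
# @example Illustrative sketch (the ID and names below are placeholders)
#   RubyZoho::Crm::Lead.convert_lead('1234567890', 'Big Deal', 'Qualification', Date.today + 30)
#   #=> { success: true, account_id: '...', contact_id: '...', potential_id: '...' }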
def self.convert_lead(lead_id, potential_name, potential_stage, closing_date, verbose=false)
#validate
if lead_id.blank?
return {success: false, error_code: 'INVALID_LEAD_ID', error_message: 'Invalid Lead ID'}
end
if potential_name.blank?
return {success: false, error_code: 'INVALID_POTENTIAL_NAME', error_message: 'Invalid Potential Name'}
end
if potential_stage.blank?
return {success: false, error_code: 'INVALID_POTENTIAL_STAGE', error_message: 'Invalid Potential Stage'}
end
if closing_date.blank?
return {success: false, error_code: 'INVALID_CLOSING_DATE', error_message: 'Invalid Closing Date'}
end
zoho_closing_date = closing_date.strftime("%m/%d/%Y")
request_url = RubyZoho.configuration.api.create_url('Leads', 'convertLead')
request_document = REXML::Document.new
module_element = request_document.add_element 'Potentials'
row1 = module_element.add_element 'row', { 'no' => 1 }
RubyZoho.configuration.api.add_option_field(row1, 'createPotential', true)
RubyZoho.configuration.api.add_option_field(row1, 'notifyLeadOwner', false)
RubyZoho.configuration.api.add_option_field(row1, 'notifyNewEntityOwner', false)
row2 = module_element.add_element 'row', { 'no' => 2 }
RubyZoho.configuration.api.add_option_field(row2, 'Closing Date', zoho_closing_date)
RubyZoho.configuration.api.add_option_field(row2, 'Potential Stage', potential_stage)
RubyZoho.configuration.api.add_option_field(row2, 'Potential Name', potential_name)
puts "request_document=#{request_document}" if verbose
#return {success: true, request_result: {}} # keep this for debug
# :version=>4 is required to execute in batch mode
request_result = RubyZoho.configuration.api.class.post(request_url, {
:query => {
:leadId => lead_id,
:newFormat=>1,
:version=>1,
:authtoken => RubyZoho.configuration.api_key,
:scope => 'crmapi', :xmlData => request_document
},
:headers => { 'Content-length' => '0'}
})
unless request_result.code == 200
return {success: false, error_code: 'WEB_SERVICE_CALL_FAILED', error_message: "Web service call failed with #{request_result.code}", request_result:request_result}
end
puts "ws_request_result=#{request_result}" if verbose
begin
doc = REXML::Document.new(request_result.body)
successTag = doc.elements['//success']
if successTag
account_id = safe_xml_element_text_value(successTag.elements["//Account"])
contact_id = safe_xml_element_text_value(successTag.elements["//Contact"])
potential_id = safe_xml_element_text_value(successTag.elements["//Potential"])
return {success: true, account_id: account_id, contact_id: contact_id, potential_id: potential_id}
end
errorTag = doc.elements['//response//error']
if errorTag
error_code = safe_xml_element_text_value(errorTag.elements["//code"])
error_message = safe_xml_element_text_value(errorTag.elements["//message"])
return {success: false, error_code: error_code, error_message: error_message}
end
return {success: false, error_code: 'INVALID_REQUEST_RESULT', error_message: "Web service call returned invalid/malformed request result", request_result:request_result}
rescue => e
puts e.inspect
puts e.backtrace.join("\n")
return {success: false, error_code: 'INVALID_REQUEST_RESULT', error_message: "Web service call returned invalid/malformed request result", request_result:request_result}
end
end
def self.build_batch_request_result(objects, request_result, verbose=false)
result_by_row = {}
REXML::Document.new(request_result.body).elements.to_a('//response/result/row').each do |row|
row_no = row.attribute('no').value
row_result = row.elements.first #only one element expected either 'success' or 'error'
unless row_result
result_by_row[row_no] = {row: row_no, success: false, message: 'Malformed request result', id: nil}
next
end
code = safe_xml_element_text_value(row_result.elements['code'])
status = row_result.name
if status == 'success'
id = safe_xml_element_text_value(row_result.elements["details/FL[@val='Id']"])
if id
success = true
message = ''
else
success = false
message = 'Request processed successfully, but Id not found in request result'
end
elsif status == 'error'
id = nil
success = false
message = safe_xml_element_text_value(row_result.elements['details'])
else
id = nil
success = false
message = 'Unknown request result status'
end
result_by_row[row_no] = {row: row_no, success: success, code: code, message: message, id: id}
end
objects.each_with_index do |object, index|
row_no = index + 1
result = result_by_row[row_no.to_s]
unless result
message = "No request result found for #{row_no}"
puts message if verbose
result = result_by_row[row_no.to_s] = {row: row_no, success: false, code: nil, message: message, id: nil}
end
result[:source_object] = object
end
puts "result_by_row=#{result_by_row}" if verbose
return result_by_row
end
def self.safe_xml_element_text_value(element)
return nil unless element
return element.text
end
def method_missing(meth, *args, &block)
if [:seid=, :semodule=].index(meth)
run_create_accessor(self.class, meth)
self.send(meth, args[0])
else
super
end
end
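# Implements the dynamic find_by_<attribute>[_and_<attribute>] finders dispatched
# from method_missing above: queries the Zoho API for matching records and wraps
# each returned hash in a new instance.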
def self.run_find_by_method(attrs, *args, &block)
attrs = attrs.split('_and_')
conditions = Array.new(args.size, '=')
h = RubyZoho.configuration.api.find_records(
self.module_name, ApiUtils.string_to_symbol(attrs[0]), conditions[0], args[0]
)
return h.collect { |r| new(r) } unless h.nil?
nil
end
def << object
object.semodule = self.module_name
object.seid = self.id
object.fields << :seid
object.fields << :semodule
save_object(object)
end
def primary_key
RubyZoho.configuration.api.primary_key(self.class.module_name)
end
def self.setup_classes
RubyZoho.configuration.crm_modules.each do |module_name|
klass_name = module_name.chop
c = Class.new(self) do
include RubyZoho
include ActiveModel
extend ActiveModel::Naming
attr_reader :fields
@module_name = module_name
end
const_set(klass_name, c)
end
end
c = Class.new(self) do
def initialize(object_attribute_hash = {})
module_name = 'Users'
super
end
def self.delete(id)
raise 'Cannot delete users through API'
end
def save
raise 'Cannot save users through API'
end
def self.all
result = RubyZoho.configuration.api.users('AllUsers')
result.collect { |r| new(r) }
end
def self.find_by_email(email)
r = []
self.all.index { |u| r << u if u.email == email }
r
end
def self.method_missing(meth, *args, &block)
Crm.module_name = 'Users'
super
end
end
Kernel.const_set 'CRMUser', c
end
| 36.240409 | 175 | 0.684615 |
8708b7a8f1dc214ad62bc55edf68372f302c57cb | 262 | # frozen_string_literal: true
class Slide < ApplicationRecord
# mounts
mount_uploader :image, SlideUploader
# validations
validates :title, presence: true
validates :image, presence: true
# scopes
scope :active, -> { where(is_active: true) }
end
| 18.714286 | 44 | 0.732824 |
d54eb0fec2443f40940e2c690eaabf4b511684a3 | 1,683 | cask 'safari-technology-preview' do
if MacOS.version <= :mojave
version '91,061-10708-20190903-4bf79804-9f40-4b31-97c9-c914330c30b1'
sha256 'ad39ec6525db9c95eaa9559ad446ab24bf179cdb599a8d5b47e01f4b395e1897'
else
version '91,061-10706-20190903-6cbd1abb-4b46-4a3f-9229-5e3f89061692'
sha256 'f9db8462b5dc2433e4af3deb1c354985c135bf8ce4173bda34f0602466fd7187'
end
url "https://secure-appldnld.apple.com/STP/#{version.after_comma}/SafariTechnologyPreview.dmg"
appcast 'https://developer.apple.com/safari/download/'
name 'Safari Technology Preview'
homepage 'https://developer.apple.com/safari/download/'
auto_updates true
depends_on macos: '>= :mojave'
pkg 'Safari Technology Preview.pkg'
uninstall delete: '/Applications/Safari Technology Preview.app'
zap trash: [
'~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.apple.safaritechnologypreview.sfl*',
'~/Library/Caches/com.apple.SafariTechnologyPreview',
'~/Library/Preferences/com.apple.SafariTechnologyPreview.plist',
'~/Library/SafariTechnologyPreview',
'~/Library/Saved Application State/com.apple.SafariTechnologyPreview.savedState',
'~/Library/SyncedPreferences/com.apple.SafariTechnologyPreview-com.apple.Safari.UserRequests.plist',
'~/Library/SyncedPreferences/com.apple.SafariTechnologyPreview-com.apple.Safari.WebFeedSubscriptions.plist',
'~/Library/SyncedPreferences/com.apple.SafariTechnologyPreview.plist',
'~/Library/WebKit/com.apple.SafariTechnologyPreview',
]
end
| 49.5 | 165 | 0.738562 |
38975c98269a6fb72d303e7e4423f97fd3070af5 | 12,792 | # frozen_string_literal: true
require 'rails_helper'
require File.join(Rails.root, 'spec/shared_contexts/test_enterprise_admin_seed')
RSpec.describe Admin::TenantsController, type: :controller, dbclean: :after_each do
include_context 'setup enterprise admin seed'
let!(:tenant_account) { FactoryBot.create(:account, email: 'admin@market_place.org', enterprise_id: enterprise.id) }
let!(:invalid_account) { FactoryBot.create(:account, enterprise_id: enterprise.id) }
let(:tenant_params) do
{ key: :ma, owner_organization_name: 'MA Marketplace', account_email: tenant_account.email }
end
let(:value_for_age_rated) { 'age_rated' }
let(:value_for_geo_rating_area) { 'zipcode' }
include_context 'setup tenant'
let(:controller_params) do
{'tenants_tenant' => {'owner_organization_name'=>'DC mpl', 'sites_attributes'=>{'0'=>
{'options_attributes'=>
{'1'=>
{'child_options_attributes'=>
{'0'=>{'value'=>'My Health Connector', 'id'=>'id'},
'1'=>{'value'=>'https://openhbx.org', 'id'=>'id'},
'2'=>{'value'=>'1-800-555-1212', 'id'=>'id'}},
'id'=>'id'},
'2'=>
{'child_options_attributes'=>
{'0'=>{'value'=>'https://test', 'id'=>'id'},
'1'=>
{'child_options_attributes'=>
{'0'=>{'value'=>'#007bff', 'id'=>'id'},
'1'=>{'value'=>'#868e96', 'id'=>'id'},
'2'=>{'value'=>'#28a745', 'id'=>'id'},
'3'=>{'value'=>'#dc3545', 'id'=>'id'},
'4'=>{'value'=>'#ffc107', 'id'=>'id'},
'5'=>{'value'=>'#cce5ff', 'id'=>'id'}},
'id'=>'id'}},
'id'=>'id'},
'3'=>{'child_options_attributes'=>{'0'=>{'id'=>'id'}}, 'id'=>'id'}},
'id'=>'id'}}},'id'=>tenant.id}
end
describe "POST #update" do
context 'for success case' do
before do
allow_any_instance_of(Transactions::UpdateTenant).to receive(:call).with(anything).and_return(Dry::Monads::Result::Success.new(tenant))
sign_in tenant_account
post :update, params: controller_params
end
it "returns http redirect" do
expect(response).to have_http_status(:redirect)
end
it 'should redirect to a specific path' do
expect(response).to redirect_to(admin_tenant_path(id: tenant.id, tab_name: tenant.id.to_s+"_profile"))
end
it 'should set a flash notice' do
expect(flash[:notice]).to eq 'Successfully updated marketplace settings'
end
end
context 'for failure case' do
before do
allow_any_instance_of(Transactions::UpdateTenant).to receive(:call).with(anything).and_return(Dry::Monads::Result::Failure.new(tenant))
sign_in tenant_account
post :update, params: controller_params
end
it "returns http redirect" do
expect(response).to have_http_status(:redirect)
end
it 'should redirect to a specific path' do
expect(response).to redirect_to(admin_tenant_path(id: tenant.id, tab_name: tenant.id.to_s+"_profile"))
end
it 'should set a flash error' do
expect(flash[:error]).to eq 'Something went wrong.'
end
end
end
describe "PUT #features_update" do
context 'for success case' do
before do
allow_any_instance_of(Transactions::UpdateTenant).to receive(:call).with(anything).and_return(Dry::Monads::Result::Success.new(tenant))
sign_in tenant_account
put :features_update, params: controller_params.merge!(tenant_id: tenant.id)
end
it "returns http redirect" do
expect(response).to have_http_status(:redirect)
end
it 'should redirect to a specific path' do
expect(response).to redirect_to(admin_tenant_features_show_path(id: tenant.id, tab_name: tenant.id.to_s+"_features"))
end
it 'should set a flash notice' do
expect(flash[:notice]).to eq 'Successfully updated marketplace settings'
end
end
context 'for failure case' do
before do
allow_any_instance_of(Transactions::UpdateTenant).to receive(:call).with(anything).and_return(Dry::Monads::Result::Failure.new(tenant))
sign_in tenant_account
put :features_update, params: controller_params.merge!(tenant_id: tenant.id)
end
it "returns http redirect" do
expect(response).to have_http_status(:redirect)
end
it 'should redirect to a specific path' do
expect(response).to redirect_to(admin_tenant_features_show_path(id: tenant.id, tab_name: tenant.id.to_s+"_features"))
end
it 'should set a flash error' do
expect(flash[:error]).to eq 'Something went wrong.'
end
end
end
describe "GET #plan_index" do
before do
sign_in tenant_account
get :plan_index, params: {tenant_id: tenant.id}
end
it "returns http success" do
expect(response).to have_http_status(:success)
end
it 'should set some instance variables' do
expect(controller.instance_variable_get(:@years)).to include([Enterprises::BenefitYear.first.calendar_year])
expect(controller.instance_variable_get(:@tenant)).to eq(tenant)
expect(controller.instance_variable_get(:@products)).to eq(tenant.products.all)
end
it 'should render template' do
expect(response).to render_template('plan_index')
end
end
describe "POST #upload_plan_data" do
context 'for success case' do
before do
allow_any_instance_of(Transactions::SerffTemplateUpload).to receive(:call).with(anything).and_return(Dry::Monads::Result::Success.new(tenant))
sign_in tenant_account
get :upload_plan_data, params: { tenant_id: tenant.id }
end
it "returns http redirect" do
expect(response).to have_http_status(:redirect)
end
it 'should redirect to a specific path' do
expect(response).to redirect_to(admin_tenant_plan_index_path(tenant.id, tab_name: tenant.id.to_s+"_plans"))
end
it 'should set a flash notice' do
expect(flash[:notice]).to eq 'Successfully uploaded plans'
end
end
context 'for failure case' do
before do
allow_any_instance_of(Transactions::SerffTemplateUpload).to receive(:call).with(anything).and_return(Dry::Monads::Result::Failure.new({errors: ['Failure case']}))
sign_in tenant_account
get :upload_plan_data, params: { tenant_id: tenant.id }
end
it "returns http redirect" do
expect(response).to have_http_status(:redirect)
end
it 'should redirect to a specific path' do
expect(response).to redirect_to(admin_tenant_plan_index_path(tenant.id, tab_name: tenant.id.to_s+"_plans"))
end
it 'should set a flash error' do
expect(flash[:error]).to eq 'Failure case'
end
end
end
describe "GET #plans_destroy" do
context 'for success case' do
before do
allow_any_instance_of(Transactions::PlansDestroy).to receive(:call).with(anything).and_return(Dry::Monads::Result::Success.new(tenant))
sign_in tenant_account
get :plans_destroy, params: { id: tenant.id, tenant_id: tenant.id }
end
it "returns http redirect" do
expect(response).to have_http_status(:redirect)
end
it 'should redirect to a specific path' do
expect(response).to redirect_to(admin_tenant_plan_index_path(tenant.id, tab_name: tenant.id.to_s+"_plans"))
end
it 'should set a flash notice' do
expect(flash[:notice]).to eq 'Successfully destroyed plans'
end
end
context 'for failure case' do
before do
allow_any_instance_of(Transactions::PlansDestroy).to receive(:call).with(anything).and_return(Dry::Monads::Result::Failure.new({errors: ["Unable to find tenant record with id #{tenant.id}"]}))
sign_in tenant_account
get :plans_destroy, params: { id: tenant.id, tenant_id: tenant.id }
end
it "returns http redirect" do
expect(response).to have_http_status(:redirect)
end
it 'should redirect to a specific path' do
expect(response).to redirect_to(admin_tenant_plan_index_path(tenant.id, tab_name: tenant.id.to_s+"_plans"))
end
it 'should set a flash error' do
expect(flash[:error]).to eq "Unable to find tenant record with id #{tenant.id}"
end
end
end
describe 'GET #translations_show' do
let(:translations_show_params) do
{ tab_name: "#{tenant.id.to_s}_translations", tenant_id: tenant.id.to_s }
end
before do
sign_in tenant_account
get :translations_show, params: translations_show_params
end
it "returns http success" do
expect(response).to have_http_status(:success)
end
it 'should set instance variable translation_entity' do
expect(controller.instance_variable_get(:@translation_entity)).to be_a Translation
end
it 'should render template' do
expect(response).to render_template('translations_show')
end
end
describe 'GET #fetch_locales' do
let(:fetch_locales_params) do
{ page: 'site', from_locale: 'en', to_locale: 'es', tenant_id: tenant.id.to_s}
end
before do
sign_in tenant_account
get :fetch_locales, params: fetch_locales_params, xhr: true
end
it "returns http success" do
expect(response).to have_http_status(:success)
end
it 'should set instance variable translation_entity' do
expect(controller.instance_variable_get(:@translation_entity)).to be_a Translation
end
it 'should render template' do
expect(response).to render_template(partial: 'admin/tenants/_source_translations')
end
end
describe 'GET #edit_translation' do
let(:edit_translation_params) do
{ page: 'about_hra', from_locale: 'en', to_locale: 'en', translation_key: 'about_hra.header.title', tenant_id: tenant.id.to_s }
end
before do
sign_in tenant_account
get :edit_translation, params: edit_translation_params, xhr: true
end
it "returns http success" do
expect(response).to have_http_status(:success)
end
it 'should set instance variable translation_entity' do
expect(controller.instance_variable_get(:@translation_entity)).to be_a Translation
end
it 'should render template' do
expect(response).to render_template(partial: 'admin/tenants/_edit_translation')
end
end
describe 'POST #update_translation' do
context 'for success case' do
let(:value) { "<div>About the HRA You've Been ____Offered</div>" }
let(:update_translation_params) do
{ translation: { current_locale: 'en', translation_key: 'about_hra.header.title', value: value },
translation_key: 'about_hra.header.title',
tenant_id: tenant.id.to_s }
end
before do
sign_in tenant_account
post :update_translation, params: update_translation_params, xhr: true
end
it "returns http success" do
expect(response).to have_http_status(:success)
end
it 'should set instance variable translation_entity' do
expect(controller.instance_variable_get(:@translation_entity).editable_translation.value).to eq(value)
end
it 'should set instance variable messages' do
expect(controller.instance_variable_get(:@messages)).to eq({ success: 'Successfully updated translation.' })
end
it 'should render template' do
expect(response).to render_template('update_translation')
end
end
context 'for failure case' do
let(:value) { "<div>About the HRA You've Been ____Offered</div>" }
let(:update_translation_params) do
{ translation: { current_locale: 'en', translation_key: 'about_hra.header.title', value: value },
translation_key: 'about_hra.header.title',
tenant_id: tenant.id.to_s }
end
before do
allow_any_instance_of(Options::Option).to receive(:save).and_return(false)
sign_in tenant_account
post :update_translation, params: update_translation_params, xhr: true
end
it "returns http success" do
expect(response).to have_http_status(:success)
end
it 'should set instance variable translation_entity' do
expect(controller.instance_variable_get(:@translation_entity).editable_translation.value).to eq(value)
end
it 'should set instance variable messages' do
expect(controller.instance_variable_get(:@messages)).to eq(error: 'Something went wrong.')
end
it 'should render template' do
expect(response).to render_template('update_translation')
end
end
end
end
| 34.95082 | 200 | 0.664009 |
f7083985c2bb3af9db4df7e7f42a52aa41e479b2 | 2,632 | module Sanitizer
# Sanitize params recursively
#
# @param [String, Hash, Array, ActionController::Parameters] passed_param
#
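# @example Illustrative values only
#   sanitize_params_recursively('<script>x</script>hi') #=> "hi"
#   sanitize_params_recursively(:name => '<b>bold</b>') #=> { :name => "bold" }
#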
def sanitize_params_recursively(passed_param)
if passed_param.is_a?(String)
# if the passed_param is a string, sanitize it directly to remove script tags etc
passed_param = Sanitize.fragment(passed_param.to_s).gsub("`", "`")
elsif passed_param.is_a?(Hash) || passed_param.is_a?(ActionController::Parameters)
# if the passed_param is a hash, sanitize the values.
# we are not sanitizing keys, as not known keys will not be accessed - assumption
passed_param.each do |key, val|
passed_param[key] = sanitize_params_recursively(val)
end
elsif passed_param.is_a?(Array)
# if passed_param is a array, sanitize each element
passed_param.each_with_index do |val, index|
passed_param[index] = sanitize_params_recursively(val)
end
end
passed_param
end
# Hashify params recursively
#
# @param [ActionController::Parameters, Hash, Array, Object] passed_param
#
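# @example Illustrative values only
#   hashify_params_recursively(ActionController::Parameters.new('a' => ['1', { 'b' => '2' }]))
#   #=> { "a" => ["1", { "b" => "2" }] } (a HashWithIndifferentAccess)
#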
def hashify_params_recursively(passed_param)
if passed_param.is_a?(ActionController::Parameters)
# if the passed_param is a ActionController::Parameters, convert it to a Hash
# and recursively call this method over that Hash
hashified_param = HashWithIndifferentAccess.new(passed_param.to_unsafe_hash)
hashify_params_recursively(hashified_param)
elsif passed_param.is_a?(Hash)
hashified_param = HashWithIndifferentAccess.new
passed_param.each do |key, val|
hashified_param[key] = hashify_params_recursively(val)
end
elsif passed_param.is_a?(Array)
hashified_param = passed_param.deep_dup
hashified_param.each_with_index do |p, i|
hashified_param[i] = hashify_params_recursively(p)
end
else
hashified_param = passed_param
end
hashified_param
end
# Recursively check keys sanity
#
# @param [Hash, Array] passed_params
#
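# @example Illustrative values only (a key containing markup fails the check)
#   recursively_check_keys_sanity('<b>name</b>' => 'x', 'safe_key' => 'y')
#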
def recursively_check_keys_sanity(passed_params)
if passed_params.is_a?(Hash)
passed_params.each do |params_key, params_value|
sanitized_params_key = Sanitize.fragment(params_key.to_s.dup).gsub('`', '`')
unless sanitized_params_key.eql?(params_key.to_s)
# send notification mail and redirect properly.
end
recursively_check_keys_sanity(params_value) if params_value.is_a?(Hash) || params_value.is_a?(Array)
end
elsif passed_params.is_a?(Array)
passed_params.each do |params_value|
recursively_check_keys_sanity(params_value) if params_value.is_a?(Hash) || params_value.is_a?(Array)
end
end
end
end | 34.631579 | 108 | 0.714286 |
5d1eaea0f7460a41504b783e1500852dde67978f | 169,123 | describe MiqExpression do
describe '#reporting_available_fields' do
let(:vm) { FactoryBot.create(:vm) }
let!(:custom_attribute) { FactoryBot.create(:custom_attribute, :name => 'my_attribute_1', :resource => vm) }
let(:extra_fields) do
%w(start_date
end_date
interval_name
display_range
entity
tag_name
label_name
id
vm_id
vm_name)
end
it 'lists custom attributes in ChargebackVm' do
skip('removal of virtual custom attributes needs to be done first in other specs')
displayed_columms = described_class.reporting_available_fields('ChargebackVm').map(&:second)
expected_columns = (ChargebackVm.attribute_names - extra_fields).map { |x| "ChargebackVm-#{x}" }
CustomAttribute.all.each do |custom_attribute|
expected_columns.push("#{vm.class}-#{CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX}#{custom_attribute.name}")
end
expect(displayed_columms).to match_array(expected_columns)
end
context 'with ChargebackVm' do
context 'with dynamic fields' do
let(:volume_1) { FactoryBot.create(:cloud_volume, :volume_type => 'TYPE1') }
let(:volume_2) { FactoryBot.create(:cloud_volume, :volume_type => 'TYPE2') }
let(:volume_3) { FactoryBot.create(:cloud_volume, :volume_type => 'TYPE3') }
let(:model) { "ChargebackVm" }
let(:volume_1_type_field_cost) { "#{model}-storage_allocated_#{volume_1.volume_type}_cost" }
let(:volume_2_type_field_cost) { "#{model}-storage_allocated_#{volume_2.volume_type}_cost" }
let(:volume_3_type_field_cost) { "#{model}-storage_allocated_#{volume_3.volume_type}_cost" }
before do
volume_1
volume_2
end
it 'returns uncached actual fields when dynamic fields have been changed' do
report_fields = described_class.reporting_available_fields(model).map(&:second)
expect(report_fields).to include(volume_1_type_field_cost)
expect(report_fields).to include(volume_2_type_field_cost)
# case: change name
volume_2.update!(:volume_type => 'NEW_TYPE_2')
ChargebackVm.current_volume_types_clear_cache
report_fields = described_class.reporting_available_fields(model).map(&:second)
expect(report_fields).to include(volume_1_type_field_cost)
expect(report_fields).not_to include(volume_2_type_field_cost) # old field
# check existence of new name
ChargebackVm.current_volume_types_clear_cache
report_fields = described_class.reporting_available_fields(model).map(&:second)
volume_2_type_field_cost = "#{model}-storage_allocated_#{volume_2.volume_type}_cost"
expect(report_fields).to include(volume_1_type_field_cost)
expect(report_fields).to include(volume_2_type_field_cost)
# case: add volume_type
volume_3
ChargebackVm.current_volume_types_clear_cache
report_fields = described_class.reporting_available_fields(model).map(&:second)
expect(report_fields).to include(volume_1_type_field_cost)
expect(report_fields).to include(volume_3_type_field_cost)
# case: remove volume_types
volume_2.destroy
volume_3.destroy
ChargebackVm.current_volume_types_clear_cache
report_fields = described_class.reporting_available_fields(model).map(&:second)
expect(report_fields).to include(volume_1_type_field_cost)
expect(report_fields).not_to include(volume_2_type_field_cost)
expect(report_fields).not_to include(volume_3_type_field_cost)
end
end
end
end
describe "#valid?" do
it "returns true for a valid flat expression" do
expression = described_class.new("=" => {"field" => "Vm-name", "value" => "foo"})
expect(expression).to be_valid
end
it "returns false for an invalid flat expression" do
expression = described_class.new("=" => {"field" => "Vm-destroy", "value" => true})
expect(expression).not_to be_valid
end
it "returns true if all the subexressions in an 'AND' expression are valid" do
expression = described_class.new(
"AND" => [
{"=" => {"field" => "Vm-name", "value" => "foo"}},
{"=" => {"field" => "Vm-description", "value" => "bar"}}
]
)
expect(expression).to be_valid
end
it "returns false if one of the subexressions in an 'AND' expression is invalid" do
expression = described_class.new(
"AND" => [
{"=" => {"field" => "Vm-destroy", "value" => true}},
{"=" => {"field" => "Vm-description", "value" => "bar"}}
]
)
expect(expression).not_to be_valid
end
it "returns true if all the subexressions in an 'OR' expression are valid" do
expression = described_class.new(
"OR" => [
{"=" => {"field" => "Vm-name", "value" => "foo"}},
{"=" => {"field" => "Vm-description", "value" => "bar"}}
]
)
expect(expression).to be_valid
end
it "returns false if one of the subexressions in an 'OR' expression is invalid" do
expression = described_class.new(
"OR" => [
{"=" => {"field" => "Vm-destroy", "value" => true}},
{"=" => {"field" => "Vm-description", "value" => "bar"}}
]
)
expect(expression).not_to be_valid
end
it "returns true if the subexression in a 'NOT' expression is valid" do
expression1 = described_class.new("NOT" => {"=" => {"field" => "Vm-name", "value" => "foo"}})
expression2 = described_class.new("!" => {"=" => {"field" => "Vm-name", "value" => "foo"}})
expect([expression1, expression2]).to all(be_valid)
end
it "returns false if the subexression in a 'NOT' expression is invalid" do
expression1 = described_class.new("NOT" => {"=" => {"field" => "Vm-destroy", "value" => true}})
expression2 = described_class.new("!" => {"=" => {"field" => "Vm-destroy", "value" => true}})
expect(expression1).not_to be_valid
expect(expression2).not_to be_valid
end
it "returns true if the subexpressions in a 'FIND'/'checkall' expression are all valid" do
expression = described_class.new(
"FIND" => {
"search" => {"=" => {"field" => "Host.filesystems-name", "value" => "/etc/passwd"}},
"checkall" => {"=" => {"field" => "Host.filesystems-permissions", "value" => "0644"}}
}
)
expect(expression).to be_valid
end
it "returns false if a subexpression in a 'FIND'/'checkall' expression is invalid" do
expression1 = described_class.new(
"FIND" => {
"search" => {"=" => {"field" => "Host.filesystems-destroy", "value" => true}},
"checkall" => {"=" => {"field" => "Host.filesystems-permissions", "value" => "0644"}}
}
)
expression2 = described_class.new(
"FIND" => {
"search" => {"=" => {"field" => "Host.filesystems-name", "value" => "/etc/passwd"}},
"checkall" => {"=" => {"field" => "Host.filesystems-destroy", "value" => true}}
}
)
expect(expression1).not_to be_valid
expect(expression2).not_to be_valid
end
it "returns true if the subexpressions in a 'FIND'/'checkany' expression are all valid" do
expression = described_class.new(
"FIND" => {
"search" => {"=" => {"field" => "Host.filesystems-name", "value" => "/etc/passwd"}},
"checkany" => {"=" => {"field" => "Host.filesystems-permissions", "value" => "0644"}}
}
)
expect(expression).to be_valid
end
it "returns false if a subexpression in a 'FIND'/'checkany' expression is invalid" do
expression1 = described_class.new(
"FIND" => {
"search" => {"=" => {"field" => "Host.filesystems-destroy", "value" => true}},
"checkany" => {"=" => {"field" => "Host.filesystems-permissions", "value" => "0644"}}
}
)
expression2 = described_class.new(
"FIND" => {
"search" => {"=" => {"field" => "Host.filesystems-name", "value" => "/etc/passwd"}},
"checkany" => {"=" => {"field" => "Host.filesystems-destroy", "value" => true}}
}
)
expect(expression1).not_to be_valid
expect(expression2).not_to be_valid
end
it "returns true if the subexpressions in a 'FIND'/'checkcount' expression are all valid" do
expression = described_class.new(
"FIND" => {
"search" => {"IS NOT EMPTY" => {"field" => "Vm.snapshots-name"}},
"checkcount" => {">" => {"field" => "<count>", "value" => 0}}
}
)
expect(expression).to be_valid
end
it "returns false if a subexpression in a 'FIND'/'checkcount' expression is invalid" do
expression = described_class.new(
"FIND" => {
"search" => {"=" => {"field" => "Vm.snapshots-destroy"}},
"checkcount" => {">" => {"field" => "<count>", "value" => 0}}
}
)
expect(expression).not_to be_valid
end
end
describe "#preprocess_for_sql" do
it "convert size value in units to integer for comparasing operators on integer field" do
expession_hash = {"=" => {"field" => "Vm-allocated_disk_storage", "value" => "5.megabytes"}}
expession = MiqExpression.new(expession_hash)
exp, _ = expession.preprocess_for_sql(expession_hash)
expect(exp.values.first["value"]).to eq("5.megabyte".to_i_with_method)
expession_hash = {">" => {"field" => "Vm-allocated_disk_storage", "value" => "5.kilobytes"}}
expession = MiqExpression.new(expession_hash)
exp, _ = expession.preprocess_for_sql(expession_hash)
expect(exp.values.first["value"]).to eq("5.kilobytes".to_i_with_method)
expession_hash = {"<" => {"field" => "Vm-allocated_disk_storage", "value" => "2.terabytes"}}
expession = MiqExpression.new(expession_hash)
exp, _ = expession.preprocess_for_sql(expession_hash)
expect(exp.values.first["value"]).to eq(2.terabytes.to_i_with_method)
end
end
describe "#to_sql" do
it "returns nil if SQL generation for that expression is not supported" do
sql, * = MiqExpression.new("=" => {"field" => "Service-custom_1", "value" => ""}).to_sql
expect(sql).to be_nil
end
it "does not raise error and returns nil if SQL generation for expression is not supported and 'token' key present in expression's Hash" do
sql, * = MiqExpression.new("=" => {"field" => "Service-custom_1", "value" => ""}, :token => 1).to_sql
expect(sql).to be_nil
end
it "generates the SQL for an = expression if SQL generation for expression supported and 'token' key present in expression's Hash" do
sql, * = MiqExpression.new("=" => {"field" => "Vm-name", "value" => "foo"}, :token => 1).to_sql
expect(sql).to eq("\"vms\".\"name\" = 'foo'")
end
it "generates the SQL for an EQUAL expression" do
sql, * = MiqExpression.new("EQUAL" => {"field" => "Vm-name", "value" => "foo"}).to_sql
expect(sql).to eq("\"vms\".\"name\" = 'foo'")
end
it "generates the SQL for an EQUAL expression with an association" do
exp = {"EQUAL" => {"field" => "Vm.guest_applications-name", "value" => 'foo'}}
sql, includes, * = MiqExpression.new(exp).to_sql
expect(sql).to eq("\"guest_applications\".\"name\" = 'foo'")
expect(includes).to eq(:guest_applications => {})
end
it "generates the SQL for a = expression" do
sql, * = MiqExpression.new("=" => {"field" => "Vm-name", "value" => "foo"}).to_sql
expect(sql).to eq("\"vms\".\"name\" = 'foo'")
end
it "generates the SQL for a = expression with expression as a value" do
sql, * = MiqExpression.new("=" => {"field" => "Vm-name", "value" => "Vm-name"}).to_sql
expect(sql).to eq("\"vms\".\"name\" = \"vms\".\"name\"")
end
it "will handle values that look like they contain MiqExpression-encoded constants but cannot be loaded" do
sql, * = described_class.new("=" => {"field" => "Vm-name", "value" => "VM-name"}).to_sql
expect(sql).to eq(%q("vms"."name" = 'VM-name'))
end
it "generates the SQL for a < expression" do
sql, * = described_class.new("<" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}).to_sql
expect(sql).to eq("\"hardwares\".\"cpu_sockets\" < 2")
end
it "generates the SQL for a < expression with expression as a value" do
sql, * = described_class.new("<" => {"field" => "Vm.hardware-cpu_sockets", "value" => "Vm.hardware-cpu_sockets"}).to_sql
expect(sql).to eq("\"hardwares\".\"cpu_sockets\" < \"hardwares\".\"cpu_sockets\"")
end
it "generates the SQL for a <= expression" do
sql, * = described_class.new("<=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}).to_sql
expect(sql).to eq("\"hardwares\".\"cpu_sockets\" <= 2")
end
it "generates the SQL for a <= expression with expression as a value" do
sql, * = described_class.new("<=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "Vm.hardware-cpu_sockets"}).to_sql
expect(sql).to eq("\"hardwares\".\"cpu_sockets\" <= \"hardwares\".\"cpu_sockets\"")
end
it "generates the SQL for a > expression" do
sql, * = described_class.new(">" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}).to_sql
expect(sql).to eq("\"hardwares\".\"cpu_sockets\" > 2")
end
it "generates the SQL for a > expression with expression as a value" do
sql, * = described_class.new(">" => {"field" => "Vm.hardware-cpu_sockets", "value" => "Vm.hardware-cpu_sockets"}).to_sql
expect(sql).to eq("\"hardwares\".\"cpu_sockets\" > \"hardwares\".\"cpu_sockets\"")
end
it "generates the SQL for a >= expression" do
sql, * = described_class.new(">=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}).to_sql
expect(sql).to eq("\"hardwares\".\"cpu_sockets\" >= 2")
end
it "generates the SQL for a >= expression with expression as a value" do
sql, * = described_class.new(">=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "Vm.hardware-cpu_sockets"}).to_sql
expect(sql).to eq("\"hardwares\".\"cpu_sockets\" >= \"hardwares\".\"cpu_sockets\"")
end
it "generates the SQL for a != expression" do
sql, * = described_class.new("!=" => {"field" => "Vm-name", "value" => "foo"}).to_sql
expect(sql).to eq("\"vms\".\"name\" != 'foo'")
end
it "generates the SQL for a != expression with expression as a value" do
sql, * = described_class.new("!=" => {"field" => "Vm-name", "value" => "Vm-name"}).to_sql
expect(sql).to eq("\"vms\".\"name\" != \"vms\".\"name\"")
end
it "generates the SQL for a LIKE expression" do
sql, * = MiqExpression.new("LIKE" => {"field" => "Vm-name", "value" => "foo"}).to_sql
expect(sql).to eq("\"vms\".\"name\" LIKE '%foo%'")
end
it "generates the SQL for a NOT LIKE expression" do
sql, * = MiqExpression.new("NOT LIKE" => {"field" => "Vm-name", "value" => "foo"}).to_sql
expect(sql).to eq("\"vms\".\"name\" NOT LIKE '%foo%'")
end
it "generates the SQL for a STARTS WITH expression " do
sql, * = MiqExpression.new("STARTS WITH" => {"field" => "Vm-name", "value" => "foo"}).to_sql
expect(sql).to eq("\"vms\".\"name\" LIKE 'foo%'")
end
it "generates the SQL for an ENDS WITH expression" do
sql, * = MiqExpression.new("ENDS WITH" => {"field" => "Vm-name", "value" => "foo"}).to_sql
expect(sql).to eq("\"vms\".\"name\" LIKE '%foo'")
end
it "generates the SQL for an INCLUDES" do
sql, * = MiqExpression.new("INCLUDES" => {"field" => "Vm-name", "value" => "foo"}).to_sql
expect(sql).to eq("\"vms\".\"name\" LIKE '%foo%'")
end
it "generates the SQL for an INCLUDES ANY with expression method" do
sql, * = MiqExpression.new("INCLUDES ANY" => {"field" => "Vm-ipaddresses", "value" => "foo"}).to_sql
expected_sql = <<-EXPECTED.strip_heredoc.split("\n").join(" ")
1 = (SELECT 1
FROM "hardwares"
INNER JOIN "networks" ON "networks"."hardware_id" = "hardwares"."id"
WHERE "hardwares"."vm_or_template_id" = "vms"."id"
AND (\"networks\".\"ipaddress\" ILIKE '%foo%' OR \"networks\".\"ipv6address\" ILIKE '%foo%')
LIMIT 1)
EXPECTED
expect(sql).to eq(expected_sql)
end
it "does not generate SQL for an INCLUDES ANY without an expression method" do
sql, _, attrs = MiqExpression.new("INCLUDES ANY" => {"field" => "Vm-name", "value" => "foo"}).to_sql
expect(sql).to be nil
expect(attrs).to eq(:supported_by_sql => false)
end
it "does not generate SQL for an INCLUDES ALL without an expression method" do
sql, _, attrs = MiqExpression.new("INCLUDES ALL" => {"field" => "Vm-ipaddresses", "value" => "foo"}).to_sql
expect(sql).to be nil
expect(attrs).to eq(:supported_by_sql => false)
end
it "does not generate SQL for an INCLUDES ONLY without an expression method" do
sql, _, attrs = MiqExpression.new("INCLUDES ONLY" => {"field" => "Vm-ipaddresses", "value" => "foo"}).to_sql
expect(sql).to be nil
expect(attrs).to eq(:supported_by_sql => false)
end
it "generates the SQL for an AND expression" do
exp1 = {"STARTS WITH" => {"field" => "Vm-name", "value" => "foo"}}
exp2 = {"ENDS WITH" => {"field" => "Vm-name", "value" => "bar"}}
sql, * = MiqExpression.new("AND" => [exp1, exp2]).to_sql
expect(sql).to eq("(\"vms\".\"name\" LIKE 'foo%' AND \"vms\".\"name\" LIKE '%bar')")
end
it "generates the SQL for an AND expression where only one is supported by SQL" do
exp1 = {"STARTS WITH" => {"field" => "Vm-name", "value" => "foo"}}
exp2 = {"ENDS WITH" => {"field" => "Vm-platform", "value" => "bar"}}
sql, * = MiqExpression.new("AND" => [exp1, exp2]).to_sql
expect(sql).to eq("(\"vms\".\"name\" LIKE 'foo%')")
end
it "returns nil for an AND expression where none is supported by SQL" do
exp1 = {"STARTS WITH" => {"field" => "Vm-platform", "value" => "foo"}}
exp2 = {"ENDS WITH" => {"field" => "Vm-platform", "value" => "bar"}}
sql, * = MiqExpression.new("AND" => [exp1, exp2]).to_sql
expect(sql).to be_nil
end
it "generates the SQL for an OR expression" do
exp1 = {"STARTS WITH" => {"field" => "Vm-name", "value" => "foo"}}
exp2 = {"ENDS WITH" => {"field" => "Vm-name", "value" => "bar"}}
sql, * = MiqExpression.new("OR" => [exp1, exp2]).to_sql
expect(sql).to eq(%q(("vms"."name" LIKE 'foo%' OR "vms"."name" LIKE '%bar')))
end
it "returns nil for an OR expression where one is not supported by SQL" do
exp1 = {"STARTS WITH" => {"field" => "Vm-name", "value" => "foo"}}
exp2 = {"ENDS WITH" => {"field" => "Vm-platform", "value" => "bar"}}
sql, * = MiqExpression.new("OR" => [exp1, exp2]).to_sql
expect(sql).to be_nil
end
it "returns nil for an OR expression where none is supported by SQL" do
exp1 = {"STARTS WITH" => {"field" => "Vm-platform", "value" => "foo"}}
exp2 = {"ENDS WITH" => {"field" => "Vm-platform", "value" => "bar"}}
sql, * = MiqExpression.new("OR" => [exp1, exp2]).to_sql
expect(sql).to be_nil
end
context "nested expressions" do
it "properly groups the items in an AND/OR expression" do
exp = {"AND" => [{"EQUAL" => {"field" => "Vm-power_state", "value" => "on"}},
{"OR" => [{"EQUAL" => {"field" => "Vm-name", "value" => "foo"}},
{"EQUAL" => {"field" => "Vm-name", "value" => "bar"}}]}]}
sql, * = described_class.new(exp).to_sql
expect(sql).to eq(%q(("vms"."power_state" = 'on' AND ("vms"."name" = 'foo' OR "vms"."name" = 'bar'))))
end
it "properly groups the items in an OR/AND expression" do
exp = {"OR" => [{"EQUAL" => {"field" => "Vm-power_state", "value" => "on"}},
{"AND" => [{"EQUAL" => {"field" => "Vm-name", "value" => "foo"}},
{"EQUAL" => {"field" => "Vm-name", "value" => "bar"}}]}]}
sql, * = described_class.new(exp).to_sql
expect(sql).to eq(%q(("vms"."power_state" = 'on' OR ("vms"."name" = 'foo' AND "vms"."name" = 'bar'))))
end
it "properly groups the items in an OR/OR expression" do
exp = {"OR" => [{"EQUAL" => {"field" => "Vm-power_state", "value" => "on"}},
{"OR" => [{"EQUAL" => {"field" => "Vm-name", "value" => "foo"}},
{"EQUAL" => {"field" => "Vm-name", "value" => "bar"}}]}]}
sql, * = described_class.new(exp).to_sql
expect(sql).to eq(%q(("vms"."power_state" = 'on' OR ("vms"."name" = 'foo' OR "vms"."name" = 'bar'))))
end
end
it "generates the SQL for a NOT expression" do
sql, * = MiqExpression.new("NOT" => {"=" => {"field" => "Vm-name", "value" => "foo"}}).to_sql
expect(sql).to eq("NOT (\"vms\".\"name\" = 'foo')")
end
it "generates the SQL for a ! expression" do
sql, * = MiqExpression.new("!" => {"=" => {"field" => "Vm-name", "value" => "foo"}}).to_sql
expect(sql).to eq("NOT (\"vms\".\"name\" = 'foo')")
end
it "generates the SQL for an IS NULL expression" do
sql, * = MiqExpression.new("IS NULL" => {"field" => "Vm-name"}).to_sql
expect(sql).to eq("\"vms\".\"name\" IS NULL")
end
it "generates the SQL for an IS NOT NULL expression" do
sql, * = MiqExpression.new("IS NOT NULL" => {"field" => "Vm-name"}).to_sql
expect(sql).to eq("\"vms\".\"name\" IS NOT NULL")
end
it "generates the SQL for an IS EMPTY expression" do
sql, * = MiqExpression.new("IS EMPTY" => {"field" => "Vm-name"}).to_sql
expect(sql).to eq("(\"vms\".\"name\" IS NULL OR \"vms\".\"name\" = '')")
end
it "generates the SQL for an IS NOT EMPTY expression" do
sql, * = MiqExpression.new("IS NOT EMPTY" => {"field" => "Vm-name"}).to_sql
expect(sql).to eq("\"vms\".\"name\" IS NOT NULL AND \"vms\".\"name\" != ''")
end
it "generates the SQL for a CONTAINS expression with field" do
sql, * = MiqExpression.new("CONTAINS" => {"field" => "Vm.guest_applications-name", "value" => "foo"}).to_sql
expect(sql).to eq("\"vms\".\"id\" IN (SELECT DISTINCT \"guest_applications\".\"vm_or_template_id\" FROM \"guest_applications\" WHERE \"guest_applications\".\"name\" = 'foo')")
end
it "cant generates the SQL for a CONTAINS expression with association.association-field" do
sql, * = MiqExpression.new("CONTAINS" => {"field" => "Vm.guest_applications.host-name", "value" => "foo"}).to_sql
expect(sql).to be_nil
end
it "cant generat the SQL for a CONTAINS expression virtualassociation" do
sql, * = MiqExpression.new("CONTAINS" => {"field" => "Vm.processes-name", "value" => "foo"}).to_sql
expect(sql).to be_nil
end
it "cant generat the SQL for a CONTAINS expression with [association.virtualassociation]" do
sql, * = MiqExpression.new("CONTAINS" => {"field" => "Vm.users.active_vms-name", "value" => "foo"}).to_sql
expect(sql).to be_nil
end
it "generates the SQL for a CONTAINS expression with field containing a scope" do
sql, * = MiqExpression.new("CONTAINS" => {"field" => "Vm.users-name", "value" => "foo"}).to_sql
expected = "\"vms\".\"id\" IN (SELECT DISTINCT \"accounts\".\"vm_or_template_id\" FROM \"accounts\" "\
"WHERE \"accounts\".\"name\" = 'foo' AND \"accounts\".\"accttype\" = 'user')"
expect(sql).to eq(expected)
end
it "generates the SQL for a CONTAINS expression with tag" do
tag = FactoryBot.create(:tag, :name => "/managed/operations/analysis_failed")
vm = FactoryBot.create(:vm_vmware, :tags => [tag])
exp = {"CONTAINS" => {"tag" => "VmInfra.managed-operations", "value" => "analysis_failed"}}
sql, * = MiqExpression.new(exp).to_sql
expect(sql).to eq("\"vms\".\"id\" IN (#{vm.id})")
end
it "returns nil for a Registry expression" do
exp = {"=" => {"regkey" => "test", "regval" => "value", "value" => "data"}}
sql, * = MiqExpression.new(exp).to_sql
expect(sql).to be_nil
end
it "raises an error for an expression with unknown operator" do
expect do
MiqExpression.new("FOOBAR" => {"field" => "Vm-name", "value" => "foo"}).to_sql
end.to raise_error(/operator 'FOOBAR' is not supported/)
end
it "should test virtual column FB15509" do
exp = YAML.load '--- !ruby/object:MiqExpression
context_type:
exp:
CONTAINS:
field: MiqGroup.vms-uncommitted_storage
value: "false"
'
*, attrs = exp.to_sql
expect(attrs[:supported_by_sql]).to eq(false)
end
context "date/time support" do
it "generates the SQL for a = expression with a date field" do
sql, * = described_class.new("=" => {"field" => "Vm-retires_on", "value" => "2016-01-01"}).to_sql
expect(sql).to eq(%q("vms"."retires_on" = '2016-01-01'))
end
it "generates the SQL for an AFTER expression" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"retires_on\" > '2011-01-10 23:59:59.999999'")
end
it "generates the SQL for a BEFORE expression" do
exp = MiqExpression.new("BEFORE" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"retires_on\" < '2011-01-10 00:00:00'")
end
it "generates the SQL for an AFTER expression with date/time" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-last_scan_on", "value" => "2011-01-10 9:00"})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"last_scan_on\" > '2011-01-10 09:00:00'")
end
it "generates the SQL for a != expression with a date field" do
sql, * = described_class.new("!=" => {"field" => "Vm-retires_on", "value" => "2016-01-01"}).to_sql
expect(sql).to eq(%q("vms"."retires_on" != '2016-01-01'))
end
it "generates the SQL for an IS expression" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"retires_on\" BETWEEN '2011-01-10 00:00:00' AND '2011-01-10 23:59:59.999999'")
end
it "generates the SQL for a FROM expression" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["2011-01-09", "2011-01-10"]})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"retires_on\" BETWEEN '2011-01-09 00:00:00' AND '2011-01-10 23:59:59.999999'")
end
it "generates the SQL for a FROM expression with MM/DD/YYYY dates" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["01/09/2011", "01/10/2011"]})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"retires_on\" BETWEEN '2011-01-09 00:00:00' AND '2011-01-10 23:59:59.999999'")
end
it "generates the SQL for a FROM expression with date/time" do
exp = MiqExpression.new(
"FROM" => {"field" => "Vm-last_scan_on", "value" => ["2011-01-10 8:00", "2011-01-10 17:00"]}
)
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"last_scan_on\" BETWEEN '2011-01-10 08:00:00' AND '2011-01-10 17:00:00'")
end
it "generates the SQL for a FROM expression with two identical datetimes" do
exp = MiqExpression.new(
"FROM" => {"field" => "Vm-last_scan_on", "value" => ["2011-01-10 00:00", "2011-01-10 00:00"]}
)
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"last_scan_on\" BETWEEN '2011-01-10 00:00:00' AND '2011-01-10 00:00:00'")
end
end
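# Relative values ("Yesterday", "n Days Ago", "Last Week", ...) are resolved against the
# clock frozen by Timecop below: 2011-01-11 17:30 UTC.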
context "relative date/time support" do
around { |example| Timecop.freeze("2011-01-11 17:30 UTC") { example.run } }
context "given a non-UTC timezone" do
it "generates the SQL for a AFTER expression with a value of 'Yesterday' for a date field" do
exp = described_class.new("AFTER" => {"field" => "Vm-retires_on", "value" => "Yesterday"})
sql, * = exp.to_sql("Asia/Jakarta")
expect(sql).to eq(%q("vms"."retires_on" > '2011-01-11 16:59:59.999999'))
end
it "generates the SQL for a BEFORE expression with a value of 'Yesterday' for a date field" do
exp = described_class.new("BEFORE" => {"field" => "Vm-retires_on", "value" => "Yesterday"})
sql, * = exp.to_sql("Asia/Jakarta")
expect(sql).to eq(%q("vms"."retires_on" < '2011-01-10 17:00:00'))
end
it "generates the SQL for an IS expression with a value of 'Yesterday' for a date field" do
exp = described_class.new("IS" => {"field" => "Vm-retires_on", "value" => "Yesterday"})
sql, * = exp.to_sql("Asia/Jakarta")
expect(sql).to eq(%q("vms"."retires_on" BETWEEN '2011-01-10 17:00:00' AND '2011-01-11 16:59:59.999999'))
end
it "generates the SQL for a FROM expression with a value of 'Yesterday'/'Today' for a date field" do
exp = described_class.new("FROM" => {"field" => "Vm-retires_on", "value" => %w(Yesterday Today)})
sql, * = exp.to_sql("Asia/Jakarta")
expect(sql).to eq(%q("vms"."retires_on" BETWEEN '2011-01-10 17:00:00' AND '2011-01-12 16:59:59.999999'))
end
end
it "generates the SQL for an AFTER expression with an 'n Days Ago' value for a date field" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-retires_on", "value" => "2 Days Ago"})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"retires_on\" > '2011-01-09 23:59:59.999999'")
end
it "generates the SQL for an AFTER expression with an 'n Days Ago' value for a datetime field" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-last_scan_on", "value" => "2 Days Ago"})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"last_scan_on\" > '2011-01-09 23:59:59.999999'")
end
it "generates the SQL for a BEFORE expression with an 'n Days Ago' value for a date field" do
exp = MiqExpression.new("BEFORE" => {"field" => "Vm-retires_on", "value" => "2 Days Ago"})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"retires_on\" < '2011-01-09 00:00:00'")
end
it "generates the SQL for a BEFORE expression with an 'n Days Ago' value for a datetime field" do
exp = MiqExpression.new("BEFORE" => {"field" => "Vm-last_scan_on", "value" => "2 Days Ago"})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"last_scan_on\" < '2011-01-09 00:00:00'")
end
it "generates the SQL for a FROM expression with a 'Last Hour'/'This Hour' value for a datetime field" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["Last Hour", "This Hour"]})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"last_scan_on\" BETWEEN '2011-01-11 16:00:00' AND '2011-01-11 17:59:59.999999'")
end
it "generates the SQL for a FROM expression with a 'Last Week'/'Last Week' value for a date field" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["Last Week", "Last Week"]})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"retires_on\" BETWEEN '2011-01-03 00:00:00' AND '2011-01-09 23:59:59.999999'")
end
it "generates the SQL for a FROM expression with a 'Last Week'/'Last Week' value for a datetime field" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["Last Week", "Last Week"]})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"last_scan_on\" BETWEEN '2011-01-03 00:00:00' AND '2011-01-09 23:59:59.999999'")
end
it "generates the SQL for a FROM expression with an 'n Months Ago'/'Last Month' value for a datetime field" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["2 Months Ago", "Last Month"]})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"last_scan_on\" BETWEEN '2010-11-01 00:00:00' AND '2010-12-31 23:59:59.999999'")
end
it "generates the SQL for an IS expression with a 'Today' value for a date field" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "Today"})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"retires_on\" BETWEEN '2011-01-11 00:00:00' AND '2011-01-11 23:59:59.999999'")
end
it "generates the SQL for an IS expression with an 'n Hours Ago' value for a date field" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "3 Hours Ago"})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"retires_on\" BETWEEN '2011-01-11 14:00:00' AND '2011-01-11 14:59:59.999999'")
end
it "generates the SQL for an IS expression with an 'n Hours Ago' value for a datetime field" do
exp = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "3 Hours Ago"})
sql, * = exp.to_sql
expect(sql).to eq("\"vms\".\"last_scan_on\" BETWEEN '2011-01-11 14:00:00' AND '2011-01-11 14:59:59.999999'")
end
end
describe "integration" do
context "date/time support" do
it "finds the correct instances for an gt expression with a dynamic integer field" do
_vm1 = FactoryBot.create(:vm_vmware, :memory_reserve => 1, :cpu_reserve => 2)
vm2 = FactoryBot.create(:vm_vmware, :memory_reserve => 2, :cpu_reserve => 1)
filter = MiqExpression.new(">" => {"field" => "Vm-memory_reserve", "value" => "Vm-cpu_reserve"})
result = Vm.where(filter.to_sql.first)
expect(result).to eq([vm2])
end
it "finds the correct instances for an gt expression with a custom attribute dynamic integer field" do
custom_attribute = FactoryBot.create(:custom_attribute, :name => "example", :value => 10)
vm1 = FactoryBot.create(:vm, :memory_reserve => 2)
vm1.custom_attributes << custom_attribute
_vm2 = FactoryBot.create(:vm, :memory_reserve => 0)
name_of_attribute = "VmOrTemplate-#{CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX}example"
filter = MiqExpression.new("<" => {"field" => "VmOrTemplate-memory_reserve", "value" => name_of_attribute})
result = Rbac.search(:targets => Vm, :filter => filter).first.first
expect(filter.to_sql.last).to eq(:supported_by_sql => false)
expect(result).to eq(vm1)
end
it "finds the correct instances for an AFTER expression with a datetime field" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-11 9:00")
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-11 9:00:00.000001")
filter = MiqExpression.new("AFTER" => {"field" => "Vm-last_scan_on", "value" => "2011-01-11 9:00"})
result = Vm.where(filter.to_sql.first)
expect(result).to eq([vm2])
end
it "finds the correct instances for an IS EMPTY expression with a datetime field" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-11 9:01")
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => nil)
filter = MiqExpression.new("IS EMPTY" => {"field" => "Vm-last_scan_on"})
result = Vm.where(filter.to_sql.first)
expect(result).to eq([vm2])
end
it "finds the correct instances for an IS EMPTY expression with a date field" do
_vm1 = FactoryBot.create(:vm_vmware, :retires_on => "2011-01-11")
vm2 = FactoryBot.create(:vm_vmware, :retires_on => nil)
filter = MiqExpression.new("IS EMPTY" => {"field" => "Vm-retires_on"})
result = Vm.where(filter.to_sql.first)
expect(result).to eq([vm2])
end
it "finds the correct instances for an IS NOT EMPTY expression with a datetime field" do
vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-11 9:01")
_vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => nil)
filter = MiqExpression.new("IS NOT EMPTY" => {"field" => "Vm-last_scan_on"})
result = Vm.where(filter.to_sql.first)
expect(result).to eq([vm1])
end
it "finds the correct instances for an IS NOT EMPTY expression with a date field" do
vm1 = FactoryBot.create(:vm_vmware, :retires_on => "2011-01-11")
_vm2 = FactoryBot.create(:vm_vmware, :retires_on => nil)
filter = MiqExpression.new("IS NOT EMPTY" => {"field" => "Vm-retires_on"})
result = Vm.where(filter.to_sql.first)
expect(result).to eq([vm1])
end
it "finds the correct instances for an IS expression with a date field" do
_vm1 = FactoryBot.create(:vm_vmware, :retires_on => "2011-01-09")
vm2 = FactoryBot.create(:vm_vmware, :retires_on => "2011-01-10")
_vm3 = FactoryBot.create(:vm_vmware, :retires_on => "2011-01-11")
filter = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
result = Vm.where(filter.to_sql.first)
expect(result).to eq([vm2])
end
it "finds the correct instances for an IS expression with a datetime field" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-10 23:59:59.999999")
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-11 0:00")
vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-11 23:59:59.999999")
_vm4 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-12 0:00")
filter = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "2011-01-11"})
result = Vm.where(filter.to_sql.first)
expect(result).to contain_exactly(vm2, vm3)
end
it "finds the correct instances for a FROM expression with a datetime field, given date values" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => "2010-07-10 23:59:59.999999")
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => "2010-07-11 00:00:00")
vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => "2010-12-31 23:59:59.999999")
_vm4 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-01 00:00:00")
filter = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["2010-07-11", "2010-12-31"]})
result = Vm.where(filter.to_sql.first)
expect(result).to contain_exactly(vm2, vm3)
end
it "finds the correct instances for a FROM expression with a date field" do
_vm1 = FactoryBot.create(:vm_vmware, :retires_on => "2010-07-10")
vm2 = FactoryBot.create(:vm_vmware, :retires_on => "2010-07-11")
vm3 = FactoryBot.create(:vm_vmware, :retires_on => "2010-12-31")
_vm4 = FactoryBot.create(:vm_vmware, :retires_on => "2011-01-01")
filter = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["2010-07-11", "2010-12-31"]})
result = Vm.where(filter.to_sql.first)
expect(result).to contain_exactly(vm2, vm3)
end
it "finds the correct instances for a FROM expression with a datetime field, given datetimes" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-09 16:59:59.999999")
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-09 17:30:00")
vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-10 23:30:59")
_vm4 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-10 23:31:00")
filter = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on",
"value" => ["2011-01-09 17:00", "2011-01-10 23:30:59"]})
result = Vm.where(filter.to_sql.first)
expect(result).to contain_exactly(vm2, vm3)
end
end
context "relative date/time support" do
around { |example| Timecop.freeze("2011-01-11 17:30 UTC") { example.run } }
it "finds the correct instances for an IS expression with 'Today'" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => Time.zone.yesterday.end_of_day)
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => Time.zone.today)
_vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => Time.zone.tomorrow.beginning_of_day)
filter = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "Today"})
result = Vm.where(filter.to_sql.first)
expect(result).to eq([vm2])
end
it "finds the correct instances for an IS expression with a datetime field and 'n Hours Ago'" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => Time.zone.parse("13:59:59.999999"))
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => Time.zone.parse("14:00:00"))
vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => Time.zone.parse("14:59:59.999999"))
_vm4 = FactoryBot.create(:vm_vmware, :last_scan_on => Time.zone.parse("15:00:00"))
filter = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "3 Hours Ago"})
result = Vm.where(filter.to_sql.first)
expect(result).to contain_exactly(vm2, vm3)
end
it "finds the correct instances for an IS expression with 'Last Month'" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => (1.month.ago.beginning_of_month - 1.day).end_of_day)
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.month.ago.beginning_of_month)
vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.month.ago.end_of_month)
_vm4 = FactoryBot.create(:vm_vmware, :last_scan_on => (1.month.ago.end_of_month + 1.day).beginning_of_day)
filter = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "Last Month"})
result = Vm.where(filter.to_sql.first)
expect(result).to contain_exactly(vm2, vm3)
end
it "finds the correct instances for a FROM expression with a date field and 'Last Week'" do
_vm1 = FactoryBot.create(:vm_vmware, :retires_on => 1.week.ago.beginning_of_week - 1.day)
vm2 = FactoryBot.create(:vm_vmware, :retires_on => 1.week.ago.beginning_of_week)
vm3 = FactoryBot.create(:vm_vmware, :retires_on => 1.week.ago.end_of_week)
_vm4 = FactoryBot.create(:vm_vmware, :retires_on => 1.week.ago.end_of_week + 1.day)
filter = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["Last Week", "Last Week"]})
result = Vm.where(filter.to_sql.first)
expect(result).to contain_exactly(vm2, vm3)
end
it "finds the correct instances for a FROM expression with a datetime field and 'Last Week'" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.week.ago.beginning_of_week - 1.second)
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.week.ago.beginning_of_week.beginning_of_day)
vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.week.ago.end_of_week.end_of_day)
_vm4 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.week.ago.end_of_week + 1.second)
filter = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["Last Week", "Last Week"]})
result = Vm.where(filter.to_sql.first)
expect(result).to contain_exactly(vm2, vm3)
end
it "finds the correct instances for a FROM expression with 'Last Week' and 'This Week'" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.week.ago.beginning_of_week - 1.second)
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.week.ago.beginning_of_week.beginning_of_day)
vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.week.from_now.beginning_of_week - 1.second)
_vm4 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.week.from_now.beginning_of_week.beginning_of_day)
filter = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["Last Week", "This Week"]})
result = Vm.where(filter.to_sql.first)
expect(result).to contain_exactly(vm2, vm3)
end
it "finds the correct instances for a FROM expression with 'n Months Ago'" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => 2.months.ago.beginning_of_month - 1.second)
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => 2.months.ago.beginning_of_month.beginning_of_day)
vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.month.ago.end_of_month.end_of_day)
_vm4 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.month.ago.end_of_month + 1.second)
filter = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["2 Months Ago", "1 Month Ago"]})
result = Vm.where(filter.to_sql.first)
expect(result).to contain_exactly(vm2, vm3)
end
it "finds the correct instances for a FROM expression with 'Last Month'" do
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.month.ago.beginning_of_month - 1.second)
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.month.ago.beginning_of_month.beginning_of_day)
vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.month.ago.end_of_month.end_of_day)
_vm4 = FactoryBot.create(:vm_vmware, :last_scan_on => 1.month.ago.end_of_month + 1.second)
filter = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["Last Month", "Last Month"]})
result = Vm.where(filter.to_sql.first)
expect(result).to contain_exactly(vm2, vm3)
end
end
context "timezone support" do
it "finds the correct instances for a FROM expression with a datetime field and timezone" do
timezone = "Eastern Time (US & Canada)"
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-09 21:59:59.999999")
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-09 22:00:00")
vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-11 04:30:59")
_vm4 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-11 04:31:00")
filter = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on",
"value" => ["2011-01-09 17:00", "2011-01-10 23:30:59"]})
result = Vm.where(filter.to_sql(timezone).first)
expect(result).to contain_exactly(vm2, vm3)
end
it "finds the correct instances for a FROM expression with a date field and timezone" do
timezone = "Eastern Time (US & Canada)"
_vm1 = FactoryBot.create(:vm_vmware, :retires_on => "2011-01-09T23:59:59Z")
vm2 = FactoryBot.create(:vm_vmware, :retires_on => "2011-01-10T06:30:00Z")
_vm3 = FactoryBot.create(:vm_vmware, :retires_on => "2011-01-11T08:00:00Z")
filter = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
result = Vm.where(filter.to_sql(timezone).first)
expect(result).to eq([vm2])
end
it "finds the correct instances for an IS expression with timezone" do
timezone = "Eastern Time (US & Canada)"
_vm1 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-11 04:59:59.999999")
vm2 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-11 05:00:00")
vm3 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-12 04:59:59.999999")
_vm4 = FactoryBot.create(:vm_vmware, :last_scan_on => "2011-01-12 05:00:00")
filter = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "2011-01-11"})
result = Vm.where(filter.to_sql(timezone).first)
expect(result).to contain_exactly(vm2, vm3)
end
end
end
end
describe "#lenient_evaluate" do
describe "integration" do
it "with a find/checkany expression" do
host1, host2, host3, host4, host5, host6, host7, host8 = FactoryBot.create_list(:host, 8)
FactoryBot.create(:vm_vmware, :host => host1, :description => "foo", :last_scan_on => "2011-01-08 16:59:59.999999")
FactoryBot.create(:vm_vmware, :host => host2, :description => nil, :last_scan_on => "2011-01-08 16:59:59.999999")
FactoryBot.create(:vm_vmware, :host => host3, :description => "bar", :last_scan_on => "2011-01-08 17:00:00")
FactoryBot.create(:vm_vmware, :host => host4, :description => nil, :last_scan_on => "2011-01-08 17:00:00")
FactoryBot.create(:vm_vmware, :host => host5, :description => "baz", :last_scan_on => "2011-01-09 23:30:59.999999")
FactoryBot.create(:vm_vmware, :host => host6, :description => nil, :last_scan_on => "2011-01-09 23:30:59.999999")
FactoryBot.create(:vm_vmware, :host => host7, :description => "qux", :last_scan_on => "2011-01-09 23:31:00")
FactoryBot.create(:vm_vmware, :host => host8, :description => nil, :last_scan_on => "2011-01-09 23:31:00")
filter = MiqExpression.new(
"FIND" => {
"checkany" => {"FROM" => {"field" => "Host.vms-last_scan_on",
"value" => ["2011-01-08 17:00", "2011-01-09 23:30:59"]}},
"search" => {"IS NOT NULL" => {"field" => "Host.vms-description"}}})
result = Host.all.to_a.select { |rec| filter.lenient_evaluate(rec) }
expect(result).to contain_exactly(host3, host5)
end
it "with a find/checkall expression" do
host1, host2, host3, host4, host5 = FactoryBot.create_list(:host, 5)
FactoryBot.create(:vm_vmware, :host => host1, :description => "foo", :last_scan_on => "2011-01-08 16:59:59.999999")
FactoryBot.create(:vm_vmware, :host => host2, :description => "bar", :last_scan_on => "2011-01-08 17:00:00")
FactoryBot.create(:vm_vmware, :host => host2, :description => "baz", :last_scan_on => "2011-01-09 23:30:59.999999")
FactoryBot.create(:vm_vmware, :host => host3, :description => "qux", :last_scan_on => "2011-01-08 17:00:00")
FactoryBot.create(:vm_vmware, :host => host3, :description => nil, :last_scan_on => "2011-01-09 23:30:59.999999")
FactoryBot.create(:vm_vmware, :host => host4, :description => nil, :last_scan_on => "2011-01-08 17:00:00")
FactoryBot.create(:vm_vmware, :host => host4, :description => "quux", :last_scan_on => "2011-01-09 23:30:59.999999")
FactoryBot.create(:vm_vmware, :host => host5, :description => "corge", :last_scan_on => "2011-01-09 23:31:00")
filter = MiqExpression.new(
"FIND" => {
"search" => {"FROM" => {"field" => "Host.vms-last_scan_on",
"value" => ["2011-01-08 17:00", "2011-01-09 23:30:59"]}},
"checkall" => {"IS NOT NULL" => {"field" => "Host.vms-description"}}}
)
result = Host.all.to_a.select { |rec| filter.lenient_evaluate(rec) }
expect(result).to eq([host2])
end
it "cannot execute non-attribute methods on target objects" do
vm = FactoryBot.create(:vm_vmware)
expect do
described_class.new("=" => {"field" => "Vm-destroy", "value" => true}).lenient_evaluate(vm)
end.not_to change(Vm, :count)
end
end
end
describe "#to_ruby" do
it "generates the ruby for a = expression with count" do
actual = described_class.new("=" => {"count" => "Vm.snapshots", "value" => "1"}).to_ruby
expected = "<count ref=vm>/virtual/snapshots</count> == 1"
expect(actual).to eq(expected)
end
it "generates the ruby for a = expression with regkey" do
actual = described_class.new("=" => {"regkey" => "foo", "regval" => "bar", "value" => "baz"}).to_ruby
expected = "<registry>foo : bar</registry> == \"baz\""
expect(actual).to eq(expected)
end
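# In "hash" context the generated ruby references flat hash keys (e.g. hardware.cpu_sockets)
# rather than the ref=vm /virtual/ paths used for model-based expressions.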
it "generates the ruby for a < expression with hash context" do
actual = described_class.new({"<" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}, "hash").to_ruby
expected = "<value type=integer>hardware.cpu_sockets</value> < 2"
expect(actual).to eq(expected)
end
it "generates the ruby for a < expression with count" do
actual = described_class.new("<" => {"count" => "Vm.snapshots", "value" => "2"}).to_ruby
expected = "<count ref=vm>/virtual/snapshots</count> < 2"
expect(actual).to eq(expected)
end
it "generates the ruby for a > expression with hash context" do
actual = described_class.new({">" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}, "hash").to_ruby
expected = "<value type=integer>hardware.cpu_sockets</value> > 2"
expect(actual).to eq(expected)
end
it "generates the ruby for a > expression with count" do
actual = described_class.new(">" => {"count" => "Vm.snapshots", "value" => "2"}).to_ruby
expected = "<count ref=vm>/virtual/snapshots</count> > 2"
expect(actual).to eq(expected)
end
it "generates the ruby for a >= expression with hash context" do
actual = described_class.new({">=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}, "hash").to_ruby
expected = "<value type=integer>hardware.cpu_sockets</value> >= 2"
expect(actual).to eq(expected)
end
it "generates the ruby for a >= expression with count" do
actual = described_class.new(">=" => {"count" => "Vm.snapshots", "value" => "2"}).to_ruby
expected = "<count ref=vm>/virtual/snapshots</count> >= 2"
expect(actual).to eq(expected)
end
it "generates the ruby for a <= expression with hash context" do
actual = described_class.new({"<=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}, "hash").to_ruby
expected = "<value type=integer>hardware.cpu_sockets</value> <= 2"
expect(actual).to eq(expected)
end
it "generates the ruby for a <= expression with count" do
actual = described_class.new("<=" => {"count" => "Vm.snapshots", "value" => "2"}).to_ruby
expected = "<count ref=vm>/virtual/snapshots</count> <= 2"
expect(actual).to eq(expected)
end
it "generates the ruby for a != expression with hash context" do
actual = described_class.new({"!=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}, "hash").to_ruby
expected = "<value type=integer>hardware.cpu_sockets</value> != 2"
expect(actual).to eq(expected)
end
it "generates the ruby for a != expression with count" do
actual = described_class.new("!=" => {"count" => "Vm.snapshots", "value" => "2"}).to_ruby
expected = "<count ref=vm>/virtual/snapshots</count> != 2"
expect(actual).to eq(expected)
end
it "generates the ruby for a BEFORE expression with hash context" do
actual = described_class.new({"BEFORE" => {"field" => "Vm-retires_on", "value" => "2011-01-10"}}, "hash").to_ruby
expected = "val=<value type=datetime>Vm.retires_on</value>; !val.nil? && val.to_time < '2011-01-10T00:00:00Z'.to_time(:utc)"
expect(actual).to eq(expected)
end
it "generates the ruby for a AFTER expression with hash context" do
actual = described_class.new({"AFTER" => {"field" => "Vm-retires_on", "value" => "2011-01-10"}}, "hash").to_ruby
expected = "val=<value type=datetime>Vm.retires_on</value>; !val.nil? && val.to_time > '2011-01-10T23:59:59Z'.to_time(:utc)"
expect(actual).to eq(expected)
end
it "generates the ruby for a INCLUDES ALL expression with hash context" do
actual = described_class.new(
{"INCLUDES ALL" => {"field" => "Host-enabled_inbound_ports", "value" => "22, 427, 5988, 5989, 1..4"}},
"hash"
).to_ruby
expected = "(<value type=numeric_set>Host.enabled_inbound_ports</value> & [1,2,3,4,22,427,5988,5989]) == [1,2,3,4,22,427,5988,5989]"
expect(actual).to eq(expected)
end
it "generates the ruby for a INCLUDES ANY expression with hash context" do
actual = described_class.new(
{"INCLUDES ANY" => {"field" => "Host-enabled_inbound_ports", "value" => "22, 427, 5988, 5989, 1..4"}},
"hash"
).to_ruby
expected = "([1,2,3,4,22,427,5988,5989] - <value type=numeric_set>Host.enabled_inbound_ports</value>) != [1,2,3,4,22,427,5988,5989]"
expect(actual).to eq(expected)
end
it "generates the ruby for a INCLUDES ONLY expression with hash context" do
actual = described_class.new(
{"INCLUDES ONLY" => {"field" => "Host-enabled_inbound_ports", "value" => "22, 427, 5988, 5989, 1..4"}},
"hash"
).to_ruby
expected = "(<value type=numeric_set>Host.enabled_inbound_ports</value> - [1,2,3,4,22,427,5988,5989]) == []"
expect(actual).to eq(expected)
end
it "generates the ruby for a LIMITED TO expression with hash context" do
actual = described_class.new(
{"LIMITED TO" => {"field" => "Host-enabled_inbound_ports", "value" => "22, 427, 5988, 5989, 1..4"}},
"hash"
).to_ruby
expected = "(<value type=numeric_set>Host.enabled_inbound_ports</value> - [1,2,3,4,22,427,5988,5989]) == []"
expect(actual).to eq(expected)
end
it "generates the ruby for a LIKE expression with field" do
actual = described_class.new("LIKE" => {"field" => "Vm-name", "value" => "foo"}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> =~ /foo/"
expect(actual).to eq(expected)
end
it "generates the ruby for a LIKE expression with hash context" do
actual = described_class.new({"LIKE" => {"field" => "Vm-name", "value" => "foo"}}, "hash").to_ruby
expected = "<value type=string>Vm.name</value> =~ /foo/"
expect(actual).to eq(expected)
end
it "generates the ruby for a LIKE expression with regkey" do
actual = described_class.new("LIKE" => {"regkey" => "foo", "regval" => "bar", "value" => "baz"}).to_ruby
expected = "<registry>foo : bar</registry> =~ /baz/"
expect(actual).to eq(expected)
end
it "generates the ruby for a NOT LIKE expression with field" do
actual = described_class.new("NOT LIKE" => {"field" => "Vm-name", "value" => "foo"}).to_ruby
expected = "!(<value ref=vm, type=string>/virtual/name</value> =~ /foo/)"
expect(actual).to eq(expected)
end
it "generates the ruby for a NOT LIKE expression with hash context" do
actual = described_class.new({"NOT LIKE" => {"field" => "Vm-name", "value" => "foo"}}, "hash").to_ruby
expected = "!(<value type=string>Vm.name</value> =~ /foo/)"
expect(actual).to eq(expected)
end
it "generates the ruby for a NOT LIKE expression with regkey" do
actual = described_class.new("NOT LIKE" => {"regkey" => "foo", "regval" => "bar", "value" => "baz"}).to_ruby
expected = "!(<registry>foo : bar</registry> =~ /baz/)"
expect(actual).to eq(expected)
end
it "generates the ruby for a STARTS WITH expression with hash context with field" do
actual = described_class.new({"STARTS WITH" => {"field" => "Vm-name", "value" => "foo"}}, "hash").to_ruby
expected = "<value type=string>Vm.name</value> =~ /^foo/"
expect(actual).to eq(expected)
end
it "generates the ruby for a STARTS WITH expression with regkey" do
actual = described_class.new("STARTS WITH" => {"regkey" => "foo", "regval" => "bar", "value" => "baz"}).to_ruby
expected = "<registry>foo : bar</registry> =~ /^baz/"
expect(actual).to eq(expected)
end
it "generates the ruby for a ENDS WITH expression with hash context" do
actual = described_class.new({"ENDS WITH" => {"field" => "Vm-name", "value" => "foo"}}, "hash").to_ruby
expected = "<value type=string>Vm.name</value> =~ /foo$/"
expect(actual).to eq(expected)
end
it "generates the ruby for a ENDS WITH expression with regkey" do
actual = described_class.new("ENDS WITH" => {"regkey" => "foo", "regval" => "bar", "value" => "baz"}).to_ruby
expected = "<registry>foo : bar</registry> =~ /baz$/"
expect(actual).to eq(expected)
end
it "generates the ruby for a INCLUDES expression with hash context" do
actual = described_class.new({"INCLUDES" => {"field" => "Vm-name", "value" => "foo"}}, "hash").to_ruby
expected = "<value type=string>Vm.name</value> =~ /foo/"
expect(actual).to eq(expected)
end
it "generates the ruby for a INCLUDES expression with regkey" do
actual = described_class.new("INCLUDES" => {"regkey" => "foo", "regval" => "bar", "value" => "baz"}).to_ruby
expected = "<registry>foo : bar</registry> =~ /baz/"
expect(actual).to eq(expected)
end
it "generates the ruby for a REGULAR EXPRESSION MATCHES expression with regkey" do
actual = described_class.new(
"REGULAR EXPRESSION MATCHES" => {"regkey" => "foo", "regval" => "bar", "value" => "baz"}
).to_ruby
expected = "<registry>foo : bar</registry> =~ /baz/"
expect(actual).to eq(expected)
end
it "generates the ruby for a REGULAR EXPRESSION DOES NOT MATCH expression with hash context" do
actual = described_class.new(
{"REGULAR EXPRESSION DOES NOT MATCH" => {"field" => "Vm-name", "value" => "foo"}},
"hash"
).to_ruby
expected = "<value type=string>Vm.name</value> !~ /foo/"
expect(actual).to eq(expected)
end
it "generates the ruby for a REGULAR EXPRESSION DOES NOT MATCH expression with regkey" do
actual = described_class.new(
"REGULAR EXPRESSION DOES NOT MATCH" => {"regkey" => "foo", "regval" => "bar", "value" => "baz"}
).to_ruby
expected = "<registry>foo : bar</registry> !~ /baz/"
expect(actual).to eq(expected)
end
it "generates the ruby for a IS NULL expression with hash context" do
actual = described_class.new({"IS NULL" => {"field" => "Vm-name"}}, "hash").to_ruby
expected = "<value type=string>Vm.name</value> == \"\""
expect(actual).to eq(expected)
end
it "generates the ruby for a IS NULL expression with regkey" do
actual = described_class.new("IS NULL" => {"regkey" => "foo", "regval" => "bar"}).to_ruby
expected = "<registry>foo : bar</registry> == \"\""
expect(actual).to eq(expected)
end
it "generates the ruby for a IS NOT NULL expression with hash context" do
actual = described_class.new({"IS NOT NULL" => {"field" => "Vm-name"}}, "hash").to_ruby
expected = "<value type=string>Vm.name</value> != \"\""
expect(actual).to eq(expected)
end
it "generates the ruby for a IS NOT NULL expression with regkey" do
actual = described_class.new("IS NOT NULL" => {"regkey" => "foo", "regval" => "bar"}).to_ruby
expected = "<registry>foo : bar</registry> != \"\""
expect(actual).to eq(expected)
end
it "generates the ruby for a IS EMPTY expression with hash context" do
actual = described_class.new({"IS EMPTY" => {"field" => "Vm-name"}}, "hash").to_ruby
expected = "<value type=string>Vm.name</value> == \"\""
expect(actual).to eq(expected)
end
it "generates the ruby for a IS EMPTY expression with regkey" do
actual = described_class.new("IS EMPTY" => {"regkey" => "foo", "regval" => "bar"}).to_ruby
expected = "<registry>foo : bar</registry> == \"\""
expect(actual).to eq(expected)
end
it "generates the ruby for a IS NOT EMPTY expression with hash context" do
actual = described_class.new({"IS NOT EMPTY" => {"field" => "Vm-name"}}, "hash").to_ruby
expected = "<value type=string>Vm.name</value> != \"\""
expect(actual).to eq(expected)
end
it "generates the ruby for a IS NOT EMPTY expression with regkey" do
actual = described_class.new("IS NOT EMPTY" => {"regkey" => "foo", "regval" => "bar"}).to_ruby
expected = "<registry>foo : bar</registry> != \"\""
expect(actual).to eq(expected)
end
it "generates the ruby for a CONTAINS expression with hash context" do
actual = described_class.new(
{"CONTAINS" => {"tag" => "Host.managed-environment", "value" => "prod"}},
"hash"
).to_ruby
expected = "<value type=string>managed.environment</value> CONTAINS \"\""
expect(actual).to eq(expected)
end
it "generates the SQL for a < expression" do
actual = described_class.new("<" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}).to_ruby
expected = "<value ref=vm, type=integer>/virtual/hardware/cpu_sockets</value> < 2"
expect(actual).to eq(expected)
end
it "generates the SQL for a < expression with dynamic value" do
actual = described_class.new("<" => {"field" => "Vm.hardware-cpu_sockets", "value" => "Vm.hardware-cpu_sockets"}).to_ruby
expected = "<value ref=vm, type=integer>/virtual/hardware/cpu_sockets</value> < <value ref=vm, type=integer>/virtual/hardware/cpu_sockets</value>"
expect(actual).to eq(expected)
end
it "generates the SQL for a <= expression" do
actual = described_class.new("<=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}).to_ruby
expected = "<value ref=vm, type=integer>/virtual/hardware/cpu_sockets</value> <= 2"
expect(actual).to eq(expected)
end
it "generates the SQL for a <= expression with dynamic value" do
actual = described_class.new("<=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "Vm.hardware-cpu_sockets"}).to_ruby
expected = "<value ref=vm, type=integer>/virtual/hardware/cpu_sockets</value> <= <value ref=vm, type=integer>/virtual/hardware/cpu_sockets</value>"
expect(actual).to eq(expected)
end
it "generates the SQL for a != expression" do
actual = described_class.new("!=" => {"field" => "Vm-name", "value" => "foo"}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> != \"foo\""
expect(actual).to eq(expected)
end
it "generates the SQL for a != expression with dynamic value" do
actual = described_class.new("!=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "Vm.hardware-cpu_sockets"}).to_ruby
expected = "<value ref=vm, type=integer>/virtual/hardware/cpu_sockets</value> != <value ref=vm, type=integer>/virtual/hardware/cpu_sockets</value>"
expect(actual).to eq(expected)
end
it "detects value empty array" do
exp = MiqExpression.new("INCLUDES" => {"field" => "Vm-name", "value" => "[]"})
expect(exp.to_ruby).to eq("<value ref=vm, type=string>/virtual/name</value> =~ /\\[\\]/")
end
it "raises error if expression contains ruby script" do
exp = MiqExpression.new("RUBY" => {"field" => "Host-name", "value" => "puts 'Hello world!'"})
expect { exp.to_ruby }.to raise_error(/operator 'RUBY' is not supported/)
end
it "tests numeric set expressions" do
exp = MiqExpression.new("=" => {"field" => "Host-enabled_inbound_ports", "value" => "22,427,5988,5989"})
expect(exp.to_ruby).to eq('<value ref=host, type=numeric_set>/virtual/enabled_inbound_ports</value> == [22,427,5988,5989]')
end
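# The value of a REGULAR EXPRESSION (DOES NOT) MATCH expression is embedded in a /.../
# literal, so unescaped forward slashes and #{} interpolation must be escaped to keep
# the value from breaking out of the regex.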
it "escapes forward slashes for values in REGULAR EXPRESSION MATCHES expressions" do
value = "//; puts 'Hi, mom!';//"
actual = described_class.new("REGULAR EXPRESSION MATCHES" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> =~ /\\/; puts 'Hi, mom!';\\//"
expect(actual).to eq(expected)
end
it "preserves the delimiters when escaping forward slashes in case-insensitive REGULAR EXPRESSION MATCHES expressions" do
value = "//; puts 'Hi, mom!';//i"
actual = described_class.new("REGULAR EXPRESSION MATCHES" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> =~ /\\/; puts 'Hi, mom!';\\//i"
expect(actual).to eq(expected)
end
it "escapes forward slashes for non-Regexp literal values in REGULAR EXPRESSION MATCHES expressions" do
value = ".*/; puts 'Hi, mom!';/.*"
actual = described_class.new("REGULAR EXPRESSION MATCHES" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> =~ /.*\\/; puts 'Hi, mom!';\\/.*/"
expect(actual).to eq(expected)
end
it "does not escape escaped forward slashes for values in REGULAR EXPRESSION MATCHES expressions" do
value = "\/foo\/bar"
actual = described_class.new("REGULAR EXPRESSION MATCHES" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> =~ /\\/foo\\/bar/"
expect(actual).to eq(expected)
end
it "handles arbitarily long escaping of forward " do
value = "\\\\\\/foo\\\\\\/bar"
actual = described_class.new("REGULAR EXPRESSION MATCHES" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> =~ /\\/foo\\/bar/"
expect(actual).to eq(expected)
end
it "escapes interpolation in REGULAR EXPRESSION MATCHES expressions" do
value = "/\#{puts 'Hi, mom!'}/"
actual = described_class.new("REGULAR EXPRESSION MATCHES" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> =~ /\\\#{puts 'Hi, mom!'}/"
expect(actual).to eq(expected)
end
it "handles arbitrarily long escaping of interpolation in REGULAR EXPRESSION MATCHES expressions" do
value = "/\\\\\#{puts 'Hi, mom!'}/"
actual = described_class.new("REGULAR EXPRESSION MATCHES" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> =~ /\\\#{puts 'Hi, mom!'}/"
expect(actual).to eq(expected)
end
it "escapes interpolation in non-Regexp literal values in REGULAR EXPRESSION MATCHES expressions" do
value = "\#{puts 'Hi, mom!'}"
actual = described_class.new("REGULAR EXPRESSION MATCHES" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> =~ /\\\#{puts 'Hi, mom!'}/"
expect(actual).to eq(expected)
end
it "escapes forward slashes for values in REGULAR EXPRESSION DOES NOT MATCH expressions" do
value = "//; puts 'Hi, mom!';//"
actual = described_class.new("REGULAR EXPRESSION DOES NOT MATCH" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> !~ /\\/; puts 'Hi, mom!';\\//"
expect(actual).to eq(expected)
end
it "preserves the delimiters when escaping forward slashes in case-insensitive REGULAR EXPRESSION DOES NOT MATCH expressions" do
value = "//; puts 'Hi, mom!';//i"
actual = described_class.new("REGULAR EXPRESSION DOES NOT MATCH" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> !~ /\\/; puts 'Hi, mom!';\\//i"
expect(actual).to eq(expected)
end
it "escapes forward slashes for non-Regexp literal values in REGULAR EXPRESSION DOES NOT MATCH expressions" do
value = ".*/; puts 'Hi, mom!';/.*"
actual = described_class.new("REGULAR EXPRESSION DOES NOT MATCH" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> !~ /.*\\/; puts 'Hi, mom!';\\/.*/"
expect(actual).to eq(expected)
end
it "does not escape escaped forward slashes for values in REGULAR EXPRESSION DOES NOT MATCH expressions" do
value = "\/foo\/bar"
actual = described_class.new("REGULAR EXPRESSION DOES NOT MATCH" => {"field" => "Vm-name", "value" => value}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> !~ /\\/foo\\/bar/"
expect(actual).to eq(expected)
end
# Note: To debug these tests, the following may be helpful:
# puts "Expression Raw: #{filter.exp.inspect}"
# puts "Expression in Human: #{filter.to_human}"
# puts "Expression in Ruby: #{filter.to_ruby}"
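# Numeric-set values may include ranges (e.g. "1..4"), which are expanded into the
# individual ports before the set comparison is built.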
it "expands ranges with INCLUDES ALL" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
INCLUDES ALL:
field: Host-enabled_inbound_ports
value: 22, 427, 5988, 5989, 1..4
'
expected = "(<value ref=host, type=numeric_set>/virtual/enabled_inbound_ports</value> & [1,2,3,4,22,427,5988,5989]) == [1,2,3,4,22,427,5988,5989]"
expect(filter.to_ruby).to eq(expected)
end
it "expands ranges with INCLUDES ANY" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
INCLUDES ANY:
field: Host-enabled_inbound_ports
value: 22, 427, 5988, 5989, 1..3
'
expected = "([1,2,3,22,427,5988,5989] - <value ref=host, type=numeric_set>/virtual/enabled_inbound_ports</value>) != [1,2,3,22,427,5988,5989]"
expect(filter.to_ruby).to eq(expected)
end
it "expands ranges with INCLUDES ONLY" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
INCLUDES ONLY:
field: Host-enabled_inbound_ports
value: 22
'
expected = "(<value ref=host, type=numeric_set>/virtual/enabled_inbound_ports</value> - [22]) == []"
expect(filter.to_ruby).to eq(expected)
end
it "expands ranges with LIMITED TO" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
LIMITED TO:
field: Host-enabled_inbound_ports
value: 22
'
expected = "(<value ref=host, type=numeric_set>/virtual/enabled_inbound_ports</value> - [22]) == []"
expect(filter.to_ruby).to eq(expected)
end
it "should test string set expressions with EQUAL" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
"=":
field: Host-service_names
value: "ntpd, sshd, vmware-vpxa, vmware-webAccess"
'
expected = "<value ref=host, type=string_set>/virtual/service_names</value> == ['ntpd','sshd','vmware-vpxa','vmware-webAccess']"
expect(filter.to_ruby).to eq(expected)
end
it "should test string set expressions with INCLUDES ALL" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
INCLUDES ALL:
field: Host-service_names
value: "ntpd, sshd, vmware-vpxa, vmware-webAccess"
'
expected = "(<value ref=host, type=string_set>/virtual/service_names</value> & ['ntpd','sshd','vmware-vpxa','vmware-webAccess']) == ['ntpd','sshd','vmware-vpxa','vmware-webAccess']"
expect(filter.to_ruby).to eq(expected)
end
it "should test string set expressions with INCLUDES ANY" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
INCLUDES ANY:
field: Host-service_names
value: "ntpd, sshd, vmware-vpxa, vmware-webAccess"
'
expected = "(['ntpd','sshd','vmware-vpxa','vmware-webAccess'] - <value ref=host, type=string_set>/virtual/service_names</value>) != ['ntpd','sshd','vmware-vpxa','vmware-webAccess']"
expect(filter.to_ruby).to eq(expected)
end
it "should test string set expressions with INCLUDES ONLY" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
INCLUDES ONLY:
field: Host-service_names
value: "ntpd, sshd, vmware-vpxa"
'
expected = "(<value ref=host, type=string_set>/virtual/service_names</value> - ['ntpd','sshd','vmware-vpxa']) == []"
expect(filter.to_ruby).to eq(expected)
end
it "should test string set expressions with LIMITED TO" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
LIMITED TO:
field: Host-service_names
value: "ntpd, sshd, vmware-vpxa"
'
expected = "(<value ref=host, type=string_set>/virtual/service_names</value> - ['ntpd','sshd','vmware-vpxa']) == []"
expect(filter.to_ruby).to eq(expected)
end
it "should test string set expressions with FIND/checkall" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
FIND:
search:
"=":
field: Host.filesystems-name
value: /etc/passwd
checkall:
"=":
field: Host.filesystems-permissions
value: "0644"
'
expected = '<find><search><value ref=host, type=text>/virtual/filesystems/name</value> == "/etc/passwd"</search><check mode=all><value ref=host, type=string>/virtual/filesystems/permissions</value> == "0644"</check></find>'
expect(filter.to_ruby).to eq(expected)
end
it "should test regexp with regex literal" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
REGULAR EXPRESSION MATCHES:
field: Host-name
value: /^[^.]*\.galaxy\..*$/
'
expect(filter.to_ruby).to eq('<value ref=host, type=string>/virtual/name</value> =~ /^[^.]*\.galaxy\..*$/')
end
it "should test regexp with string literal" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
REGULAR EXPRESSION MATCHES:
field: Host-name
value: ^[^.]*\.galaxy\..*$
'
expect(filter.to_ruby).to eq('<value ref=host, type=string>/virtual/name</value> =~ /^[^.]*\.galaxy\..*$/')
end
it "should test regexp as part of a FIND/checkany expression" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
FIND:
search:
"=":
field: Host.firewall_rules-enabled
value: "true"
checkany:
REGULAR EXPRESSION MATCHES:
field: Host.firewall_rules-name
value: /^.*SLP.*$/'
expected = '<find><search><value ref=host, type=boolean>/virtual/firewall_rules/enabled</value> == "true"</search><check mode=any><value ref=host, type=string>/virtual/firewall_rules/name</value> =~ /^.*SLP.*$/</check></find>'
expect(filter.to_ruby).to eq(expected)
end
it "should test negative regexp with FIND/checkany expression" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
FIND:
search:
"=":
field: Host.firewall_rules-enabled
value: "true"
checkany:
REGULAR EXPRESSION DOES NOT MATCH:
field: Host.firewall_rules-name
value: /^.*SLP.*$/'
expected = '<find><search><value ref=host, type=boolean>/virtual/firewall_rules/enabled</value> == "true"</search><check mode=any><value ref=host, type=string>/virtual/firewall_rules/name</value> !~ /^.*SLP.*$/</check></find>'
expect(filter.to_ruby).to eq(expected)
end
it "should test fb7726" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
CONTAINS:
field: Host.filesystems-name
value: /etc/shadow
'
expect(filter.to_ruby).to eq("<exist ref=host>/virtual/filesystems/name/%2fetc%2fshadow</exist>")
end
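# Numeric values may use unit-suffix methods (25.kilobytes, 1,000.megabytes); they are
# converted to plain integers in the generated ruby.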
it "should test numbers with methods" do
filter = YAML.load '--- !ruby/object:MiqExpression
context_type:
exp:
">=":
field: Vm-memory_shares
value: 25.kilobytes
'
expect(filter.to_ruby).to eq('<value ref=vm, type=integer>/virtual/memory_shares</value> >= 25600')
end
it "should test numbers with commas with methods" do
filter = YAML.load '--- !ruby/object:MiqExpression
context_type:
exp:
">=":
field: Vm-used_disk_storage
value: 1,000.megabytes
'
expect(filter.to_ruby).to eq('<value ref=vm, type=integer>/virtual/used_disk_storage</value> >= 1048576000')
end
context "integration" do
it "should escape strings" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
INCLUDES:
field: Vm.registry_items-data
value: $foo
'
expect(filter.to_ruby).to eq("<value ref=vm, type=text>/virtual/registry_items/data</value> =~ /\\$foo/")
data = {"registry_items.data" => "C:\\Documents and Users\\O'Neill, April\\", "/virtual/registry_items/data" => "C:\\Documents and Users\\O'Neill, April\\"}
expect(Condition.subst(filter.to_ruby, data)).to eq("\"C:\\\\Documents and Users\\\\O'Neill, April\\\\\" =~ /\\$foo/")
end
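# With context_type "hash" the expression is evaluated against a flat attribute hash
# (see the `data` fixture below) via Condition.subst, rather than against model objects.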
context "when context_type is 'hash'" do
let(:data) do
{
"name" => "VM_1",
"guest_applications.version" => "3.1.2.7193",
"guest_applications.release" => nil,
"guest_applications.vendor" => "VMware, Inc.", "id" => 9,
"guest_applications.name" => "VMware Tools",
"guest_applications.package_name" => nil
}
end
it "should test context hash with EQUAL" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
"=":
field: Vm.guest_applications-name
value: VMware Tools
context_type: hash
'
expect(filter.to_ruby).to eq("<value type=string>guest_applications.name</value> == \"VMware Tools\"")
expect(Condition.subst(filter.to_ruby, data)).to eq("\"VMware Tools\" == \"VMware Tools\"")
end
it "should test context hash with REGULAR EXPRESSION MATCHES" do
filter = YAML.load '--- !ruby/object:MiqExpression
exp:
REGULAR EXPRESSION MATCHES:
field: Vm.guest_applications-vendor
value: /^[^.]*ware.*$/
context_type: hash
'
expect(filter.to_ruby).to eq("<value type=string>guest_applications.vendor</value> =~ /^[^.]*ware.*$/")
expect(Condition.subst(filter.to_ruby, data)).to eq('"VMware, Inc." =~ /^[^.]*ware.*$/')
end
end
end
it "generates the ruby for a STARTS WITH expression" do
actual = described_class.new("STARTS WITH" => {"field" => "Vm-name", "value" => "foo"}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> =~ /^foo/"
expect(actual).to eq(expected)
end
it "generates the ruby for an ENDS WITH expression" do
actual = described_class.new("ENDS WITH" => {"field" => "Vm-name", "value" => "foo"}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> =~ /foo$/"
expect(actual).to eq(expected)
end
it "generates the ruby for an AND expression" do
actual = described_class.new("AND" => [{"=" => {"field" => "Vm-name", "value" => "foo"}},
{"=" => {"field" => "Vm-vendor", "value" => "bar"}}]).to_ruby
expected = "(<value ref=vm, type=string>/virtual/name</value> == \"foo\" and <value ref=vm, type=string>/virtual/vendor</value> == \"bar\")"
expect(actual).to eq(expected)
end
it "generates the ruby for an OR expression" do
actual = described_class.new("OR" => [{"=" => {"field" => "Vm-name", "value" => "foo"}},
{"=" => {"field" => "Vm-vendor", "value" => "bar"}}]).to_ruby
expected = "(<value ref=vm, type=string>/virtual/name</value> == \"foo\" or <value ref=vm, type=string>/virtual/vendor</value> == \"bar\")"
expect(actual).to eq(expected)
end
it "generates the ruby for a NOT expression" do
actual = described_class.new("NOT" => {"=" => {"field" => "Vm-name", "value" => "foo"}}).to_ruby
expected = "!(<value ref=vm, type=string>/virtual/name</value> == \"foo\")"
expect(actual).to eq(expected)
end
it "generates the ruby for a ! expression" do
actual = described_class.new("!" => {"=" => {"field" => "Vm-name", "value" => "foo"}}).to_ruby
expected = "!(<value ref=vm, type=string>/virtual/name</value> == \"foo\")"
expect(actual).to eq(expected)
end
it "generates the ruby for an IS NULL expression" do
actual = described_class.new("IS NULL" => {"field" => "Vm-name"}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> == \"\""
expect(actual).to eq(expected)
end
it "generates the ruby for an IS NOT NULL expression" do
actual = described_class.new("IS NOT NULL" => {"field" => "Vm-name"}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> != \"\""
expect(actual).to eq(expected)
end
it "generates the ruby for an IS EMPTY expression" do
actual = described_class.new("IS EMPTY" => {"field" => "Vm-name"}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> == \"\""
expect(actual).to eq(expected)
end
it "generates the ruby for an IS NOT EMPTY expression" do
actual = described_class.new("IS NOT EMPTY" => {"field" => "Vm-name"}).to_ruby
expected = "<value ref=vm, type=string>/virtual/name</value> != \"\""
expect(actual).to eq(expected)
end
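    # The FIND expressions below compile to <find><search>...</search><check mode=...>...</check></find>,
    # where the check mode (all/any/count) comes from the checkall/checkany/checkcount key.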
it "generates the ruby for a FIND expression with checkall" do
actual = described_class.new(
"FIND" => {"search" => {"=" => {"field" => "Vm-name", "value" => "foo"}},
"checkall" => {">" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}}
).to_ruby
expected = "<find><search><value ref=vm, type=string>/virtual/name</value> == \"foo\"</search><check mode=all><value ref=vm, type=integer>/virtual/hardware/cpu_sockets</value> > 2</check></find>"
expect(actual).to eq(expected)
end
it "generates the ruby for a FIND expression with checkany" do
actual = described_class.new(
"FIND" => {"search" => {"=" => {"field" => "Vm-name", "value" => "foo"}},
"checkany" => {">" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}}
).to_ruby
expected = "<find><search><value ref=vm, type=string>/virtual/name</value> == \"foo\"</search><check mode=any><value ref=vm, type=integer>/virtual/hardware/cpu_sockets</value> > 2</check></find>"
expect(actual).to eq(expected)
end
it "generates the ruby for a FIND expression with checkcount and =" do
actual = described_class.new(
"FIND" => {"search" => {"=" => {"field" => "Vm-name", "value" => "foo"}},
"checkcount" => {"=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}}
).to_ruby
expected = "<find><search><value ref=vm, type=string>/virtual/name</value> == \"foo\"</search><check mode=count><count> == 2</check></find>"
expect(actual).to eq(expected)
end
it "generates the ruby for a FIND expression with checkcount and !=" do
actual = described_class.new(
"FIND" => {"search" => {"=" => {"field" => "Vm-name", "value" => "foo"}},
"checkcount" => {"!=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}}
).to_ruby
expected = "<find><search><value ref=vm, type=string>/virtual/name</value> == \"foo\"</search><check mode=count><count> != 2</check></find>"
expect(actual).to eq(expected)
end
it "generates the ruby for a FIND expression with checkcount and <" do
actual = described_class.new(
"FIND" => {"search" => {"=" => {"field" => "Vm-name", "value" => "foo"}},
"checkcount" => {"<" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}}
).to_ruby
expected = "<find><search><value ref=vm, type=string>/virtual/name</value> == \"foo\"</search><check mode=count><count> < 2</check></find>"
expect(actual).to eq(expected)
end
it "generates the ruby for a FIND expression with checkcount and >" do
actual = described_class.new(
"FIND" => {"search" => {"=" => {"field" => "Vm-name", "value" => "foo"}},
"checkcount" => {">" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}}
).to_ruby
expected = "<find><search><value ref=vm, type=string>/virtual/name</value> == \"foo\"</search><check mode=count><count> > 2</check></find>"
expect(actual).to eq(expected)
end
it "generates the ruby for a FIND expression with checkcount and <=" do
actual = described_class.new(
"FIND" => {"search" => {"=" => {"field" => "Vm-name", "value" => "foo"}},
"checkcount" => {"<=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}}
).to_ruby
expected = "<find><search><value ref=vm, type=string>/virtual/name</value> == \"foo\"</search><check mode=count><count> <= 2</check></find>"
expect(actual).to eq(expected)
end
it "generates the ruby for a FIND expression with checkcount and >=" do
actual = described_class.new(
"FIND" => {"search" => {"=" => {"field" => "Vm-name", "value" => "foo"}},
"checkcount" => {">=" => {"field" => "Vm.hardware-cpu_sockets", "value" => "2"}}}
).to_ruby
expected = "<find><search><value ref=vm, type=string>/virtual/name</value> == \"foo\"</search><check mode=count><count> >= 2</check></find>"
expect(actual).to eq(expected)
end
it "generates the ruby for a KEY EXISTS expression" do
actual = described_class.new("KEY EXISTS" => {"regkey" => "foo"}).to_ruby
expected = "<registry key_exists=1, type=boolean>foo</registry> == 'true'"
expect(actual).to eq(expected)
end
it "generates the ruby for a VALUE EXISTS expression" do
actual = described_class.new("VALUE EXISTS" => {"regkey" => "foo", "regval" => "bar"}).to_ruby
expected = "<registry value_exists=1, type=boolean>foo : bar</registry> == 'true'"
expect(actual).to eq(expected)
end
it "raises an error for an expression with an invalid operator" do
expression = described_class.new("FOOBAR" => {"field" => "Vm-name", "value" => "baz"})
expect { expression.to_ruby }.to raise_error(/operator 'FOOBAR' is not supported/)
end
context "date/time support" do
context "static dates and times with no timezone" do
it "generates the ruby for an AFTER expression with date value" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time > '2011-01-10T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a BEFORE expression with date value" do
exp = MiqExpression.new("BEFORE" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time < '2011-01-10T00:00:00Z'.to_time(:utc)")
end
it "generates the ruby for a AFTER expression with datetime value" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-last_scan_on", "value" => "2011-01-10 9:00"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time > '2011-01-10T09:00:00Z'.to_time(:utc)")
end
it "generates the ruby for a IS expression with date value" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-10T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a IS expression with datetime value" do
exp = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "2011-01-10"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-10T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a IS expression with hash context" do
actual = described_class.new({"IS" => {"field" => "Vm-retires_on", "value" => "2011-01-10"}}, "hash").to_ruby
expected = "val=<value type=datetime>Vm.retires_on</value>; !val.nil? && val.to_time >= '2011-01-10T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T23:59:59Z'.to_time(:utc)"
expect(actual).to eq(expected)
end
it "generates the ruby for a FROM expression with date values" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["2011-01-09", "2011-01-10"]})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-09T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with date values" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["01/09/2011", "01/10/2011"]})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-09T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with datetime values" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["2011-01-10 8:00", "2011-01-10 17:00"]})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-10T08:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T17:00:00Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with identical datetime values" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["2011-01-10 00:00", "2011-01-10 00:00"]})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-10T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T00:00:00Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with hash context" do
actual = described_class.new(
{"FROM" => {"field" => "Vm-retires_on", "value" => ["2011-01-09", "2011-01-10"]}},
"hash"
).to_ruby
expected = "val=<value type=datetime>Vm.retires_on</value>; !val.nil? && val.to_time >= '2011-01-09T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T23:59:59Z'.to_time(:utc)"
expect(actual).to eq(expected)
end
end
context "static dates and times with a time zone" do
let(:tz) { "Eastern Time (US & Canada)" }
it "generates the ruby for a AFTER expression with date value" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time > '2011-01-11T04:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a BEFORE expression with date value" do
exp = MiqExpression.new("BEFORE" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time < '2011-01-10T05:00:00Z'.to_time(:utc)")
end
it "generates the ruby for a AFTER expression with datetime value" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-last_scan_on", "value" => "2011-01-10 9:00"})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time > '2011-01-10T14:00:00Z'.to_time(:utc)")
end
it "generates the ruby for a AFTER expression with datetime value" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-last_scan_on", "value" => "2011-01-10 9:00"})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time > '2011-01-10T14:00:00Z'.to_time(:utc)")
end
it "generates the ruby for a IS expression wtih date value" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-10T05:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-11T04:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with date values" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["2011-01-09", "2011-01-10"]})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-09T05:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-11T04:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with datetime values" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["2011-01-10 8:00", "2011-01-10 17:00"]})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-10T13:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T22:00:00Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with identical datetime values" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["2011-01-10 00:00", "2011-01-10 00:00"]})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-10T05:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T05:00:00Z'.to_time(:utc)")
end
end
end
context "relative date/time support" do
around { |example| Timecop.freeze("2011-01-11 17:30 UTC") { example.run } }
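      # Relative values such as "Yesterday" or "Last Week" below are resolved against this frozen time.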
context "given a non-UTC timezone" do
it "generates the SQL for a AFTER expression with a value of 'Yesterday' for a date field" do
exp = described_class.new("AFTER" => {"field" => "Vm-retires_on", "value" => "Yesterday"})
ruby, * = exp.to_ruby("Asia/Jakarta")
expect(ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time > '2011-01-11T16:59:59Z'.to_time(:utc)")
end
it "generates the RUBY for a BEFORE expression with a value of 'Yesterday' for a date field" do
exp = described_class.new("BEFORE" => {"field" => "Vm-retires_on", "value" => "Yesterday"})
ruby, * = exp.to_ruby("Asia/Jakarta")
expect(ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time < '2011-01-10T17:00:00Z'.to_time(:utc)")
end
it "generates the RUBY for an IS expression with a value of 'Yesterday' for a date field" do
exp = described_class.new("IS" => {"field" => "Vm-retires_on", "value" => "Yesterday"})
ruby, * = exp.to_ruby("Asia/Jakarta")
expect(ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-10T17:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-11T16:59:59Z'.to_time(:utc)")
end
it "generates the RUBY for a FROM expression with a value of 'Yesterday'/'Today' for a date field" do
exp = described_class.new("FROM" => {"field" => "Vm-retires_on", "value" => %w(Yesterday Today)})
ruby, * = exp.to_ruby("Asia/Jakarta")
expect(ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-10T17:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-12T16:59:59Z'.to_time(:utc)")
end
end
context "relative dates with no time zone" do
it "generates the ruby for an AFTER expression with date value of n Days Ago" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-retires_on", "value" => "2 Days Ago"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time > '2011-01-09T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for an AFTER expression with datetime value of n Days ago" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-last_scan_on", "value" => "2 Days Ago"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time > '2011-01-09T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a BEFORE expression with date value of n Days Ago" do
exp = MiqExpression.new("BEFORE" => {"field" => "Vm-retires_on", "value" => "2 Days Ago"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time < '2011-01-09T00:00:00Z'.to_time(:utc)")
end
it "generates the ruby for a BEFORE expression with datetime value of n Days Ago" do
exp = MiqExpression.new("BEFORE" => {"field" => "Vm-last_scan_on", "value" => "2 Days Ago"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time < '2011-01-09T00:00:00Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with datetime values of Last/This Hour" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["Last Hour", "This Hour"]})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-11T16:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-11T17:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with date values of Last Week" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["Last Week", "Last Week"]})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-03T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-09T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with datetime values of Last Week" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["Last Week", "Last Week"]})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-03T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-09T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with datetime values of n Months Ago/Last Month" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["2 Months Ago", "Last Month"]})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2010-11-01T00:00:00Z'.to_time(:utc) && val.to_time <= '2010-12-31T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for an IS expression with datetime value of Last Week" do
exp = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "Last Week"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-03T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-09T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for an IS expression with relative date with hash context" do
actual = described_class.new({"IS" => {"field" => "Vm-retires_on", "value" => "Yesterday"}}, "hash").to_ruby
expected = "val=<value type=datetime>Vm.retires_on</value>; !val.nil? && val.to_time >= '2011-01-10T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T23:59:59Z'.to_time(:utc)"
expect(actual).to eq(expected)
end
it "generates the ruby for an IS expression with date value of Last Week" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "Last Week"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-03T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-09T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a IS expression with date value of Today" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "Today"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-11T00:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-11T23:59:59Z'.to_time(:utc)")
end
it "generates the ruby for an IS expression with date value of n Hours Ago" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "3 Hours Ago"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-11T14:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-11T14:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a IS expression with datetime value of n Hours Ago" do
exp = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "3 Hours Ago"})
expect(exp.to_ruby).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-11T14:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-11T14:59:59Z'.to_time(:utc)")
end
end
context "relative time with a time zone" do
let(:tz) { "Hawaii" }
it "generates the ruby for a FROM expression with datetime value of Last/This Hour" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["Last Hour", "This Hour"]})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-11T16:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-11T17:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with date values of Last Week" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["Last Week", "Last Week"]})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-03T10:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T09:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with datetime values of Last Week" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["Last Week", "Last Week"]})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-03T10:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T09:59:59Z'.to_time(:utc)")
end
it "generates the ruby for a FROM expression with datetime values of n Months Ago/Last Month" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["2 Months Ago", "Last Month"]})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2010-11-01T10:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-01T09:59:59Z'.to_time(:utc)")
end
it "generates the ruby for an IS expression with datetime value of Last Week" do
exp = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "Last Week"})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-03T10:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T09:59:59Z'.to_time(:utc)")
end
it "generates the ruby for an IS expression with date value of Last Week" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "Last Week"})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-03T10:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-10T09:59:59Z'.to_time(:utc)")
end
it "generates the ruby for an IS expression with date value of Today" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "Today"})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-11T10:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-12T09:59:59Z'.to_time(:utc)")
end
it "generates the ruby for an IS expression with date value of n Hours Ago" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "3 Hours Ago"})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/retires_on</value>; !val.nil? && val.to_time >= '2011-01-11T14:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-11T14:59:59Z'.to_time(:utc)")
end
it "generates the ruby for an IS expression with datetime value of n Hours Ago" do
exp = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "3 Hours Ago"})
expect(exp.to_ruby(tz)).to eq("val=<value ref=vm, type=datetime>/virtual/last_scan_on</value>; !val.nil? && val.to_time >= '2011-01-11T14:00:00Z'.to_time(:utc) && val.to_time <= '2011-01-11T14:59:59Z'.to_time(:utc)")
end
end
end
end
describe ".numeric?" do
it "should return true if digits separated by comma and false if another separator used" do
expect(MiqExpression.numeric?('10000.55')).to be_truthy
expect(MiqExpression.numeric?('10,000.55')).to be_truthy
expect(MiqExpression.numeric?('10 000.55')).to be_falsey
end
it "should return true if there is method attached to number" do
expect(MiqExpression.numeric?('2,555.hello')).to eq(false)
expect(MiqExpression.numeric?('2,555.kilobytes')).to eq(true)
expect(MiqExpression.numeric?('2,555.55.megabytes')).to eq(true)
end
end
describe ".integer?" do
it "should return true if digits separated by comma and false if another separator used" do
expect(MiqExpression.integer?('2,555')).to eq(true)
expect(MiqExpression.integer?('2 555')).to eq(false)
end
it "should return true if there is method attached to number" do
expect(MiqExpression.integer?('2,555.kilobytes')).to eq(true)
expect(MiqExpression.integer?('2,555.hello')).to eq(false)
end
end
describe ".atom_error" do
it "should return false if value can be evaluated as regular expression" do
value = '123[)'
expect(MiqExpression.atom_error("Host-xx", "regular expression matches", value)).to be_truthy
value = '/foo/'
expect(MiqExpression.atom_error("Host-xx", "regular expression matches", value)).to be_falsey
end
it "should return true if operator is 'ruby'" do
# Ruby scripts in expressions are no longer supported.
expect(MiqExpression.atom_error("VmPerformance-cpu_usage_rate_average", "ruby", '')).to be_truthy
end
it "should return false if data type of field is 'string' or 'text'" do
field = "Vm-vendor"
expect(MiqExpression.atom_error(field, "START WITH", 'red')).to be_falsey
end
it "should return false if field is 'count'" do
      field = :count
      expect(MiqExpression.atom_error(field, ">=", '1')).to be_falsey
end
it "should return false if data type of field is boolean and value is 'true' or 'false'" do
field = "Vm-retired"
expect(MiqExpression.atom_error(field, "=", 'false')).to be_falsey
expect(MiqExpression.atom_error(field, "=", 'true')).to be_falsey
expect(MiqExpression.atom_error(field, "=", 'not')).to be_truthy
end
it "should return false if data type of field is float and value evaluated to float" do
field = "VmPerformance-cpu_usage_rate_average"
expect(MiqExpression.atom_error(field, "=", '')).to be_truthy
expect(MiqExpression.atom_error(field, "=", '123abc')).to be_truthy
expect(MiqExpression.atom_error(field, "=", '123')).to be_falsey
expect(MiqExpression.atom_error(field, "=", '123.456')).to be_falsey
expect(MiqExpression.atom_error(field, "=", '2,123.456')).to be_falsey
expect(MiqExpression.atom_error(field, "=", '123.kilobytes')).to be_falsey
end
it "should return false if data type of field is integer and value evaluated to integer" do
field = "Vm-cpu_limit"
expect(MiqExpression.atom_error(field, "=", '')).to be_truthy
expect(MiqExpression.atom_error(field, "=", '123.5')).to be_truthy
expect(MiqExpression.atom_error(field, "=", '123.abc')).to be_truthy
expect(MiqExpression.atom_error(field, "=", '123')).to be_falsey
expect(MiqExpression.atom_error(field, "=", '2,123')).to be_falsey
end
it "should return false if data type of field is datetime and value evaluated to datetime" do
field = "Vm-created_on"
expect(MiqExpression.atom_error(field, "=", Time.current.to_s)).to be_falsey
expect(MiqExpression.atom_error(field, "=", "123456")).to be_truthy
end
it "should return false if most resent date is second element in array" do
field = "Vm-state_changed_on"
expect(MiqExpression.atom_error(field, "FROM", ["7 Days Ago", "Today"])).to be_falsey
expect(MiqExpression.atom_error(field, "FROM", ["Today", "7 Days Ago"])).to be_truthy
end
end
context "._model_details" do
it "should not be overly aggressive in filtering out columns for logical CPUs" do
relats = MiqExpression.get_relats(Vm)
details = MiqExpression._model_details(relats, {})
cluster_sorted = details.select { |d| d.first.starts_with?("Cluster") }.sort
expect(cluster_sorted.map(&:first)).to include("Cluster / Deployment Role : Total Number of Physical CPUs")
expect(cluster_sorted.map(&:first)).to include("Cluster / Deployment Role : Total Number of Logical CPUs")
hardware_sorted = details.select { |d| d.first.starts_with?("Hardware") }.sort
expect(hardware_sorted.map(&:first)).not_to include("Hardware : Logical Cpus")
end
it "should not contain duplicate tag fields" do
# tags contain the root tenant's name
Tenant.seed
category = FactoryBot.create(:classification, :name => 'environment', :description => 'Environment')
FactoryBot.create(:classification, :parent_id => category.id, :name => 'prod', :description => 'Production')
tags = MiqExpression.model_details('Host',
:typ => 'tag',
:include_model => true,
:include_my_tags => false,
:userid => 'admin')
expect(tags.uniq.length).to eq(tags.length)
end
end
context "._custom_details_for" do
let(:klass) { Vm }
let(:vm) { FactoryBot.create(:vm) }
let!(:custom_attr1) { FactoryBot.create(:custom_attribute, :resource => vm, :name => "CATTR_1", :value => "Value 1") }
let!(:custom_attr2) { FactoryBot.create(:custom_attribute, :resource => vm, :name => nil, :value => "Value 2") }
it "ignores custom_attibutes with a nil name" do
expect(MiqExpression._custom_details_for("Vm", {})).to eq([["Custom Attribute: CATTR_1", "Vm-virtual_custom_attribute_CATTR_1"]])
end
    let(:container_image) { FactoryBot.create(:container_image) }
let!(:custom_attribute_with_section_1) do
      FactoryBot.create(:custom_attribute, :resource => container_image, :name => 'CATTR_3', :value => "Value 3",
:section => 'section_3')
end
let!(:custom_attribute_with_section_2) do
      FactoryBot.create(:custom_attribute, :resource => container_image, :name => 'CATTR_3', :value => "Value 3",
:section => 'docker_labels')
end
it "returns human names of custom attributes with sections" do
expected_result = [
['Docker Labels: CATTR_3', 'ContainerImage-virtual_custom_attribute_CATTR_3:SECTION:docker_labels'],
['Section 3: CATTR_3', 'ContainerImage-virtual_custom_attribute_CATTR_3:SECTION:section_3']
]
expect(MiqExpression._custom_details_for("ContainerImage", {})).to match_array(expected_result)
end
end
describe "#to_human" do
it "generates a human readable string for a 'FIELD' expression" do
exp = MiqExpression.new(">" => {"field" => "Vm-allocated_disk_storage", "value" => "5.megabytes"})
expect(exp.to_human).to eq('VM and Instance : Allocated Disk Storage > 5 MB')
end
it "generates a human readable string for a FIELD expression with alias" do
exp = MiqExpression.new(">" => {"field" => "Vm-allocated_disk_storage", "value" => "5.megabytes",
"alias" => "Disk"})
expect(exp.to_human).to eq('Disk > 5 MB')
end
it "generates a human readable string for a FIND/CHECK expression" do
exp = MiqExpression.new("FIND" => {"search" => {"STARTS WITH" => {"field" => "Vm.advanced_settings-name",
"value" => "X"}},
"checkall" => {"=" => {"field" => "Vm.advanced_settings-read_only",
"value" => "true"}}})
expect(exp.to_human).to eq('FIND VM and Instance.Advanced Settings : '\
'Name STARTS WITH "X" CHECK ALL Read Only = "true"')
end
it "generates a human readable string for a FIND/CHECK expression with alias" do
exp = MiqExpression.new("FIND" => {"search" => {"STARTS WITH" => {"field" => "Vm.advanced_settings-name",
"value" => "X",
"alias" => "Settings Name"}},
"checkall" => {"=" => {"field" => "Vm.advanced_settings-read_only",
"value" => "true"}}})
expect(exp.to_human).to eq('FIND Settings Name STARTS WITH "X" CHECK ALL Read Only = "true"')
end
it "generates a human readable string for a COUNT expression" do
exp = MiqExpression.new({">" => {"count" => "Vm.snapshots", "value" => "1"}})
expect(exp.to_human).to eq("COUNT OF VM and Instance.Snapshots > 1")
end
it "generates a human readable string for a COUNT expression with alias" do
exp = MiqExpression.new(">" => {"count" => "Vm.snapshots", "value" => "1", "alias" => "Snaps"})
expect(exp.to_human).to eq("COUNT OF Snaps > 1")
end
context "TAG type expression" do
before do
# tags contain the root tenant's name
Tenant.seed
category = FactoryBot.create(:classification, :name => 'environment', :description => 'Environment')
FactoryBot.create(:classification, :parent_id => category.id, :name => 'prod', :description => 'Production')
end
it "generates a human readable string for a TAG expression" do
exp = MiqExpression.new("CONTAINS" => {"tag" => "Host.managed-environment", "value" => "prod"})
expect(exp.to_human).to eq("Host / Node.My Company Tags : Environment CONTAINS 'Production'")
end
it "generates a human readable string for a TAG expression with alias" do
exp = MiqExpression.new("CONTAINS" => {"tag" => "Host.managed-environment", "value" => "prod",
"alias" => "Env"})
expect(exp.to_human).to eq("Env CONTAINS 'Production'")
end
end
context "when given values with relative dates" do
it "generates a human readable string for a AFTER '2 Days Ago' expression" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-retires_on", "value" => "2 Days Ago"})
expect(exp.to_human).to eq('VM and Instance : Retires On AFTER "2 Days Ago"')
end
it "generates a human readable string for a BEFORE '2 Days ago' expression" do
exp = MiqExpression.new("BEFORE" => {"field" => "Vm-retires_on", "value" => "2 Days Ago"})
expect(exp.to_human).to eq('VM and Instance : Retires On BEFORE "2 Days Ago"')
end
it "generates a human readable string for a FROM 'Last Hour' THROUGH 'This Hour' expression" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["Last Hour", "This Hour"]})
expect(exp.to_human).to eq('VM and Instance : Last Analysis Time FROM "Last Hour" THROUGH "This Hour"')
end
it "generates a human readable string for a FROM 'Last Week' THROUGH 'Last Week' expression" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["Last Week", "Last Week"]})
expect(exp.to_human).to eq('VM and Instance : Retires On FROM "Last Week" THROUGH "Last Week"')
end
it "generates a human readable string for a FROM '2 Months ago' THROUGH 'Last Month' expression" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on", "value" => ["2 Months Ago", "Last Month"]})
expect(exp.to_human).to eq('VM and Instance : Last Analysis Time FROM "2 Months Ago" THROUGH "Last Month"')
end
it "generates a human readable string for a IS '3 Hours Ago' expression" do
exp = MiqExpression.new("IS" => {"field" => "Vm-last_scan_on", "value" => "3 Hours Ago"})
expect(exp.to_human).to eq('VM and Instance : Last Analysis Time IS "3 Hours Ago"')
end
end
context "when giving value with static dates and times" do
it "generates a human readable string for a AFTER expression with date without time" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_human).to eq('VM and Instance : Retires On AFTER "2011-01-10"')
end
it "generates a human readable string for a AFTER expression with date and time" do
exp = MiqExpression.new("AFTER" => {"field" => "Vm-last_scan_on", "value" => "2011-01-10 9:00"})
expect(exp.to_human).to eq('VM and Instance : Last Analysis Time AFTER "2011-01-10 9:00"')
end
it "generates a human readable string for a BEFORE expression with date without time" do
exp = MiqExpression.new("BEFORE" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_human).to eq('VM and Instance : Retires On BEFORE "2011-01-10"')
end
it "generates a human readable string for a '>' expression with date without time" do
exp = MiqExpression.new(">" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_human).to eq('VM and Instance : Retires On > "2011-01-10"')
end
it "generates a human readable string for a '>' expression with date and time" do
exp = MiqExpression.new(">" => {"field" => "Vm-last_scan_on", "value" => "2011-01-10 9:00"})
expect(exp.to_human).to eq('VM and Instance : Last Analysis Time > "2011-01-10 9:00"')
end
it "generates a human readable string for a '<' expression with date without time" do
exp = MiqExpression.new("<" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_human).to eq('VM and Instance : Retires On < "2011-01-10"')
end
it "generates a human readable string for a '>=' expression with date and time" do
exp = MiqExpression.new(">=" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_human).to eq('VM and Instance : Retires On >= "2011-01-10"')
end
it "generates a human readable string for a '<=' expression with date without time" do
exp = MiqExpression.new("<=" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_human).to eq('VM and Instance : Retires On <= "2011-01-10"')
end
it "generates a human readable string for a 'IS' with date without time" do
exp = MiqExpression.new("IS" => {"field" => "Vm-retires_on", "value" => "2011-01-10"})
expect(exp.to_human).to eq('VM and Instance : Retires On IS "2011-01-10"')
end
it "generates a human readable string for a FROM THROUGH expression with date format: 'yyyy-mm-dd'" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["2011-01-09", "2011-01-10"]})
expect(exp.to_human).to eq('VM and Instance : Retires On FROM "2011-01-09" THROUGH "2011-01-10"')
end
it "generates a human readable string for a FROM THROUGH expression with date format: 'mm/dd/yyyy'" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-retires_on", "value" => ["01/09/2011", "01/10/2011"]})
expect(exp.to_human).to eq('VM and Instance : Retires On FROM "01/09/2011" THROUGH "01/10/2011"')
end
it "generates a human readable string for a FROM THROUGH expression with date and time" do
exp = MiqExpression.new("FROM" => {"field" => "Vm-last_scan_on",
"value" => ["2011-01-10 8:00", "2011-01-10 17:00"]})
expect(exp.to_human).to eq('VM and Instance : Last Analysis Time ' \
'FROM "2011-01-10 8:00" THROUGH "2011-01-10 17:00"')
end
end
end
context "quick search" do
let(:exp) { {"=" => {"field" => "Vm-name", "value" => "test"}} }
let(:qs_exp) { {"=" => {"field" => "Vm-name", "value" => :user_input}} }
let(:complex_qs_exp) do
{
"AND" => [
{"=" => {"field" => "Vm-name", "value" => "test"}},
{"=" => {"field" => "Vm-name", "value" => :user_input}}
]
}
end
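    # An expression counts as a quick search when any atom uses the :user_input placeholder
    # instead of a literal value.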
describe ".quick_search?" do
it "detects false in hash" do
expect(MiqExpression.quick_search?(exp)).to be_falsey
end
it "detects in hash" do
expect(MiqExpression.quick_search?(qs_exp)).to be_truthy
end
it "detects in complex hash" do
expect(MiqExpression.quick_search?(complex_qs_exp)).to be_truthy
end
it "detects false in miq expression" do
expect(MiqExpression.quick_search?(MiqExpression.new(exp))).to be_falsey
end
it "detects in miq expression" do
expect(MiqExpression.quick_search?(MiqExpression.new(qs_exp))).to be_truthy
end
end
describe "#quick_search?" do
it "detects false in hash" do
expect(MiqExpression.new(exp).quick_search?).to be_falsey
end
it "detects in hash" do
expect(MiqExpression.new(qs_exp).quick_search?).to be_truthy
end
it "detects in complex hash" do
expect(MiqExpression.new(complex_qs_exp).quick_search?).to be_truthy
end
end
end
describe ".merge_where_clauses" do
it "returns nil for nil" do
expect(MiqExpression.merge_where_clauses(nil)).to be_nil
end
it "returns nil for blank" do
expect(MiqExpression.merge_where_clauses("")).to be_nil
end
it "returns nil for multiple empty arrays" do
      expect(MiqExpression.merge_where_clauses([], [])).to be_nil
end
it "returns same string single results" do
expect(MiqExpression.merge_where_clauses("a=5")).to eq("a=5")
end
it "returns same string when concatinating blank results" do
expect(MiqExpression.merge_where_clauses("a=5", [])).to eq("a=5")
end
# would be nice if we returned a hash
it "returns a string if the only argument is a hash" do
expect(MiqExpression.merge_where_clauses({"vms.id" => 5})).to eq("\"vms\".\"id\" = 5")
end
it "concatinates 2 arrays" do
expect(MiqExpression.merge_where_clauses(["a=?",5], ["b=?",5])).to eq("(a=5) AND (b=5)")
end
it "concatinates 2 string" do
expect(MiqExpression.merge_where_clauses("a=5", "b=5")).to eq("(a=5) AND (b=5)")
end
it "concatinates a string and a hash" do
expect(MiqExpression.merge_where_clauses("a=5", {"vms.id" => 5})).to eq("(a=5) AND (\"vms\".\"id\" = 5)")
end
end
describe ".get_col_type" do
subject { described_class.get_col_type(@field) }
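    # Field strings follow the form Model.association-column; a "managed" segment refers to
    # a tag category, which is reported as :string here.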
let(:string_custom_attribute) do
FactoryBot.create(:custom_attribute,
:name => "foo",
:value => "string",
:resource_type => 'ExtManagementSystem')
end
let(:date_custom_attribute) do
FactoryBot.create(:custom_attribute,
:name => "foo",
:value => DateTime.current,
:resource_type => 'ExtManagementSystem')
end
it "with model-field__with_pivot_table_suffix" do
@field = "Vm-name__pv"
expect(subject).to eq(described_class.get_col_type("Vm-name"))
end
it "with custom attribute without value_type" do
string_custom_attribute
@field = "ExtManagementSystem-#{CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX}foo"
expect(subject).to eq(:string)
end
it "with custom attribute with value_type" do
date_custom_attribute
@field = "ExtManagementSystem-#{CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX}foo"
expect(subject).to eq(:datetime)
end
it "with model.managed-in_field" do
@field = "Vm.managed-service_level"
expect(subject).to eq(:string)
end
it "with model.last.managed-in_field" do
@field = "Vm.host.managed-environment"
expect(subject).to eq(:string)
end
it "with valid model-in_field" do
@field = "Vm-cpu_limit"
expect(subject).to eq(:integer)
end
it "with invalid model-in_field" do
@field = "abc-name"
expect(subject).to be_nil
end
it "with valid model.association-in_field" do
@field = "Vm.guest_applications-vendor"
expect(subject).to eq(:string)
end
it "with invalid model.association-in_field" do
@field = "abc.host-name"
expect(subject).to be_nil
end
it "with model-invalid_field" do
@field = "Vm-abc"
expect(subject).to be_nil
end
it "with field without model" do
@field = "storage"
expect(subject).to be_nil
end
end
describe ".model_details" do
before do
# tags contain the root tenant's name
Tenant.seed
cat = FactoryBot.create(:classification,
:description => "Auto Approve - Max CPU",
:name => "prov_max_cpu",
:single_value => true,
:show => true,
)
cat.add_entry(:description => "1",
:read_only => "0",
:syntax => "string",
:name => "1",
:example_text => nil,
:default => true,
:single_value => "1"
)
end
context "with :typ=>tag" do
it "VmInfra" do
result = described_class.model_details("ManageIQ::Providers::InfraManager::Vm", :typ => "tag", :include_model => true, :include_my_tags => true, :userid => "admin")
expect(result.map(&:first)).to include("Virtual Machine.My Company Tags : Auto Approve - Max CPU")
end
it "VmCloud" do
result = described_class.model_details("ManageIQ::Providers::CloudManager::Vm", :typ => "tag", :include_model => true, :include_my_tags => true, :userid => "admin")
expect(result.map(&:first)).to include("Instance.My Company Tags : Auto Approve - Max CPU")
expect(result.map(&:first)).not_to include("Instance.VM and Instance.My Company Tags : Auto Approve - Max CPU")
end
it "VmOrTemplate" do
result = described_class.model_details("VmOrTemplate",
:typ => "tag",
:include_model => true,
:include_my_tags => true,
:userid => "admin"
)
expect(result.map(&:first)).to include("VM or Template.My Company Tags : Auto Approve - Max CPU")
end
it "TemplateInfra" do
result = described_class.model_details("ManageIQ::Providers::InfraManager::Template", :typ => "tag", :include_model => true, :include_my_tags => true, :userid => "admin")
expect(result.map(&:first)).to include("Template.My Company Tags : Auto Approve - Max CPU")
end
it "TemplateCloud" do
result = described_class.model_details("ManageIQ::Providers::CloudManager::Template", :typ => "tag", :include_model => true, :include_my_tags => true, :userid => "admin")
expect(result.map(&:first)).to include("Image.My Company Tags : Auto Approve - Max CPU")
end
it "MiqTemplate" do
result = described_class.model_details("MiqTemplate", :typ => "tag", :include_model => true, :include_my_tags => true, :userid => "admin")
expect(result.map(&:first)).to include("VM Template and Image.My Company Tags : Auto Approve - Max CPU")
end
it "EmsInfra" do
result = described_class.model_details("ManageIQ::Providers::InfraManager", :typ => "tag", :include_model => true, :include_my_tags => true, :userid => "admin")
expect(result.map(&:first)).to include("Infrastructure Provider.My Company Tags : Auto Approve - Max CPU")
end
it "EmsCloud" do
result = described_class.model_details("ManageIQ::Providers::CloudManager", :typ => "tag", :include_model => true, :include_my_tags => true, :userid => "admin")
expect(result.map(&:first)).to include("Cloud Provider.My Company Tags : Auto Approve - Max CPU")
end
end
context "with :typ=>all" do
it "VmOrTemplate" do
result = described_class.model_details("VmOrTemplate",
:typ => "all",
:include_model => false,
:include_tags => true)
expect(result.map(&:first)).to include("My Company Tags : Auto Approve - Max CPU")
end
it "Service" do
result = described_class.model_details("Service", :typ => "all", :include_model => false, :include_tags => true)
expect(result.map(&:first)).to include("My Company Tags : Auto Approve - Max CPU")
end
it "Supports classes derived form ActsAsArModel" do
result = described_class.model_details("ChargebackVm", :typ => "all", :include_model => false, :include_tags => true)
expect(result.map(&:first)[0]).to eq(" CPU Total Cost")
end
end
context "with :include_id_columns" do
it "Vm" do
result = described_class.model_details("Vm", :include_id_columns => true)
expect(result.map(&:second)).to include("Vm-id", "Vm-host_id", "Vm.host-id")
end
end
end
context ".build_relats" do
it "AvailabilityZone" do
result = described_class.build_relats("AvailabilityZone")
expect(result.fetch_path(:reflections, :ext_management_system, :parent, :class_path).split(".").last).to eq("manageiq_providers_cloud_manager")
expect(result.fetch_path(:reflections, :ext_management_system, :parent, :assoc_path).split(".").last).to eq("ext_management_system")
end
it "VmInfra" do
result = described_class.build_relats("ManageIQ::Providers::InfraManager::Vm")
expect(result.fetch_path(:reflections, :evm_owner, :parent, :class_path).split(".").last).to eq("evm_owner")
expect(result.fetch_path(:reflections, :evm_owner, :parent, :assoc_path).split(".").last).to eq("evm_owner")
expect(result.fetch_path(:reflections, :linux_initprocesses, :parent, :class_path).split(".").last).to eq("linux_initprocesses")
expect(result.fetch_path(:reflections, :linux_initprocesses, :parent, :assoc_path).split(".").last).to eq("linux_initprocesses")
end
it "Vm" do
result = described_class.build_relats("Vm")
expect(result.fetch_path(:reflections, :users, :parent, :class_path).split(".").last).to eq("users")
expect(result.fetch_path(:reflections, :users, :parent, :assoc_path).split(".").last).to eq("users")
end
it "OrchestrationStack" do
result = described_class.build_relats("ManageIQ::Providers::CloudManager::OrchestrationStack")
expect(result.fetch_path(:reflections, :vms, :parent, :class_path).split(".").last).to eq("manageiq_providers_cloud_manager_vms")
expect(result.fetch_path(:reflections, :vms, :parent, :assoc_path).split(".").last).to eq("vms")
end
end
describe ".determine_relat_path (private)" do
subject { described_class.send(:determine_relat_path, @ref) }
it "when association name is same as class name" do
@ref = Vm.reflect_on_association(:miq_group)
expect(subject).to eq(@ref.name.to_s)
end
it "when association name is different from class name" do
@ref = Vm.reflect_on_association(:evm_owner)
expect(subject).to eq(@ref.name.to_s)
end
context "when class name is a subclass of association name" do
it "one_to_one relation" do
@ref = AvailabilityZone.reflect_on_association(:ext_management_system)
expect(subject).to eq(@ref.klass.model_name.singular)
end
it "one_to_many relation" do
@ref = ManageIQ::Providers::CloudManager::OrchestrationStack.reflections_with_virtual[:vms]
expect(subject).to eq(@ref.klass.model_name.plural)
end
end
end
describe ".get_col_operators" do
subject { described_class.get_col_operators(@field) }
it "returns array of available operations if parameter is :count" do
@field = :count
expect(subject).to contain_exactly("=", "!=", "<", "<=", ">=", ">")
end
it "returns list of available operations if parameter is :regkey" do
@field = :regkey
expect(subject).to contain_exactly("=",
"STARTS WITH",
"ENDS WITH",
"INCLUDES",
"IS NULL",
"IS NOT NULL",
"IS EMPTY",
"IS NOT EMPTY",
"REGULAR EXPRESSION MATCHES",
"REGULAR EXPRESSION DOES NOT MATCH",
"KEY EXISTS",
"VALUE EXISTS")
end
it "returns list of available operations for field type 'string'" do
@field = "ManageIQ::Providers::InfraManager::Vm.advanced_settings-name"
expect(subject).to contain_exactly("=",
"STARTS WITH",
"ENDS WITH",
"INCLUDES",
"IS NULL",
"IS NOT NULL",
"IS EMPTY",
"IS NOT EMPTY",
"REGULAR EXPRESSION MATCHES",
"REGULAR EXPRESSION DOES NOT MATCH")
end
it "returns list of available operations for field type 'integer'" do
@field = "ManageIQ::Providers::InfraManager::Vm-cpu_limit"
expect(subject).to contain_exactly("=", "!=", "<", "<=", ">=", ">")
end
it "returns list of available operations for field type 'float'" do
@field = "Storage-v_provisioned_percent_of_total"
expect(subject).to contain_exactly("=", "!=", "<", "<=", ">=", ">")
end
=begin
# there is no example of fields with fixnum datatype available for expression builder
it "returns list of available operations for field type 'fixnum'" do
@field = ?
expect(subject).to eq(["=", "!=", "<", "<=", ">=", ">", "RUBY"])
end
=end
it "returns list of available operations for field type 'string_set'" do
@field = "ManageIQ::Providers::InfraManager::Vm-hostnames"
expect(subject).to contain_exactly("INCLUDES ALL", "INCLUDES ANY", "LIMITED TO")
end
it "returns list of available operations for field type 'numeric_set'" do
@field = "Host-all_enabled_ports"
expect(subject).to contain_exactly("INCLUDES ALL", "INCLUDES ANY", "LIMITED TO")
end
it "returns list of available operations for field type 'boolean'" do
@field = "ManageIQ::Providers::InfraManager::Vm-active"
expect(subject).to contain_exactly("=", "IS NULL", "IS NOT NULL")
end
it "returns list of available operations for field type 'date'" do
@field = "ManageIQ::Providers::InfraManager::Vm-retires_on"
expect(subject).to contain_exactly("IS", "BEFORE", "AFTER", "FROM", "IS EMPTY", "IS NOT EMPTY")
end
it "returns list of available operations for field type 'datetime'" do
@field = "ManageIQ::Providers::InfraManager::Vm-ems_created_on"
expect(subject).to contain_exactly("IS", "BEFORE", "AFTER", "FROM", "IS EMPTY", "IS NOT EMPTY")
end
it "returns list of available operations for field with not recognized type" do
@field = "Hello-world"
expect(subject).to contain_exactly("=",
"STARTS WITH",
"ENDS WITH",
"INCLUDES",
"IS NULL",
"IS NOT NULL",
"IS EMPTY",
"IS NOT EMPTY",
"REGULAR EXPRESSION MATCHES",
"REGULAR EXPRESSION DOES NOT MATCH")
end
end
describe ".get_col_info" do
it "return column info for missing model" do
field = "hostname"
col_info = described_class.get_col_info(field)
expect(col_info).to match(
:data_type => nil,
:excluded_by_preprocess_options => false,
:include => {},
:tag => false,
:sql_support => false,
)
end
it "return column info for model-virtual field" do
field = "VmInfra-uncommitted_storage"
col_info = described_class.get_col_info(field)
expect(col_info).to match(
:data_type => :integer,
:excluded_by_preprocess_options => false,
:format_sub_type => :bytes,
:include => {},
:tag => false,
:sql_support => false,
)
end
it "return column info for model-virtual field" do
field = "VmInfra-active"
col_info = described_class.get_col_info(field)
expect(col_info).to match(
:data_type => :boolean,
:excluded_by_preprocess_options => false,
:format_sub_type => :boolean,
:include => {},
:tag => false,
:sql_support => true,
)
end
it "return column info for model-invalid" do
field = "ManageIQ::Providers::InfraManager::Vm-invalid"
col_info = described_class.get_col_info(field)
expect(col_info).to match(
:data_type => nil,
:excluded_by_preprocess_options => false,
:format_sub_type => nil,
:include => {},
:tag => false,
:sql_support => false,
)
end
    # TODO: should this return the same results as a missing model?
it "return column info for managed-field" do
tag = "managed-location"
col_info = described_class.get_col_info(tag)
expect(col_info).to match(
:data_type => :string,
:excluded_by_preprocess_options => false,
:include => {},
:tag => true,
:sql_support => true,
)
end
it "return column info for model.managed-field" do
tag = "VmInfra.managed-operations"
col_info = described_class.get_col_info(tag)
expect(col_info).to match(
:data_type => :string,
:excluded_by_preprocess_options => false,
:include => {},
:tag => true,
:sql_support => true,
)
end
it "return column info for model.association.managed-field" do
tag = "Vm.host.managed-environment"
col_info = described_class.get_col_info(tag)
expect(col_info).to match(
:data_type => :string,
:excluded_by_preprocess_options => false,
:include => {},
:tag => true,
:sql_support => true,
)
end
it "return column info for model-field" do
field = "ManageIQ::Providers::InfraManager::Vm-cpu_limit"
col_info = described_class.get_col_info(field)
expect(col_info).to match(
:data_type => :integer,
:excluded_by_preprocess_options => false,
:format_sub_type => :integer,
:include => {},
:tag => false,
:sql_support => true,
)
end
it "return column info for model.association-field" do
field = "ManageIQ::Providers::InfraManager::Vm.guest_applications-vendor"
col_info = described_class.get_col_info(field)
expect(col_info).to match(
:data_type => :string,
:excluded_by_preprocess_options => false,
:format_sub_type => :string,
:include => {:guest_applications => {}},
:tag => false,
:sql_support => true,
)
end
it "return column info for model.virtualassociation..virtualassociation-field (with sql)" do
field = "ManageIQ::Providers::InfraManager::Vm.service.user.vms-uncommitted_storage"
col_info = described_class.get_col_info(field)
expect(col_info).to match(
:data_type => :integer,
:excluded_by_preprocess_options => false,
:format_sub_type => :bytes,
:include => {},
:tag => false,
:sql_support => false,
)
end
it "return column info for model.virtualassociation..virtualassociation-invalid" do
field = "ManageIQ::Providers::InfraManager::Vm.service.user.vms-invalid"
col_info = described_class.get_col_info(field)
expect(col_info).to match(
:data_type => nil,
:excluded_by_preprocess_options => false,
:format_sub_type => nil,
:include => {},
:tag => false,
:sql_support => false,
)
end
it "return column info for model.invalid-active" do
field = "ManageIQ::Providers::InfraManager::Vm.invalid-active"
col_info = described_class.get_col_info(field)
expect(col_info).to match(
:data_type => nil,
:excluded_by_preprocess_options => false,
:include => {},
:tag => false,
:sql_support => false,
)
end
it "return column info for model.virtualassociation..virtualassociation-field (with sql)" do
field = "ManageIQ::Providers::InfraManager::Vm.service.user.vms-active"
col_info = described_class.get_col_info(field)
expect(col_info).to match(
:data_type => :boolean,
:excluded_by_preprocess_options => false,
:format_sub_type => :boolean,
:include => {},
:tag => false,
:sql_support => false,
)
end
end
describe "#sql_supports_atom?" do
context "expression key is 'CONTAINS'" do
context "operations with 'tag'" do
it "returns true for tag of the main model" do
expression = {"CONTAINS" => {"tag" => "VmInfra.managed-operations", "value" => "analysis_failed"}}
expect(described_class.new(nil).sql_supports_atom?(expression)).to eq(true)
end
it "returns false for tag of associated model" do
field = "Vm.ext_management_system.managed-openshiftroles"
expression = {"CONTAINS" => {"tag" => field, "value" => "node"}}
expect(described_class.new(nil).sql_supports_atom?(expression)).to eq(false)
end
end
context "operation with 'field'" do
it "returns false if format of field is model.association..association-field" do
field = "ManageIQ::Providers::InfraManager::Vm.service.user.vms-active"
expression = {"CONTAINS" => {"field" => field, "value" => "true"}}
expect(described_class.new(nil).sql_supports_atom?(expression)).to eq(false)
end
it "returns false if field belongs to virtual_has_many association" do
field = "ManageIQ::Providers::InfraManager::Vm.processes-type"
expression = {"CONTAINS" => {"field" => field, "value" => "abc"}}
expect(described_class.new(nil).sql_supports_atom?(expression)).to eq(false)
end
it "returns false if field belongs to 'has_and_belongs_to_many' association" do
field = "ManageIQ::Providers::InfraManager::Vm.storages-name"
expression = {"CONTAINS" => {"field" => field, "value" => "abc"}}
expect(described_class.new(nil).sql_supports_atom?(expression)).to eq(false)
end
it "returns false if field belongs to 'has_many' polymorhic/polymorhic association" do
field = "ManageIQ::Providers::InfraManager::Vm.advanced_settings-region_number"
expression = {"CONTAINS" => {"field" => field, "value" => "1"}}
expect(described_class.new(nil).sql_supports_atom?(expression)).to eq(false)
end
it "returns true if field belongs to 'has_many' association" do
field = "ManageIQ::Providers::InfraManager::Vm.registry_items-name"
expression = {"CONTAINS" => {"field" => field, "value" => "abc"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(true)
end
end
end
context "expression key is 'INCLUDE'" do
it "returns false for model-virtualfield" do
field = "ManageIQ::Providers::InfraManager::Vm-v_datastore_path"
expression = {"INCLUDES" => {"field" => field, "value" => "abc"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(false)
end
it "returns true for model-field" do
field = "ManageIQ::Providers::InfraManager::Vm-location"
expression = {"INCLUDES" => {"field" => field, "value" => "abc"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(true)
end
it "returns false for model.association.virtualfield" do
field = "ManageIQ::Providers::InfraManager::Vm.ext_management_system-hostname"
expression = {"INCLUDES" => {"field" => field, "value" => "abc"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(false)
end
it "returns true for model.accociation.field" do
field = "ManageIQ::Providers::InfraManager::Vm.ext_management_system-name"
expression = {"INCLUDES" => {"field" => field, "value" => "abc"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(true)
end
it "returns false if format of field is model.association..association-field" do
field = "ManageIQ::Providers::InfraManager::Vm.service.miq_request-v_approved_by"
expression = {"INCLUDES" => {"field" => field, "value" => "abc"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(false)
end
end
it "returns false if expression key is 'FIND'" do
expect(described_class.new(nil).sql_supports_atom?("FIND" => {})).to eq(false)
end
it "returns false if expression key is 'REGULAR EXPRESSION MATCHES'" do
field = "ManageIQ::Providers::InfraManager::Vm-name"
expression = {"REGULAR EXPRESSION MATCHES" => {"filed" => field, "value" => "\w+"}}
expect(described_class.new(nil).sql_supports_atom?(expression)).to eq(false)
end
it "returns false if expression key is 'REGULAR EXPRESSION DOES NOT MATCH'" do
field = "ManageIQ::Providers::InfraManager::Vm-name"
expression = {"REGULAR EXPRESSION DOES NOT MATCH" => {"filed" => field, "value" => "\w+"}}
expect(described_class.new(nil).sql_supports_atom?(expression)).to eq(false)
end
it "returns false if expression key is not 'CONTAINS' and operand is 'TAG'" do
      # The UI does not allow creating this kind of expression:
expression = {"=" => {"tag" => "Vm-vendor"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(false)
end
it "returns false if operand is'COUNT' on model.association" do
association = "ManageIQ::Providers::InfraManager::Vm.users"
expression = {">" => {"count" => association, "value" => "10"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(false)
end
it "supports sql for model.association-virtualfield (with arel)" do
field = "Host.vms-archived"
expression = {"=" => {"field" => field, "value" => "true"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(true)
end
it "does not supports sql for model.association-virtualfield (no arel)" do
field = "ManageIQ::Providers::InfraManager::Vm.storage-v_used_space"
expression = {">=" => {"field" => field, "value" => "50"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(false)
end
it "returns true for model-field" do
field = "ManageIQ::Providers::InfraManager::Vm-vendor"
expression = {"=" => {"field" => field, "value" => "redhat"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(true)
end
it "returns true for model.assoctiation-field" do
field = "ManageIQ::Providers::InfraManager::Vm.ext_management_system-name"
expression = {"STARTS WITH" => {"field" => field, "value" => "abc"}}
expect(described_class.new(expression).sql_supports_atom?(expression)).to eq(true)
end
it "returns false if column excluded from processing for adhoc performance metrics" do
field = "EmsClusterPerformance-cpu_usagemhz_rate_average"
expression = {">=" => {"field" => field, "value" => "0"}}
obj = described_class.new(expression)
obj.preprocess_options = {:vim_performance_daily_adhoc => true}
expect(obj.sql_supports_atom?(expression)).to eq(false)
end
it "returns true if column is not excluded from processing for adhoc performance metrics" do
field = "EmsClusterPerformance-derived_cpu_available"
expression = {">=" => {"field" => field, "value" => "0"}}
obj = described_class.new(expression)
obj.preprocess_options = {:vim_performance_daily_adhoc => true}
expect(obj.sql_supports_atom?(expression)).to eq(true)
end
end
describe "#field_in_sql?" do
it "returns true for model.virtualfield (with sql)" do
field = "ManageIQ::Providers::InfraManager::Vm-archived"
expression = {"=" => {"field" => field, "value" => "true"}}
expect(described_class.new(expression).field_in_sql?(field)).to eq(true)
end
it "returns false for model.virtualfield (with no sql)" do
field = "ManageIQ::Providers::InfraManager::Vm-uncommitted_storage"
expression = {"=" => {"field" => field, "value" => "true"}}
expect(described_class.new(expression).field_in_sql?(field)).to eq(false)
end
it "returns false for model.association-virtualfield" do
field = "ManageIQ::Providers::InfraManager::Vm.storage-v_used_space_percent_of_total"
expression = {">=" => {"field" => field, "value" => "50"}}
expect(described_class.new(expression).field_in_sql?(field)).to eq(false)
end
it "returns true for model-field" do
field = "ManageIQ::Providers::InfraManager::Vm-vendor"
expression = {"=" => {"field" => field, "value" => "redhat"}}
expect(described_class.new(expression).field_in_sql?(field)).to eq(true)
end
it "returns true for model.association-field" do
field = "ManageIQ::Providers::InfraManager::Vm.guest_applications-vendor"
expression = {"CONTAINS" => {"field" => field, "value" => "redhat"}}
expect(described_class.new(expression).field_in_sql?(field)).to eq(true)
end
it "returns false if column excluded from processing for adhoc performance metrics" do
field = "EmsClusterPerformance-cpu_usagemhz_rate_average"
expression = {">=" => {"field" => field, "value" => "0"}}
obj = described_class.new(expression)
obj.preprocess_options = {:vim_performance_daily_adhoc => true}
expect(obj.field_in_sql?(field)).to eq(false)
end
it "returns true if column not excluded from processing for adhoc performance metrics" do
field = "EmsClusterPerformance-derived_cpu_available"
expression = {">=" => {"field" => field, "value" => "0"}}
obj = described_class.new(expression)
obj.preprocess_options = {:vim_performance_daily_adhoc => true}
expect(obj.field_in_sql?(field)).to eq(true)
end
end
describe "#evaluate" do
before do
@data_hash = {"guest_applications.name" => "VMware Tools",
"guest_applications.vendor" => "VMware, Inc."}
end
it "returns true if expression evaluated to value equal to value in supplied hash" do
expression = {"=" => {"field" => "Vm.guest_applications-name",
"value" => "VMware Tools"}}
obj = described_class.new(expression, "hash")
expect(obj.evaluate(@data_hash)).to eq(true)
end
it "returns false if expression evaluated to value not equal to value in supplied hash" do
expression = {"=" => {"field" => "Vm.guest_applications-name",
"value" => "Hello"}}
obj = described_class.new(expression, "hash")
expect(obj.evaluate(@data_hash)).to eq(false)
end
it "returns true if expression is regex and there is match in supplied hash" do
expression = {"REGULAR EXPRESSION MATCHES" => {"field" => "Vm.guest_applications-vendor",
"value" => "/^[^.]*ware.*$/"}}
obj = described_class.new(expression, "hash")
expect(obj.evaluate(@data_hash)).to eq(true)
end
it "returns false if expression is regex and there is no match in supplied hash" do
expression = {"REGULAR EXPRESSION MATCHES" => {"field" => "Vm.guest_applications-vendor",
"value" => "/^[^.]*hello.*$/"}}
obj = described_class.new(expression, "hash")
expect(obj.evaluate(@data_hash)).to eq(false)
end
end
describe ".evaluate_atoms" do
it "adds mapping 'result'=>false to expression if expression evaluates to false on supplied object" do
expression = {">=" => {"field" => "Vm-num_cpu",
"value" => "2"}}
result = described_class.evaluate_atoms(expression, FactoryBot.build(:vm))
expect(result).to include(
">=" => {"field" => "Vm-num_cpu",
"value" => "2"},
"result" => false)
end
end
describe ".operands2rubyvalue" do
RSpec.shared_examples :coerces_value_to_integer do |value|
it 'coerces the value to an integer' do
expect(subject.last).to eq(0)
end
end
let(:operator) { ">" }
subject do
described_class.operands2rubyvalue(operator, ops, nil)
end
context "when ops field equals count" do
let(:ops) { {"field" => "<count>", "value" => "foo"} }
include_examples :coerces_value_to_integer
end
context "when ops key is count" do
let(:ops) do
{
"count" => "ManageIQ::Providers::InfraManager::Vm.advanced_settings",
"value" => "foo"
}
end
include_examples :coerces_value_to_integer
end
end
describe "#fields" do
it "extracts fields" do
expression = {
"AND" => [
{">=" => {"field" => "EmsClusterPerformance-cpu_usagemhz_rate_average", "value" => "0"}},
{"<" => {"field" => "Vm-name", "value" => 5}}
]
}
actual = described_class.new(expression).fields.sort_by(&:column)
expect(actual).to contain_exactly(
an_object_having_attributes(:model => EmsClusterPerformance, :column => "cpu_usagemhz_rate_average"),
an_object_having_attributes(:model => Vm, :column => "name")
)
end
it "extracts tags" do
expression = {
"AND" => [
{">=" => {"field" => "EmsClusterPerformance-cpu_usagemhz_rate_average", "value" => "0"}},
{"<" => {"field" => "Vm.managed-favorite_color", "value" => "5"}}
]
}
actual = described_class.new(expression).fields
expect(actual).to contain_exactly(
an_object_having_attributes(:model => EmsClusterPerformance, :column => "cpu_usagemhz_rate_average"),
an_object_having_attributes(:model => Vm, :namespace => "/managed/favorite_color")
)
end
it "extracts values" do
expression =
{">=" => {"field" => "EmsClusterPerformance-cpu_usagemhz_rate_average", "value" => "Vm.managed-favorite_color"}}
actual = described_class.new(expression).fields
expect(actual).to contain_exactly(
an_object_having_attributes(:model => EmsClusterPerformance, :column => "cpu_usagemhz_rate_average"),
an_object_having_attributes(:model => Vm, :namespace => "/managed/favorite_color")
)
end
end
describe "#set_tagged_target" do
it "will substitute a new class into the expression" do
expression = described_class.new("CONTAINS" => {"tag" => "managed-environment", "value" => "prod"})
expression.set_tagged_target(Vm)
expect(expression.exp).to eq("CONTAINS" => {"tag" => "Vm.managed-environment", "value" => "prod"})
end
it "will substitute a new class and associations into the expression" do
expression = described_class.new("CONTAINS" => {"tag" => "managed-environment", "value" => "prod"})
expression.set_tagged_target(Vm, ["host"])
expect(expression.exp).to eq("CONTAINS" => {"tag" => "Vm.host.managed-environment", "value" => "prod"})
end
it "can handle OR expressions" do
expression = described_class.new(
"OR" => [
{"CONTAINS" => {"tag" => "managed-environment", "value" => "prod"}},
{"CONTAINS" => {"tag" => "managed-location", "value" => "ny"}}
]
)
expression.set_tagged_target(Vm)
expected = {
"OR" => [
{"CONTAINS" => {"tag" => "Vm.managed-environment", "value" => "prod"}},
{"CONTAINS" => {"tag" => "Vm.managed-location", "value" => "ny"}}
]
}
expect(expression.exp).to eq(expected)
end
it "can handle AND expressions" do
expression = described_class.new(
"AND" => [
{"CONTAINS" => {"tag" => "managed-environment", "value" => "prod"}},
{"CONTAINS" => {"tag" => "managed-location", "value" => "ny"}}
]
)
expression.set_tagged_target(Vm)
expected = {
"AND" => [
{"CONTAINS" => {"tag" => "Vm.managed-environment", "value" => "prod"}},
{"CONTAINS" => {"tag" => "Vm.managed-location", "value" => "ny"}}
]
}
expect(expression.exp).to eq(expected)
end
it "can handle NOT expressions" do
expression = described_class.new("NOT" => {"CONTAINS" => {"tag" => "managed-environment", "value" => "prod"}})
expression.set_tagged_target(Vm)
expected = {"NOT" => {"CONTAINS" => {"tag" => "Vm.managed-environment", "value" => "prod"}}}
expect(expression.exp).to eq(expected)
end
it "will not change the target of fields" do
expression = described_class.new("=" => {"field" => "Vm-vendor", "value" => "redhat"})
expression.set_tagged_target(Host)
expect(expression.exp).to eq("=" => {"field" => "Vm-vendor", "value" => "redhat"})
end
it "will not change the target of counts" do
expression = described_class.new("=" => {"count" => "Vm.disks", "value" => "1"})
expression.set_tagged_target(Host)
expect(expression.exp).to eq("=" => {"count" => "Vm.disks", "value" => "1"})
end
end
describe ".tag_details" do
before do
described_class.instance_variable_set(:@classifications, nil)
end
it "returns the tags when no path is given" do
Tenant.seed
FactoryBot.create(
:classification,
:name => "env",
:description => "Environment",
:children => [FactoryBot.create(:classification)]
)
actual = described_class.tag_details(nil, {})
expect(actual).to eq([["My Company Tags : Environment", "managed-env"]])
end
it "returns the added classification when no_cache option is used" do
Tenant.seed
FactoryBot.create(:classification,
:name => "first_classification",
:description => "First Classification",
:children => [FactoryBot.create(:classification)])
actual = described_class.tag_details(nil, {})
expect(actual).to eq([["My Company Tags : First Classification", "managed-first_classification"]])
FactoryBot.create(:classification,
:name => "second_classification",
:description => "Second Classification",
:children => [FactoryBot.create(:classification)])
actual = described_class.tag_details(nil, :no_cache => true)
expect(actual).to eq([["My Company Tags : First Classification", "managed-first_classification"], ["My Company Tags : Second Classification", "managed-second_classification"]])
end
end
describe "miq_adv_search_lists" do
it ":exp_available_counts" do
result = described_class.miq_adv_search_lists(Vm, :exp_available_counts)
expect(result.map(&:first)).to include(" VM and Instance.Users")
end
it ":exp_available_finds" do
result = described_class.miq_adv_search_lists(Vm, :exp_available_finds)
expect(result.map(&:first)).to include("VM and Instance.Provisioned VMs : Href Slug")
expect(result.map(&:first)).not_to include("VM and Instance : Id")
end
it ":exp_available_fields with include_id_columns" do
result = described_class.miq_adv_search_lists(Vm, :exp_available_fields, :include_id_columns => true)
expect(result.map(&:first)).to include("VM and Instance : Id")
end
end
end
| 49.465633 | 232 | 0.601586 |
01371b4422a8cd602721627495c27c53265cac3c | 961 | # The Book of Ruby - http://www.sapphiresteel.com
# illustrates how to read and write instance variables
# using accessor methods
class Thing
def initialize( aName, aDescription )
@name = aName
@description = aDescription
end
# get accessor for @name
def name
return @name
end
# set accessor for @name
def name=( aName )
@name = aName
end
# get accessor for @description
def description
return @description
end
# set accessor for @description
def description=( aDescription )
@description = aDescription
end
end
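# attr_accessor generates an equivalent reader and writer for each attribute,
# so the four accessor methods above can be replaced with a single line.
# CompactThing is just an illustrative name for this shorter variant.
class CompactThing
  attr_accessor :name, :description
  def initialize( aName, aDescription )
    @name = aName
    @description = aDescription
  end
end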
t = Thing.new("The Thing", "a lovely, glittery wotsit")
print( t.name )
print( " is " )
puts( t.description )
t.name = "A Refurbished Thing"
t.description = "a bit faded and worn around the edges"
print( "It has now changed its name to " )
puts( t.name )
print( "I would describe it as " )
puts( t.description ) | 21.840909 | 56 | 0.631634 |
1dbf630f53cdb091914611233429d6575eee2177 | 1,141 | module User
module Academics
class EnrollmentTermInstruction
attr_reader :user
attr_reader :term_id
def initialize(user, term_id)
@user = user
@term_id = term_id
end
def enrollment_periods
@enrollment_periods ||= EnrollmentPeriods.new(enrollment_period_data)
end
def enrollment_careers
@enrollment_careers ||= EnrollmentCareers.new(enrollment_careers_data)
end
def as_json(options={})
{
user: user.uid,
term_id: term_id,
enrollment_periods: enrollment_periods.as_json,
enrollment_careers: enrollment_careers.as_json
}
end
private
def enrollment_period_data
data[:enrollmentTerm][:enrollmentPeriod] || []
rescue NoMethodError
[]
end
def enrollment_careers_data
data[:enrollmentTerm][:careers] || []
rescue NoMethodError
[]
end
def data
@data ||= ::CampusSolutions::EnrollmentTerm.new({
user_id: user.uid,
term_id: term_id
}).get[:feed]
end
end
end
end
| 21.942308 | 78 | 0.60298 |
ff9339eb33c9ad759be07e79f21a0d86dacfc125 | 175 | require 'resque'
require 'resque/scheduler/server'
Rails.application.routes.draw do
root "checks#index"
resources :checks
mount Resque::Server.new, at: '/resque'
end
| 15.909091 | 41 | 0.742857 |
1af1190ff9d971808c7862340bcac5e262c44d17 | 516 | cask 'stand' do
version '1.0_4'
sha256 '8919e43c9c591657d8d6961b25e8dc5f77d706d71eb246839be22522a82bb0ec'
# get-stand-app.s3.amazonaws.com was verified as official when first introduced to the cask
url "https://get-stand-app.s3.amazonaws.com/#{version.sub(%r{.*_}, '')}/Stand.zip"
appcast 'https://standapp-sparkle-updater.herokuapp.com/',
checkpoint: 'e8440576a4be7c429f6e1a1235992ddc259389767a7127c5ee2968c59c83584f'
name 'Stand'
homepage 'https://getstandapp.com/'
app 'Stand.app'
end
| 36.857143 | 93 | 0.753876 |
91387269eb4204a40d7afea087de92916396a779 | 3,854 | class Arangodb < Formula
desc "Multi-Model NoSQL Database"
homepage "https://www.arangodb.com/"
url "https://download.arangodb.com/Source/ArangoDB-3.9.0.tar.bz2"
sha256 "a6fb06bdfcaa8884d8a060e4aa1164d94db12bf2df332a2e44b2de2283204bca"
license "Apache-2.0"
head "https://github.com/arangodb/arangodb.git", branch: "devel"
livecheck do
url "https://www.arangodb.com/download-major/source/"
regex(/href=.*?ArangoDB[._-]v?(\d+(?:\.\d+)+)(-\d+)?\.t/i)
end
bottle do
sha256 monterey: "3e642fea46662626f97b4d4f40dc1bc0b7e3684f5904659f20340a32ea21e510"
sha256 big_sur: "5e18ae6110ed7cd419acba77be9e722c21bb70a4cfc3dc1a5d1052e5debe7615"
sha256 catalina: "e2eb992537c33e30ca4294b03060f11e86dafb70563e0cc2aa9ab761e389e180"
sha256 x86_64_linux: "02c8b6c395f786324e6f262382eb1d95d52a161af93ede1a79f62ac27eb77714"
end
depends_on "ccache" => :build
depends_on "cmake" => :build
depends_on "[email protected]" => :build
depends_on "[email protected]" => :build
depends_on macos: :mojave
depends_on "[email protected]"
on_linux do
depends_on "gcc"
end
fails_with gcc: "5"
# the ArangoStarter is in a separate github repository;
# it is used to easily start single server and clusters
# with a unified CLI
resource "starter" do
url "https://github.com/arangodb-helper/arangodb.git",
tag: "0.15.3",
revision: "814f8be9e5cc613a63ac1dc161b879ccb7ec23e0"
end
def install
ENV["MACOSX_DEPLOYMENT_TARGET"] = MacOS.version if OS.mac?
resource("starter").stage do
ENV["GO111MODULE"] = "on"
ENV["DOCKERCLI"] = ""
ldflags = %W[
-s -w
-X main.projectVersion=#{resource("starter").version}
-X main.projectBuild=#{Utils.git_head}
]
system "go", "build", *std_go_args(ldflags: ldflags), "github.com/arangodb-helper/arangodb"
end
mkdir "build" do
openssl = Formula["[email protected]"]
args = std_cmake_args + %W[
-DHOMEBREW=ON
-DCMAKE_BUILD_TYPE=RelWithDebInfo
-DUSE_MAINTAINER_MODE=Off
-DUSE_JEMALLOC=Off
-DCMAKE_SKIP_RPATH=On
-DOPENSSL_USE_STATIC_LIBS=On
-DCMAKE_LIBRARY_PATH=#{openssl.opt_lib}
-DOPENSSL_ROOT_DIR=#{openssl.opt_lib}
-DCMAKE_OSX_DEPLOYMENT_TARGET=#{MacOS.version}
-DTARGET_ARCHITECTURE=nehalem
-DUSE_CATCH_TESTS=Off
-DUSE_GOOGLE_TESTS=Off
-DCMAKE_INSTALL_LOCALSTATEDIR=#{var}
]
ENV["V8_CXXFLAGS"] = "-O3 -g -fno-delete-null-pointer-checks" if ENV.compiler == "gcc-6"
system "cmake", "..", *args
system "make", "install"
end
end
def post_install
(var/"lib/arangodb3").mkpath
(var/"log/arangodb3").mkpath
end
def caveats
<<~EOS
An empty password has been set. Please change it by executing
#{opt_sbin}/arango-secure-installation
EOS
end
service do
run opt_sbin/"arangod"
keep_alive true
end
test do
require "pty"
testcase = "require('@arangodb').print('it works!')"
output = shell_output("#{bin}/arangosh --server.password \"\" --javascript.execute-string \"#{testcase}\"")
assert_equal "it works!", output.chomp
ohai "#{bin}/arangodb --starter.instance-up-timeout 1m --starter.mode single"
PTY.spawn("#{bin}/arangodb", "--starter.instance-up-timeout", "1m",
"--starter.mode", "single", "--starter.disable-ipv6",
"--server.arangod", "#{sbin}/arangod",
"--server.js-dir", "#{share}/arangodb3/js") do |r, _, pid|
loop do
available = r.wait_readable(60)
refute_equal available, nil
line = r.readline.strip
ohai line
break if line.include?("Your single server can now be accessed")
end
ensure
Process.kill "SIGINT", pid
ohai "shutting down #{pid}"
end
end
end
| 30.587302 | 111 | 0.656201 |
62f64472f158b8ebf4cc651405db8cdc82f3e0c8 | 448 | # Definition for a binary tree node.
# class TreeNode
# attr_accessor :val, :left, :right
# def initialize(val)
# @val = val
# @left, @right = nil, nil
# end
# end
# @param {TreeNode} root
# @return {Integer[]}
def postorder_traversal(root)
ans = []
def traverse(node, ans)
unless node.nil?
traverse(node.left, ans)
traverse(node.right, ans)
ans.push(node.val)
end
end
traverse(root, ans)
ans
end
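# Usage sketch: TreeNode below is a minimal stand-in for the commented-out
# class in the header, defined here only so this file can be run on its own.
if __FILE__ == $PROGRAM_NAME
  TreeNode = Struct.new(:val, :left, :right)
  # Tree: 1 has a right child 2, and 2 has a left child 3.
  root = TreeNode.new(1, nil, TreeNode.new(2, TreeNode.new(3, nil, nil), nil))
  p postorder_traversal(root) # => [3, 2, 1]
end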
| 18.666667 | 37 | 0.622768 |
b9f5050e6ecd1aceaf330061be009c16b02b6492 | 6,817 | # rubocop:disable Style/GlobalVars
require 'rubygems'
require 'fileutils'
require 'rufus-scheduler'
# Pupistry::Agent
module Pupistry
# Functions for running the Pupistry agent aka "apply mode" to actually
# download and run Puppet against the contents of the artifact.
class Agent
## Run as a daemon
def self.daemon(options)
# Since options comes from Thor, it can't be modified, so we need to
# copy the options and then we can edit it.
options_new = options.inject({}) do |new, (name, value)|
new[name] = value
new
end
# If the minimal mode has been enabled in config, respect.
options_new[:minimal] = true if $config['agent']['daemon_minimal']
# If no frequency supplied, use 300 seconds safe default.
$config['agent']['daemon_frequency'] = 300 unless $config['agent']['daemon_frequency']
# Use rufus-scheduler to run our apply job as a regularly scheduled job
      # but with built-in lock handling.
$logger.info "Launching daemon... frequency of #{$config['agent']['daemon_frequency']} seconds."
begin
scheduler = Rufus::Scheduler.new
scheduler.every "#{$config['agent']['daemon_frequency']}s", overlap: false, timeout: '1d', first_at: Time.now + 1 do
$logger.info "Triggering another Pupistry run (#{$config['agent']['daemon_frequency']}s)"
apply options_new
end
scheduler.join
rescue Rufus::Scheduler::TimeoutError
$logger.error 'A run of Pupistry timed out after 1 day as a safety measure. There may be a bug or a Puppet action causing it to get stuck'
rescue SignalException
# Clean shutdown signal (eg SIGTERM)
        $logger.info 'Clean shutdown of Pupistry daemon requested'
exit 0
rescue StandardError => e
raise e
end
end
def self.apply(options)
## Download and apply the latest artifact (if any)
# Fetch artifact versions
$logger.info 'Checking version of artifact available...'
artifact = Pupistry::Artifact.new
artifact.checksum = artifact.fetch_latest
unless artifact.checksum
$logger.error 'There is no current artifact available for download, no steps can be taken.'
return false
end
artifact_installed = Pupistry::Artifact.new
artifact_installed.checksum = artifact_installed.fetch_installed
if artifact_installed.checksum
$logger.debug "Currently on #{artifact_installed.checksum}"
else
$logger.debug 'No currently installed artifact - blank slate!'
end
# Download the new artifact if one has changed. If we already have this
# version, then we should skip downloading and go straight to running
# Puppet - unless the user runs with --force (eg to fix a corrupted
# artifact).
if artifact.checksum != artifact_installed.checksum || options[:force]
$logger.warn 'Forcing download of latest artifact regardless of current one.' if options[:force]
# Install the artifact
$logger.info "Downloading latest artifact (#{artifact.checksum})..."
artifact.fetch_artifact
artifact.unpack
artifact.hieracrypt_decrypt
unless artifact.install
$logger.fatal 'An unexpected error happened when installing the latest artifact, cancelling Puppet run'
return false
end
# Remove temporary unpacked files
artifact.clean_unpack
else
$logger.info 'Already have latest artifact applied.'
      # By default we run Puppet even if we have the latest artifact. There are
      # some grounds for debate about whether this is the right thing - in some
      # ways it is often a waste of CPU, since if the artifact hasn't changed,
      # then it's unlikely anything else has changed.
      #
      # But that's not always 100% true - Puppet will undo local changes or
      # upgrade package versions (ensure => latest) if appropriate, so we should
      # act like the standard command and attempt to apply whatever we can.
      #
      # To provide users with options, we provide the --lazy parameter to avoid
      # running Puppet except when the artifact changes. By default, Puppet
      # runs every time to avoid surprises.
if options[:minimal]
$logger.info 'Running with minimal effort mode enabled, not running Puppet since artifact version already applied'
return false
end
end
# If the environment has been specified, use it.
environment = $config['agent']['environment'] || 'master'
# override if environment is supplied on CLI
environment = options["environment"] || environment
unless Dir.exist?("#{$config['agent']['puppetcode']}/#{environment}")
$logger.fatal "The requested branch/environment of #{environment} does not exist, unable to run Puppet"
return false
end
# Execute Puppet.
puppet_cmd = 'puppet apply'
puppet_cmd += ' --noop' if options[:noop]
puppet_cmd += ' --show_diff' if options[:verbose]
puppet_cmd += " --environment #{environment}"
puppet_cmd += " --confdir #{$config['agent']['puppetcode']}"
puppet_cmd += " --environmentpath #{$config['agent']['puppetcode']}"
puppet_cmd += " --modulepath #{build_modulepath(environment)}"
puppet_cmd += " --hiera_config #{$config['agent']['puppetcode']}/#{environment}/hiera.yaml"
puppet_cmd += " #{$config['agent']['puppetcode']}/#{environment}/manifests/"
$logger.info 'Executing Puppet...'
$logger.debug "With: #{puppet_cmd}"
      $logger.error 'An unexpected issue occurred when running puppet' unless system puppet_cmd
end
def self.build_modulepath(environment)
environment_path = "#{$config['agent']['puppetcode']}/#{environment}"
environment_conf = "#{environment_path}/environment.conf"
configured_paths = []
if File.exist?(environment_conf)
$logger.debug "Adding modulepath config from '#{environment_path}'"
File.open(environment_conf, 'r').readlines.each do |line|
if line !~ /^\s*#/ && /^(.*)=(.*)/ =~ line
key, val = $1.strip, $2.strip
configured_paths = val.split(':') if key == 'modulepath'
end
end
end
modulepaths = configured_paths.map { |path| File.expand_path(path, environment_path) }
# Ensure '<environment_path>/modules' in modulepath.
ensure_path = File.expand_path('modules', environment_path)
modulepaths.insert(0, ensure_path) unless modulepaths.include? ensure_path
modulepaths.join(File::PATH_SEPARATOR)
end
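    # Worked example (illustrative paths only): with puppetcode set to
    # /opt/pupistry/puppetcode, the "master" environment and an
    # environment.conf line of "modulepath = site:dist", each entry is
    # expanded against the environment path and the default modules directory
    # is forced to the front, giving
    # "/opt/pupistry/puppetcode/master/modules:/opt/pupistry/puppetcode/master/site:/opt/pupistry/puppetcode/master/dist"
    # (joined with File::PATH_SEPARATOR).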
end
end
# vim:shiftwidth=2:tabstop=2:softtabstop=2:expandtab:smartindent
| 37.251366 | 146 | 0.660114 |
1d06a1dc5c9c280adebb20b317db8b930b7664db | 580 | require 'rails'
require 'jquery-rails'
require 'remotipart'
require 'bootstrap-sass'
require 'kaminari'
require 'rack-pjax'
require 'rails_admin'
module RailsAdmin
class Engine < Rails::Engine
isolate_namespace RailsAdmin
initializer "RailsAdmin precompile hook" do |app|
app.config.assets.precompile += ['rails_admin/rails_admin.js', 'rails_admin/rails_admin.css', 'rails_admin/jquery.colorpicker.js', 'rails_admin/jquery.colorpicker.css']
end
initializer "RailsAdmin pjax hook" do |app|
app.config.middleware.use Rack::Pjax
end
end
end
| 27.619048 | 174 | 0.743103 |
91fbd7d0cede21fc0ec5deb514ddc8fc1afa35d0 | 627 | class ChannelPostObserver < ActiveRecord::Observer
observe :channel_post
def before_save(channel_post)
if channel_post.visible
channel_post.published_at = DateTime.now unless channel_post.published_at.present?
end
end
def after_save(channel_post)
if channel_post.visible
channel_post.channel.subscribers.each do |subscriber|
channel_post.notify_once(
:channel_post,
subscriber,
channel_post,
{
from_email: channel_post.channel.email,
from_name: channel_post.channel.name
}
)
end
end
end
end
| 24.115385 | 88 | 0.661882 |
2185d300a496e8ceae4139f7ed755af77e7bff60 | 13,783 | #
# Cookbook Name:: arcgis-notebooks
# Attributes:: default
#
# Copyright 2019 Esri
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
include_attribute 'arcgis-repository'
default['arcgis']['notebook_server'].tap do |notebook_server|
notebook_server['wa_name'] = 'notebooks'
if node['fqdn'].nil? || node['arcgis']['configure_cloud_settings']
notebook_server['url'] = "https://#{node['ipaddress']}:11443/arcgis"
notebook_server['wa_url'] = "https://#{node['ipaddress']}/#{node['arcgis']['notebook_server']['wa_name']}"
notebook_server['domain_name'] = node['ipaddress']
notebook_server['hostname'] = node['ipaddress']
else
notebook_server['url'] = "https://#{node['fqdn']}:11443/arcgis"
notebook_server['wa_url'] = "https://#{node['fqdn']}/#{node['arcgis']['notebook_server']['wa_name']}"
notebook_server['domain_name'] = node['fqdn']
notebook_server['hostname'] = '' # Use the default server machine hostname
end
notebook_server['private_url'] = "https://#{node['arcgis']['notebook_server']['domain_name']}:11443/arcgis"
notebook_server['web_context_url'] = "https://#{node['arcgis']['notebook_server']['domain_name']}/#{node['arcgis']['notebook_server']['wa_name']}"
notebook_server['ports'] = '11443'
notebook_server['authorization_file'] = node['arcgis']['server']['authorization_file']
notebook_server['authorization_file_version'] = node['arcgis']['server']['authorization_file_version']
notebook_server['license_level'] = 'standard'
notebook_server['configure_autostart'] = true
notebook_server['install_system_requirements'] = true
notebook_server['install_samples_data'] = false
notebook_server['setup_archive'] = ''
notebook_server['admin_username'] = 'siteadmin'
if ENV['ARCGIS_NOTEBOOK_SERVER_ADMIN_PASSWORD'].nil?
notebook_server['admin_password'] = 'change.it'
else
notebook_server['admin_password'] = ENV['ARCGIS_NOTEBOOK_SERVER_ADMIN_PASSWORD']
end
notebook_server['config_store_type'] = 'FILESYSTEM'
notebook_server['config_store_class_name'] = case node['arcgis']['notebook_server']['config_store_type']
when 'AMAZON'
'com.esri.arcgis.carbon.persistence.impl.amazon.AmazonConfigPersistence'
when 'AZURE'
'com.esri.arcgis.carbon.persistence.impl.azure.AzureConfigPersistence'
else
'com.esri.arcgis.carbon.persistence.impl.filesystem.FSConfigPersistence'
end
notebook_server['log_level'] = 'WARNING'
notebook_server['max_log_file_age'] = 90
notebook_server['system_properties'] = {}
case node['platform']
when 'windows'
notebook_server['setup'] = ::File.join(node['arcgis']['repository']['setups'],
'ArcGIS ' + node['arcgis']['version'],
'NotebookServer', 'Setup.exe')
notebook_server['data_setup'] = ::File.join(node['arcgis']['repository']['setups'],
'ArcGIS ' + node['arcgis']['version'],
'NotebookServerData', 'Setup.exe')
notebook_server['install_dir'] = ::File.join(ENV['ProgramW6432'], 'ArcGIS\\NotebookServer').gsub('/', '\\')
notebook_server['install_subdir'] = ''
notebook_server['authorization_tool'] = ::File.join(ENV['ProgramW6432'],
'Common Files\\ArcGIS\\bin\\SoftwareAuthorization.exe').gsub('/', '\\')
notebook_server['directories_root'] = 'C:\\arcgisnotebookserver\\directories'
notebook_server['config_store_connection_string'] = 'C:\\arcgisnotebookserver\\config-store'
notebook_server['workspace'] = 'C:\\arcgisnotebookserver\\arcgisworkspace'
notebook_server['log_dir'] = 'C:\\arcgisnotebookserver\\logs'
case node['arcgis']['version']
when '10.9'
notebook_server['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Server_Windows_109_177821.exe').gsub('/', '\\')
notebook_server['standard_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Standard_109_177822.tar.gz').gsub('/', '\\')
notebook_server['advanced_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Advanced_109_177823.tar.gz').gsub('/', '\\')
notebook_server['product_code'] = '{9D0291C2-D01E-4411-A2D8-BB42740C9ECC}'
notebook_server['data_setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Server_Samples_Data_Windows_109_177842.exe').gsub('/', '\\')
notebook_server['data_product_code'] = '{C2ECEE9C-961A-4EAF-9BD4-9FB0EBCFA535}'
when '10.8.1'
notebook_server['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Server_1081_175250.exe').gsub('/', '\\')
notebook_server['standard_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Standard_1081_175251.tar.gz').gsub('/', '\\')
notebook_server['advanced_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Advanced_1081_175252.tar.gz').gsub('/', '\\')
notebook_server['product_code'] = '{55DE1B3D-DDFB-4906-81F2-B573BAC25018}'
when '10.8'
notebook_server['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Server_108_172941.exe').gsub('/', '\\')
notebook_server['standard_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Standard_108_172942.tar.gz').gsub('/', '\\')
notebook_server['advanced_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Advanced_108_172943.tar.gz').gsub('/', '\\')
notebook_server['product_code'] = '{B1DB581E-E66C-4E58-B9E3-50A4D6CB5982}'
when '10.7.1'
notebook_server['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Server_1071_169734.exe').gsub('/', '\\')
notebook_server['standard_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Standard_1071_169736.tar.gz').gsub('/', '\\')
notebook_server['advanced_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Advanced_1071_169738.tar.gz').gsub('/', '\\')
notebook_server['product_code'] = '{F6DF77B9-F35E-4877-A7B1-63E1918B4E19}'
else
Chef::Log.warn 'Unsupported ArcGIS Notebook Server version'
end
else # node['platform'] == 'linux'
notebook_server['setup'] = ::File.join(node['arcgis']['repository']['setups'],
node['arcgis']['version'],
'NotebookServer_Linux', 'Setup')
notebook_server['data_setup'] = ::File.join(node['arcgis']['repository']['setups'],
node['arcgis']['version'],
'NotebookServerData_Linux', 'ArcGISNotebookServerSamplesData-Setup.sh')
notebook_server['install_dir'] = "/home/#{node['arcgis']['run_as_user']}"
notebook_server['install_subdir'] = node['arcgis']['notebook_server']['install_dir'].end_with?('/arcgis') ?
'notebookserver' : 'arcgis/notebookserver'
if node['arcgis']['notebook_server']['install_dir'].nil?
notebook_server_install_dir = notebook_server['install_dir']
else
notebook_server_install_dir = node['arcgis']['notebook_server']['install_dir']
end
if node['arcgis']['notebook_server']['install_subdir'].nil?
notebook_server_install_subdir = notebook_server['install_subdir']
else
notebook_server_install_subdir = node['arcgis']['notebook_server']['install_subdir']
end
notebook_server['authorization_tool'] = ::File.join(notebook_server_install_dir,
notebook_server_install_subdir,
'/tools/authorizeSoftware')
notebook_server['keycodes'] = ::File.join(node['arcgis']['notebook_server']['install_dir'],
node['arcgis']['notebook_server']['install_subdir'],
'framework/.esri/License' +
node['arcgis']['notebook_server']['authorization_file_version'] +
'/sysgen/keycodes')
notebook_server['directories_root'] = ::File.join(notebook_server_install_dir,
notebook_server_install_subdir,
'usr', 'directories')
notebook_server['config_store_connection_string'] = ::File.join(notebook_server_install_dir,
notebook_server_install_subdir,
'usr', 'config-store')
notebook_server['workspace'] = ::File.join(notebook_server_install_dir,
notebook_server_install_subdir,
'usr', 'arcgisworkspace')
notebook_server['log_dir'] = ::File.join(notebook_server_install_dir,
notebook_server_install_subdir,
'usr', 'logs')
case node['arcgis']['version']
when '10.9'
notebook_server['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Server_Linux_109_177908.tar.gz')
notebook_server['standard_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Standard_109_177822.tar.gz')
notebook_server['advanced_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Advanced_109_177823.tar.gz')
notebook_server['data_setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Server_Samples_Data_Linux_109_177914.tar.gz')
when '10.8.1'
notebook_server['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Server_Linux_1081_175333.tar.gz')
notebook_server['standard_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Standard_1081_175251.tar.gz')
notebook_server['advanced_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Advanced_1081_175252.tar.gz')
when '10.8'
notebook_server['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Server_Linux_108_173012.tar.gz')
notebook_server['standard_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Standard_108_172942.tar.gz')
notebook_server['advanced_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Advanced_108_172943.tar.gz')
when '10.7.1'
notebook_server['setup_archive'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Server_Linux_1071_169927.tar.gz')
notebook_server['standard_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Standard_1071_169736.tar.gz')
notebook_server['advanced_images'] = ::File.join(node['arcgis']['repository']['archives'],
'ArcGIS_Notebook_Docker_Advanced_1071_169738.tar.gz')
else
Chef::Log.warn 'Unsupported ArcGIS Notebook Server version'
end
end
end
| 64.406542 | 148 | 0.572372 |
4ad96d0e469d4d8268551d7dd7ad0e1780ffea90 | 2,613 | require_relative '../base'
require_relative './associated-pool-item'
# Nocoah
module Nocoah
# Types
module Types
# Network
module Network
# Loadbalancer health monitor item
class HealthMonitorItem < Base
# @return [String] Health monitor ID
attr_reader :health_monitor_id
# @return [String] Tenant ID
attr_reader :tenant_id
# @return [Integer] Delay
attr_reader :delay
# @return [Integer] Max retries
attr_reader :max_retries
# @return [Integer] Timeout
attr_reader :timeout
# @return [Array<Nocoah::Types::Network::AssociatedPoolItem>] Pools
attr_reader :pools
# @return [Boolean] Administrative state of the network
attr_reader :admin_state_up
# @return [String (Nocoah::Types::Network::HealthMonitorType)] Health monitor type
attr_reader :type
# @return [String] URL path
attr_reader :url_path
# @return [String] Expected codes
attr_reader :expected_codes
# @return [String] HTTP method
attr_reader :http_method
# Creates a new {HealthMonitorItem} class instance.
#
# @param [Hash] data Data
def initialize( data )
@health_monitor_id = data['id']
@tenant_id = data['tenant_id']
@delay = data['delay']
@max_retries = data['max_retries']
@timeout = data['timeout']
@pools = data['pools'].map { | pool | AssociatedPoolItem.new( pool ) } rescue []
@admin_state_up = Utility.to_b( data['admin_state_up'] )
@type = data['type']
@url_path = data['url_path']
@expected_codes = data['expected_codes']
@http_method = data['http_method']
end
end
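            # Construction sketch (key names follow the reader in #initialize;
            # the values are illustrative only):
            #
            #   HealthMonitorItem.new(
            #     'id'             => 'hm-0001',
            #     'tenant_id'      => 'tenant-01',
            #     'delay'          => 5,
            #     'max_retries'    => 3,
            #     'timeout'        => 10,
            #     'pools'          => [],
            #     'admin_state_up' => true,
            #     'type'           => HealthMonitorType::HTTP,
            #     'url_path'       => '/health',
            #     'expected_codes' => '200',
            #     'http_method'    => 'GET'
            #   )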
# Health monitor type
module HealthMonitorType
# Ping: Sends a ping to the members and checks the response.
PING = "PING"
# TCP: Polls TCP to the members and checks the response.
TCP = "TCP"
# HTTP: Sends a HTTP request to the members and checks the response.
HTTP = "HTTP"
end
end
end
end | 36.291667 | 100 | 0.500574 |
abcfa7e8ce2226e57ba1b7ea0ca6da34dff1d322 | 103 | class ApplicationMailer < ActionMailer::Base
default from: ENV["ADMIN_EMAIL"]
layout 'mailer'
end
| 20.6 | 44 | 0.757282 |
4a49266fdc1f427bedef4963a9bad5c13d0aa958 | 1,667 | class Bumpversion < Formula
include Language::Python::Virtualenv
desc "Increase version numbers with SemVer terms"
homepage "https://pypi.python.org/pypi/bumpversion"
# maintained fork for the project
# Ongoing maintenance discussion for the project, https://github.com/c4urself/bump2version/issues/86
url "https://files.pythonhosted.org/packages/29/2a/688aca6eeebfe8941235be53f4da780c6edee05dbbea5d7abaa3aab6fad2/bump2version-1.0.1.tar.gz"
sha256 "762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"
license "MIT"
bottle do
cellar :any_skip_relocation
rebuild 1
sha256 "0565fcf56009d940c51badc4a28a0613490db4334a571c204532c9a2aa4c881a" => :big_sur
sha256 "55a004675d686be21cc9dbf7f820063165d13c50388046f4b7e8ccfe664f6215" => :arm64_big_sur
sha256 "d7bf73d933580643487b48d170a0e0357e8da2cddc47b4c238d020a7141c0ff5" => :catalina
sha256 "7324e4d10330ec6f90a2c09f073f209b54264c6aac2ac81d173eb256f9bc8758" => :mojave
end
depends_on "[email protected]"
def install
virtualenv_install_with_resources
end
test do
ENV["COLUMNS"] = "80"
assert_includes shell_output("script -q /dev/null #{bin}/bumpversion --help"), "bumpversion: v#{version}"
version_file = testpath/"VERSION"
version_file.write "0.0.0"
system bin/"bumpversion", "--current-version", "0.0.0", "minor", version_file
assert_match "0.1.0", version_file.read
system bin/"bumpversion", "--current-version", "0.1.0", "patch", version_file
assert_match "0.1.1", version_file.read
system bin/"bumpversion", "--current-version", "0.1.1", "major", version_file
assert_match "1.0.0", version_file.read
end
end
| 41.675 | 140 | 0.763047 |
6af835c02f5f9459f96fb409b049767b0a49d8c6 | 376 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::TrafficManager::Mgmt::V2017_05_01
module Models
#
# Defines values for ProfileStatus
#
module ProfileStatus
Enabled = "Enabled"
Disabled = "Disabled"
end
end
end
| 22.117647 | 70 | 0.702128 |
ac022c71d7bcebe361566536fc3c706925c214c9 | 1,329 | require "image_processing/mini_magick"
class ImageAttachment < Shrine
plugin :hanami
end
class ComplexAttachment < Shrine
include ImageProcessing::MiniMagick
plugin :hanami
plugin :processing
plugin :versions # enable Shrine to handle a hash of files
plugin :delete_raw # delete processed files after uploading
plugin :determine_mime_type
plugin :store_dimensions
process(:store) do |io, context|
io.download do |original|
size_100 = ImageProcessing::MiniMagick.source(original).resize_to_limit!(100, 100)
size_30 = ImageProcessing::MiniMagick.source(original).resize_to_limit!(30, 30)
{original: io, small: size_100, tiny: size_30}
end
end
end
class Kitten < Hanami::Entity
include ImageAttachment[:image]
end
class KittenRepository < Hanami::Repository
self.relation = :kittens
prepend ImageAttachment.repository(:image)
end
class MultiCat < Hanami::Entity
include ImageAttachment[:cat1]
include ImageAttachment[:cat2]
end
class MultiCatRepository < Hanami::Repository
prepend ImageAttachment.repository(:cat1)
prepend ImageAttachment.repository(:cat2)
end
class PluginsModel < Hanami::Entity
include ComplexAttachment[:image]
end
class PluginsModelRepository < Hanami::Repository
prepend ComplexAttachment.repository(:image)
end
Hanami::Model.load!
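# With the versions plugin above, the stored attachment for PluginsModel is a
# hash of files keyed by :original, :small and :tiny. Retrieval sketch
# (assumes the standard Shrine attachment readers; the id is illustrative):
#
#   model = PluginsModelRepository.new.find(id)
#   model.image[:tiny]       # Shrine::UploadedFile for the 30x30 rendition
#   model.image_url(:small)  # URL of the 100x100 rendition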
| 24.611111 | 88 | 0.774266 |
1c713fd13c4517438bdaa60863139a0b4ee65e8b | 7,525 | class WorkflowStep < DbConnection::KitSaasSubenv
enum kind: {
# Token setup
GlobalConstant::WorkflowStep.economy_setup_init_kind => 1,
GlobalConstant::WorkflowStep.generate_token_addresses_kind => 2,
GlobalConstant::WorkflowStep.fund_aux_funder_addr_kind => 3,
GlobalConstant::WorkflowStep.verify_fund_aux_funder_addr_kind => 4,
GlobalConstant::WorkflowStep.fund_aux_admin_addr_kind => 5,
GlobalConstant::WorkflowStep.verify_fund_aux_admin_addr_kind => 6,
GlobalConstant::WorkflowStep.fund_aux_worker_addr_kind => 7,
GlobalConstant::WorkflowStep.verify_fund_aux_worker_addr_kind => 8,
GlobalConstant::WorkflowStep.deploy_origin_token_organization_kind => 9,
GlobalConstant::WorkflowStep.save_origin_token_organization_kind => 10,
GlobalConstant::WorkflowStep.deploy_origin_branded_token_kind => 11,
GlobalConstant::WorkflowStep.save_origin_token_branded_token_kind => 12,
GlobalConstant::WorkflowStep.deploy_aux_token_organization_kind => 13,
GlobalConstant::WorkflowStep.save_aux_token_organization_kind => 14,
GlobalConstant::WorkflowStep.deploy_utility_branded_token_kind => 15,
GlobalConstant::WorkflowStep.save_utility_branded_token_kind => 16,
GlobalConstant::WorkflowStep.token_deploy_gateway_kind => 17,
GlobalConstant::WorkflowStep.save_token_gateway_kind => 18,
GlobalConstant::WorkflowStep.update_token_in_ost_view => 19,
GlobalConstant::WorkflowStep.token_deploy_co_gateway_kind => 20,
GlobalConstant::WorkflowStep.save_token_co_gateway_kind => 21,
GlobalConstant::WorkflowStep.activate_token_gateway_kind => 22,
GlobalConstant::WorkflowStep.verify_activate_token_gateway => 23,
GlobalConstant::WorkflowStep.set_gateway_in_bt_kind => 24,
GlobalConstant::WorkflowStep.verify_set_gateway_in_bt => 25,
GlobalConstant::WorkflowStep.set_co_gateway_in_ubt_kind => 26,
GlobalConstant::WorkflowStep.verify_set_co_gateway_in_ubt => 27,
GlobalConstant::WorkflowStep.deploy_gateway_composer => 28,
GlobalConstant::WorkflowStep.verify_deploy_gateway_composer => 29,
GlobalConstant::WorkflowStep.set_internal_actor_for_owner_in_ubt => 30,
GlobalConstant::WorkflowStep.verify_set_internal_actor_for_owner_in_ubt => 31,
GlobalConstant::WorkflowStep.verify_economy_setup => 32,
GlobalConstant::WorkflowStep.assign_shards => 33,
GlobalConstant::WorkflowStep.deploy_token_rules => 34,
GlobalConstant::WorkflowStep.save_token_rules => 35,
GlobalConstant::WorkflowStep.deploy_token_holder_master_copy => 36,
GlobalConstant::WorkflowStep.save_token_holder_master_copy => 37,
GlobalConstant::WorkflowStep.deploy_user_wallet_factory => 38,
GlobalConstant::WorkflowStep.save_user_wallet_factory => 39,
GlobalConstant::WorkflowStep.deploy_gnosis_safe_multi_sig_master_copy => 40,
GlobalConstant::WorkflowStep.save_gnosis_safe_multi_sig_master_copy => 41,
GlobalConstant::WorkflowStep.deploy_pricer_rule => 42,
GlobalConstant::WorkflowStep.save_pricer_rule => 43,
GlobalConstant::WorkflowStep.register_pricer_rule => 44,
GlobalConstant::WorkflowStep.verify_register_pricer_rule => 45,
GlobalConstant::WorkflowStep.add_price_oracle_in_pricer_rule => 46,
GlobalConstant::WorkflowStep.verify_add_price_oracle_in_pricer_rule => 47,
GlobalConstant::WorkflowStep.set_accepted_margin_in_pricer_rule => 48,
GlobalConstant::WorkflowStep.verify_set_accepted_margin_in_pricer_rule => 49,
GlobalConstant::WorkflowStep.post_token_rules_deploy => 59,
GlobalConstant::WorkflowStep.deploy_proxy_factory => 52,
GlobalConstant::WorkflowStep.save_proxy_factory => 53,
GlobalConstant::WorkflowStep.initialize_company_token_holder_in_db => 54,
GlobalConstant::WorkflowStep.create_company_wallet => 55,
GlobalConstant::WorkflowStep.verify_create_company_wallet => 56,
GlobalConstant::WorkflowStep.set_internal_actor_for_companyTH_in_UBT => 57,
GlobalConstant::WorkflowStep.verify_set_internal_actor_for_companyTH_in_UBT => 58,
GlobalConstant::WorkflowStep.generate_tx_worker_addresses => 50,
GlobalConstant::WorkflowStep.fund_ex_tx_workers => 51,
# Stake and Mint of ST'
GlobalConstant::WorkflowStep.st_prime_stake_and_mint_init => 60,
GlobalConstant::WorkflowStep.st_prime_approve => 61,
GlobalConstant::WorkflowStep.simple_token_stake => 62,
GlobalConstant::WorkflowStep.fetch_stake_intent_message_hash => 63,
GlobalConstant::WorkflowStep.prove_gateway_on_co_gateway => 64,
GlobalConstant::WorkflowStep.confirm_stake_intent => 65,
GlobalConstant::WorkflowStep.progress_stake => 66,
GlobalConstant::WorkflowStep.progress_mint => 67,
# Stake and Mint of BT
GlobalConstant::WorkflowStep.bt_stake_and_mint_init => 70,
GlobalConstant::WorkflowStep.approve_gateway_composer_trx =>71,
GlobalConstant::WorkflowStep.staker_request_stake_trx => 72,
GlobalConstant::WorkflowStep.check_gc_allowance => 73,
GlobalConstant::WorkflowStep.fetch_stake_request_hash => 74,
GlobalConstant::WorkflowStep.accept_stake => 75,
GlobalConstant::WorkflowStep.check_approve_status => 80,
GlobalConstant::WorkflowStep.check_stake_status => 81,
GlobalConstant::WorkflowStep.check_prove_gateway_status => 82,
GlobalConstant::WorkflowStep.check_confirm_stake_status => 83,
GlobalConstant::WorkflowStep.check_progress_stake_status => 84,
GlobalConstant::WorkflowStep.check_progress_mint_status => 85,
GlobalConstant::WorkflowStep.send_token_setup_success_email => 86,
GlobalConstant::WorkflowStep.send_token_setup_error_email => 87,
GlobalConstant::WorkflowStep.send_stake_and_mint_success_email => 88,
GlobalConstant::WorkflowStep.send_stake_and_mint_error_email => 89,
GlobalConstant::WorkflowStep.mark_success => 101,
GlobalConstant::WorkflowStep.mark_failure => 102,
GlobalConstant::WorkflowStep.commit_state_root_init => 150,
GlobalConstant::WorkflowStep.commit_state_root => 151,
GlobalConstant::WorkflowStep.update_committed_state_root_info => 152,
GlobalConstant::WorkflowStep.deploy_delayed_recovery_master_copy => 131,
GlobalConstant::WorkflowStep.save_deploy_delayed_recovery_master_copy => 132,
GlobalConstant::WorkflowStep.deploy_create_and_add_modules => 133,
GlobalConstant::WorkflowStep.save_deploy_create_and_add_modules => 134,
GlobalConstant::WorkflowStep.fund_recovery_controller_address => 135,
GlobalConstant::WorkflowStep.set_internal_actor_for_facilitator_in_ubt => 136,
GlobalConstant::WorkflowStep.verify_set_internal_actor_for_facilitator_in_ubt => 137,
GlobalConstant::WorkflowStep.grant_eth_stake_currency_init => 171,
GlobalConstant::WorkflowStep.grant_eth => 172,
GlobalConstant::WorkflowStep.verify_grant_eth => 173,
GlobalConstant::WorkflowStep.grant_stake_currency => 174,
GlobalConstant::WorkflowStep.verify_grant_stake_currency => 175,
GlobalConstant::WorkflowStep.fund_token_user_ops_worker => 189,
GlobalConstant::WorkflowStep.verify_fund_token_user_ops_worker => 190,
GlobalConstant::WorkflowStep.set_internal_actor_for_TR_in_UBT => 191,
GlobalConstant::WorkflowStep.verify_set_internal_actor_for_TR_in_UBT => 192
}
enum status: {
GlobalConstant::WorkflowStep.queued_status => 1,
GlobalConstant::WorkflowStep.pending_status => 2,
GlobalConstant::WorkflowStep.processed_status => 3,
GlobalConstant::WorkflowStep.failed_status => 4,
GlobalConstant::WorkflowStep.timeout_status => 5
}
end | 58.333333 | 89 | 0.802658 |
18aa70a01de5012912520af8fdd36adc6ba4e071 | 5,796 | # frozen_string_literal: true
require_relative "shared_examples/invalid_option"
describe Cask::Cmd::Outdated, :cask do
let(:installed) do
[
Cask::CaskLoader.load(cask_path("basic-cask")),
Cask::CaskLoader.load(cask_path("outdated/local-caffeine")),
Cask::CaskLoader.load(cask_path("outdated/local-transmission")),
Cask::CaskLoader.load(cask_path("version-latest-string")),
Cask::CaskLoader.load(cask_path("outdated/auto-updates")),
]
end
before do
installed.each { |cask| InstallHelper.install_with_caskfile(cask) }
end
it_behaves_like "a command that handles invalid options"
describe 'without --greedy it ignores the Casks with "version latest" or "auto_updates true"' do
it "checks all the installed Casks when no token is provided" do
expect {
described_class.run
}.to output(<<~EOS).to_stdout.as_tty
local-caffeine (1.2.2) != 1.2.3
local-transmission (2.60) != 2.61
EOS
end
it "checks only the tokens specified in the command line" do
expect {
described_class.run("local-caffeine")
}.to output(<<~EOS).to_stdout.as_tty
local-caffeine (1.2.2) != 1.2.3
EOS
end
it 'ignores "auto_updates" and "latest" Casks even when their tokens are provided in the command line' do
expect {
described_class.run("local-caffeine", "auto-updates", "version-latest-string")
}.to output(<<~EOS).to_stdout.as_tty
local-caffeine (1.2.2) != 1.2.3
EOS
end
end
describe "--quiet overrides TTY" do
it "lists only the names (no versions) of the outdated Casks with --quiet" do
expect {
described_class.run("--quiet")
}.to output(<<~EOS).to_stdout.as_tty
local-caffeine
local-transmission
EOS
end
end
describe "--quiet overrides --verbose" do
it "lists only the names (no versions) of the outdated Casks with --quiet" do
expect {
described_class.run("--verbose", "--quiet")
}.to output(<<~EOS).to_stdout
local-caffeine
local-transmission
EOS
end
end
describe "with --greedy it checks additional Casks" do
it 'includes the Casks with "auto_updates true" or "version latest" with --greedy' do
expect {
described_class.run("--greedy")
}.to output(<<~EOS).to_stdout.as_tty
auto-updates (2.57) != 2.61
local-caffeine (1.2.2) != 1.2.3
local-transmission (2.60) != 2.61
version-latest-string (latest) != latest
EOS
end
it 'does not include the Casks with "auto_updates true" when the version did not change' do
cask = Cask::CaskLoader.load(cask_path("auto-updates"))
InstallHelper.install_with_caskfile(cask)
expect {
described_class.run("--greedy")
}.to output(<<~EOS).to_stdout.as_tty
local-caffeine (1.2.2) != 1.2.3
local-transmission (2.60) != 2.61
version-latest-string (latest) != latest
EOS
end
end
describe "--json" do
it "lists outdated Casks in JSON format" do
result = [
{
name: "local-caffeine",
installed_versions: "1.2.2",
current_version: "1.2.3",
},
{
name: "local-transmission",
installed_versions: "2.60",
current_version: "2.61",
},
].to_json
expect {
described_class.run("--json")
}.to output(result + "\n").to_stdout
end
end
describe "--json overrides --quiet" do
it "ignores --quiet and lists outdated Casks in JSON format" do
result = [
{
name: "local-caffeine",
installed_versions: "1.2.2",
current_version: "1.2.3",
},
{
name: "local-transmission",
installed_versions: "2.60",
current_version: "2.61",
},
].to_json
expect {
described_class.run("--json", "--quiet")
}.to output(result + "\n").to_stdout
end
end
describe "--json and --greedy" do
it 'includes the Casks with "auto_updates true" or "version latest" in JSON format' do
result = [
{
name: "auto-updates",
installed_versions: "2.57",
current_version: "2.61",
},
{
name: "local-caffeine",
installed_versions: "1.2.2",
current_version: "1.2.3",
},
{
name: "local-transmission",
installed_versions: "2.60",
current_version: "2.61",
},
{
name: "version-latest-string",
installed_versions: "latest",
current_version: "latest",
},
].to_json
expect {
described_class.run("--json", "--greedy")
}.to output(result + "\n").to_stdout
end
it 'does not include the Casks with "auto_updates true" with no version change in JSON format' do
cask = Cask::CaskLoader.load(cask_path("auto-updates"))
InstallHelper.install_with_caskfile(cask)
result = [
{
name: "local-caffeine",
installed_versions: "1.2.2",
current_version: "1.2.3",
},
{
name: "local-transmission",
installed_versions: "2.60",
current_version: "2.61",
},
{
name: "version-latest-string",
installed_versions: "latest",
current_version: "latest",
},
].to_json
expect {
described_class.run("--json", "--greedy")
}.to output(result + "\n").to_stdout
end
end
end
| 29.42132 | 109 | 0.562802 |
b9d8dd0bb94996d8d766c5c17e65babf3eb26c58 | 1,266 | require 'rake/dsl_definition'
module Capistrano
module Container
class Manager
include Rake::DSL
def initialize()
@container = {}
end
def add(name, config)
@container[name.to_sym] = container = Instance.new(name, config)
config[:server].map!{ |ip| server(ip) }
config[:server].each do |server|
server.add_roles [:container_host, container.container_role]
end
self.create_container_tasks(container)
container
end
def by_name(name)
@container[name.to_sym]
end
      # Returns the container instance with the given container id, or nil if
      # no registered container matches.
      def by_id(id)
        @container.each do |_name, instance|
          return instance if instance.container_id == id
        end
        nil
      end
def by_roles(roles)
roles = Array(roles)
return @container.values if roles.include? :all
tmp = {}
roles.each do |role|
@container.each do |name, instance|
tmp[name] = instance if instance.has_role? role
end
end
tmp.values
end
def create_container_tasks(container)
namespace :container do
namespace container.name do
Mixins.define_tasks(container)
end
end
end
end
end
end
| 20.754098 | 72 | 0.578199 |
28536579dc22bb9db46c6449d5f97ebfc5af30ec | 984 | Given /^a query is cached with results:$/ do |results|
Oculus::Query.create(:name => "all users",
:query => "SELECT * FROM oculus_users",
:results => results.raw,
:started_at => Time.now,
:finished_at => Time.now)
end
Given /^I am on the history page$/ do
visit '/history'
end
When /^I execute "([^"]*)"$/ do |query|
visit '/'
find('.CodeMirror :first-child :first-child').native.send_keys(query)
click_button 'Run'
end
When /^I load the cached query$/ do
visit '/history'
click_link 'all users'
end
When /^I click delete$/ do
find('.delete').click
end
Then /^I should see (\d+) rows of results$/ do |result_count|
page.has_css?(".results", :visible => true)
within('.results') do
all('tr').length.should == result_count.to_i
end
end
Then /^I should not see any queries$/ do
within('#history') do
all('li').length.should == 0
end
end
| 24.6 | 71 | 0.583333 |
f84940ad3660441880441768ef70634cf9084893 | 889 | require 'spec_helper'
describe Statistrano::Log do
def clean_logger_cache
if described_class.instance_variable_get(:@_logger)
described_class.send(:remove_instance_variable, :@_logger)
end
end
before :each do
clean_logger_cache
end
after :each do
clean_logger_cache
end
describe "::set_logger" do
it "sets the logger" do
Statistrano::Log.set_logger 'foo'
expect( Statistrano::Log.instance_variable_get(:@_logger) ).to eq 'foo'
end
end
describe "::logger_instance" do
it "returns the cached logger" do
Statistrano::Log.instance_variable_set(:@_logger, 'foo')
expect( Statistrano::Log.logger_instance ).to eq 'foo'
end
it "initializes a new DefaultLogger if no logger is set" do
expect( Statistrano::Log::DefaultLogger ).to receive(:new)
Statistrano::Log.logger_instance
end
end
end
| 24.027027 | 77 | 0.703037 |
ab640634c1b3ea58fc2dcd001a5a8e409f854c23 | 465 | module Expectant
module ClassMethods
def expectation_choices
Hash[Expectation.all.order_by(text: :asc).map {|e| [e.text, e._id.to_s] }]
end
end
extend ActiveSupport::Concern
included do
field :expectation_ids, type: Array, default: []
field :minutes_to_complete, type: String
field :uses_government_gateway, type: Boolean
end
def expectations
Expectation.criteria.in(_id: self.expectation_ids)
end
end
| 21.136364 | 80 | 0.694624 |
215ccaa8961b7737b7b1226aad32960c99028101 | 335 | def start
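  # Speed power-up: save the current ball speed, bump it by 5 and run a 300-frame countdown (see #stop).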
@old_speed = @ball_speed
@ball_speed += 5
@time = 0
@stop_time = 300
end
def updating
@power_pos = [0,0]
@time += 1
if @time >= @stop_time
stop
end
end
def stop
@ball_speed = @old_speed
reset_power
end
def drawing
@font3.render(((@stop_time-@time)/30).to_i.to_s, true, [255,255,255]).blit(@screen,[40,0])
end
| 13.958333 | 91 | 0.665672 |
7ae518fc1aa4614b2517b2174e549b6f69835da7 | 2,015 | class Zeromq < Formula
desc "High-performance, asynchronous messaging library"
homepage "https://zeromq.org/"
url "https://github.com/zeromq/libzmq/releases/download/v4.3.3/zeromq-4.3.3.tar.gz"
sha256 "9d9285db37ae942ed0780c016da87060497877af45094ff9e1a1ca736e3875a2"
livecheck do
url :head
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
cellar :any
sha256 "3a0cde0350d5604c6d0a51120893cbb6a953e784c356afe5276ba431ae5936b6" => :big_sur
sha256 "5310a4204987850db552bb2746c48171a0faf51692c9f52debf10f85ac4db569" => :catalina
sha256 "b67097dee2a42630e01978befe5c173f2a976c16fe4190873997776a584e4559" => :mojave
sha256 "fee7be7f0a0762755037f3cd2ab1207dc9af43b0b75bf517d5f6f522a315a3bd" => :high_sierra
end
head do
url "https://github.com/zeromq/libzmq.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
end
depends_on "asciidoc" => :build
depends_on "pkg-config" => [:build, :test]
depends_on "xmlto" => :build
def install
# Work around "error: no member named 'signbit' in the global namespace"
if MacOS.version == :high_sierra
ENV.delete("HOMEBREW_SDKROOT")
ENV.delete("SDKROOT")
end
ENV["XML_CATALOG_FILES"] = "#{etc}/xml/catalog"
# Disable libunwind support due to pkg-config problem
# https://github.com/Homebrew/homebrew-core/pull/35940#issuecomment-454177261
system "./autogen.sh" if build.head?
system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
(testpath/"test.c").write <<~EOS
#include <assert.h>
#include <zmq.h>
int main()
{
zmq_msg_t query;
assert(0 == zmq_msg_init_size(&query, 1));
return 0;
}
EOS
system ENV.cc, "test.c", "-L#{lib}", "-lzmq", "-o", "test"
system "./test"
system "pkg-config", "libzmq", "--cflags"
system "pkg-config", "libzmq", "--libs"
end
end
| 29.632353 | 93 | 0.672457 |
1d993cd36c8514899b6f126e8c2258afabcf4817 | 1,740 | # frozen_string_literal: true
class DeviseCreateUsers < ActiveRecord::Migration[6.1]
def change
create_table :users do |t|
## Database authenticatable
t.string :email
t.string :username
t.string :encrypted_password, null: false, default: ""
t.string :first_name
t.string :last_name
t.string :full_name, default: ""
t.references :source, polymorphic: true
t.string :token
t.datetime :token_created_at
## Recoverable
t.string :reset_password_token
t.datetime :reset_password_sent_at
## Rememberable
t.datetime :remember_created_at
## Trackable
t.integer :sign_in_count, default: 0, null: false
t.datetime :current_sign_in_at
t.datetime :last_sign_in_at
t.string :current_sign_in_ip
t.string :last_sign_in_ip
## Confirmable
t.string :confirmation_token
t.datetime :confirmed_at
t.datetime :confirmation_sent_at
t.string :unconfirmed_email # Only if using reconfirmable
## Lockable
t.integer :failed_attempts, default: 0, null: false # Only if lock strategy is :failed_attempts
t.string :unlock_token # Only if unlock strategy is :email or :both
t.datetime :locked_at
## Omniauthable
t.string :provider
t.string :uid
t.timestamps null: false
end
add_index :users, :email, unique: true, where: "email IS NOT NULL"
add_index :users, :username, unique: true, where: "username IS NOT NULL"
add_index :users, :reset_password_token, unique: true
add_index :users, :confirmation_token, unique: true
add_index :users, :unlock_token, unique: true
add_index :users, [:source_type, :source_id]
end
end
| 28.52459 | 101 | 0.678161 |
790fb8c03f80ba0adba5e03c3e9e85746fe21be4 | 708 | require "exifr"
module Jekyll
module PhotoFilter
    def photo_filter(files)
      photos = files.select {|photo| photo.relative_path.include?("original") }
      # Sort by EXIF capture time when available, falling back to the file's
      # modification time, and return the photos newest first.
      photos.sort_by { |photo|
        exif = EXIFR::JPEG.new(photo.path)
        if exif.exif? && exif.date_time_original
          exif.date_time_original.to_s
        else
          photo.modified_time.to_s
        end
      }.reverse
    end
end
end
Liquid::Template.register_filter(Jekyll::PhotoFilter) | 35.4 | 94 | 0.677966 |
33c637771090af051e781b06b490fc954d5ca084 | 4,532 | require 'formula'
class Ffmpeg < Formula
homepage 'http://ffmpeg.org/'
url 'http://ffmpeg.org/releases/ffmpeg-1.2.4.tar.bz2'
sha1 'ee73a05bde209fc23441c7e49767c1b7a4b6f124'
head 'git://git.videolan.org/ffmpeg.git'
# This is actually the new stable, not a devel release,
# but not everything builds with it yet - notably gpac
devel do
url 'http://ffmpeg.org/releases/ffmpeg-2.0.2.tar.bz2'
sha1 '47d3b3d172ae81f0571549e4dfaadfe5b4fe06cc'
end
option "without-x264", "Disable H.264 encoder"
option "without-lame", "Disable MP3 encoder"
option "without-xvid", "Disable Xvid MPEG-4 video encoder"
option "with-rtmpdump", "Enable RTMP protocol"
option "with-libvo-aacenc", "Enable VisualOn AAC encoder"
option "with-libass", "Enable ASS/SSA subtitle format"
option "with-openjpeg", 'Enable JPEG 2000 image format'
option 'with-openssl', 'Enable SSL support'
option 'with-schroedinger', 'Enable Dirac video format'
option 'with-ffplay', 'Enable FFplay media player'
option 'with-tools', 'Enable additional FFmpeg tools'
option 'with-fdk-aac', 'Enable the Fraunhofer FDK AAC library'
depends_on 'pkg-config' => :build
# manpages won't be built without texi2html
depends_on 'texi2html' => :build if MacOS.version >= :mountain_lion
depends_on 'yasm' => :build
depends_on 'x264' => :recommended
depends_on 'faac' => :recommended
depends_on 'lame' => :recommended
depends_on 'xvid' => :recommended
depends_on :freetype => :optional
depends_on 'theora' => :optional
depends_on 'libvorbis' => :optional
depends_on 'libvpx' => :optional
depends_on 'rtmpdump' => :optional
depends_on 'opencore-amr' => :optional
depends_on 'libvo-aacenc' => :optional
depends_on 'libass' => :optional
depends_on 'openjpeg' => :optional
depends_on 'sdl' if build.include? 'with-ffplay'
depends_on 'speex' => :optional
depends_on 'schroedinger' => :optional
depends_on 'fdk-aac' => :optional
depends_on 'opus' => :optional
depends_on 'frei0r' => :optional
depends_on 'libcaca' => :optional
def install
args = ["--prefix=#{prefix}",
"--enable-shared",
"--enable-pthreads",
"--enable-gpl",
"--enable-version3",
"--enable-nonfree",
"--enable-hardcoded-tables",
"--enable-avresample",
"--enable-vda",
"--cc=#{ENV.cc}",
"--host-cflags=#{ENV.cflags}",
"--host-ldflags=#{ENV.ldflags}"
]
args << "--enable-libx264" if build.with? 'x264'
args << "--enable-libfaac" if build.with? 'faac'
args << "--enable-libmp3lame" if build.with? 'lame'
args << "--enable-libxvid" if build.with? 'xvid'
args << "--enable-libfreetype" if build.with? 'freetype'
args << "--enable-libtheora" if build.with? 'theora'
args << "--enable-libvorbis" if build.with? 'libvorbis'
args << "--enable-libvpx" if build.with? 'libvpx'
args << "--enable-librtmp" if build.with? 'rtmpdump'
args << "--enable-libopencore-amrnb" << "--enable-libopencore-amrwb" if build.with? 'opencore-amr'
args << "--enable-libvo-aacenc" if build.with? 'libvo-aacenc'
args << "--enable-libass" if build.with? 'libass'
args << "--enable-ffplay" if build.include? 'with-ffplay'
args << "--enable-libspeex" if build.with? 'speex'
args << '--enable-libschroedinger' if build.with? 'schroedinger'
args << "--enable-libfdk-aac" if build.with? 'fdk-aac'
args << "--enable-openssl" if build.with? 'openssl'
args << "--enable-libopus" if build.with? 'opus'
args << "--enable-frei0r" if build.with? 'frei0r'
args << "--enable-libcaca" if build.with? 'libcaca'
if build.with? 'openjpeg'
args << '--enable-libopenjpeg'
args << '--extra-cflags=' + %x[pkg-config --cflags libopenjpeg].chomp
end
# For 32-bit compilation under gcc 4.2, see:
# http://trac.macports.org/ticket/20938#comment:22
ENV.append_to_cflags "-mdynamic-no-pic" if Hardware.is_32_bit? && Hardware.cpu_type == :intel && ENV.compiler == :clang
system "./configure", *args
if MacOS.prefer_64_bit?
inreplace 'config.mak' do |s|
shflags = s.get_make_var 'SHFLAGS'
if shflags.gsub!(' -Wl,-read_only_relocs,suppress', '')
s.change_make_var! 'SHFLAGS', shflags
end
end
end
system "make install"
if build.include? 'with-tools'
system "make alltools"
bin.install Dir['tools/*'].select {|f| File.executable? f}
end
end
end
| 36.256 | 123 | 0.647396 |
038cd353950ae1b9dd434c6016778a4deb830c7f | 832 | # frozen_string_literal: true
RSpec.describe Eezee, type: :module do
it { expect(described_class::VERSION).not_to be_nil }
describe '.configure' do
subject do
described_class.configure do |config|
config.add_service :linqueta,
protocol: 'https',
raise_error: true,
url: 'www.linqueta.com'
end
end
before { subject }
it do
expect(described_class.configuration.services[:linqueta]).to have_attributes(
protocol: 'https',
raise_error: true,
url: 'www.linqueta.com',
uri: 'https://www.linqueta.com'
)
end
end
describe '.configuration' do
subject { described_class.configuration }
it { is_expected.to be_a(described_class::Configuration) }
end
end
| 24.470588 | 83 | 0.602163 |
e2ddcfa8f115288c70ef7dc48831de1adf8a09cb | 309 | require 'faraday'
require 'faraday_middleware'
require 'nokogiri'
require 'archive_today/version'
require 'archive_today/archiver'
module ArchiveToday
class Error < StandardError; end
class << self
def capture(url:, debug: false)
Archiver.new(url: url, debug: debug).capture
end
end
end
| 19.3125 | 50 | 0.737864 |
612562379ed24e08c07b416bfc76c635d9082490 | 35,351 | class AzureCli < Formula
include Language::Python::Virtualenv
desc "Microsoft Azure CLI 2.0"
homepage "https://docs.microsoft.com/cli/azure/overview"
url "https://codeload.github.com/Azure/azure-cli/legacy.tar.gz/3fa3de4fe7ddff0f58a33eb6ae62e358c91718e6"
version "2.0.77"
sha256 "ee328b7799cfae149feb49bd1d43bcc9b4d9f920c9815a06026add69c754cbad"
head "https://github.com/Azure/azure-cli.git"
bottle do
cellar :any
sha256 "0e1ac8247f03a161fa012c83ed8cd8beec7a794ff9a7a6e0ae51e8a7de603ba2" => :catalina
sha256 "ed25e484933dda4eb45548aa37a0e65af0c401bc72289c7c35ddc72714e60eb4" => :mojave
sha256 "e0e19c28bbb548cbaef921e493b811c8b004d671461d4ca42955d75519491aa3" => :high_sierra
sha256 "9b0df155adc553deec6b2759ab131b94a4317d4f1bbe4d1d056b2d0bf00e45f5" => :x86_64_linux
end
depends_on "[email protected]"
depends_on "python"
unless OS.mac?
# pkg-config helps setuptools find libffi
depends_on "libffi"
depends_on "pkg-config" => :build
end
resource "adal" do
url "https://files.pythonhosted.org/packages/75/e2/c44b5e8d99544a2e21aace5f8390c6f3dbf8a952f0453779075ffafafc80/adal-1.2.2.tar.gz"
sha256 "5a7f1e037c6290c6d7609cab33a9e5e988c2fbec5c51d1c4c649ee3faff37eaf"
end
resource "antlr4-python3-runtime" do
url "https://files.pythonhosted.org/packages/29/14/8ac135ec7cc9db3f768e2d032776718c6b23f74e63543f0974b4873500b2/antlr4-python3-runtime-4.7.2.tar.gz"
sha256 "168cdcec8fb9152e84a87ca6fd261b3d54c8f6358f42ab3b813b14a7193bb50b"
end
resource "applicationinsights" do
url "https://files.pythonhosted.org/packages/f0/93/f60d7519c28b9e05b075ce89027df27849c7a50fe0371d4da2c38389570a/applicationinsights-0.11.7.tar.gz"
sha256 "c4712ede8eeca57e611b7fd4b3b6c345745a4a002a08145ab45f92d31d900040"
end
resource "argcomplete" do
url "https://files.pythonhosted.org/packages/58/88/5a8da671e087ef513a63519344b35d80c6979591eee24e06a5acd1cd11b9/argcomplete-1.10.2.tar.gz"
sha256 "ec88b5ccefe2d47d8f14916a006431d0afb756751ee5c46f28654a7d8a69be53"
end
resource "azure-batch" do
url "https://files.pythonhosted.org/packages/3b/e8/74a6bbfa8abbf75763b01d3098bc8e98731ebe617ddea8c0c31076aaf9a7/azure-batch-8.0.0.zip"
sha256 "918bd0dae244a595f5de6cebf0bdab87c6ccd7d9d2f288e1543b6916ed8a16c9"
end
resource "azure-common" do
url "https://files.pythonhosted.org/packages/e4/c9/0300b5a409a3758c0b6f77df5d8816366c9516579d065210ef3a2f21e23a/azure-common-1.1.23.zip"
sha256 "53b1195b8f20943ccc0e71a17849258f7781bc6db1c72edc7d6c055f79bd54e3"
end
resource "azure-cosmos" do
url "https://files.pythonhosted.org/packages/9c/47/c77b0008c9f3bf90c533a7f538b149c7cd28d2d9c5303d3fc017ada6c09c/azure-cosmos-3.1.2.tar.gz"
sha256 "7f8ac99e4e40c398089fc383bfadcdc83376f72b88532b0cac0b420357cd08c7"
end
resource "azure-datalake-store" do
url "https://files.pythonhosted.org/packages/15/00/8bfe15183eadaecd8d7a53db58b1a4a085ed509630757423ece1649716bd/azure-datalake-store-0.0.48.tar.gz"
sha256 "d27c335783d4add00b3a5f709341e4a8009857440209e15a739a9a96b52386f7"
end
resource "azure-functions-devops-build" do
url "https://files.pythonhosted.org/packages/d5/96/59ca26c8d9985df8a092cf5974e54b6c3e11208833ea1c0163d7fb763c94/azure-functions-devops-build-0.0.22.tar.gz"
sha256 "c6341abda6098813f8fa625acd1e925410a17a8a1c7aaabdf975bb7cecb14edf"
end
resource "azure-graphrbac" do
url "https://files.pythonhosted.org/packages/3e/4e/4598ea52efc2654b0c865243bd60625d4ffa4df874e7e5dcb76a9a4ddbbc/azure-graphrbac-0.60.0.zip"
sha256 "d0bb62d8bf8e196b903f3971ba4afa448e4fe14e8394ebfcdd941d84d62ecafe"
end
resource "azure-keyvault" do
url "https://files.pythonhosted.org/packages/8e/47/b71d7ab466189d0663a8aa216e4cc67eb16d5dfc7d69b62a9140dd8d1a20/azure-keyvault-1.1.0.zip"
sha256 "37a8e5f376eb5a304fcd066d414b5d93b987e68f9212b0c41efa37d429aadd49"
end
resource "azure-loganalytics" do
url "https://files.pythonhosted.org/packages/7a/37/6d296ee71319f49a93ea87698da2c5326105d005267d58fb00cb9ec0c3f8/azure-loganalytics-0.1.0.zip"
sha256 "3ceb350def677a351f34b0a0d1637df6be0c6fe87ff32a5270b17f540f6da06e"
end
resource "azure-mgmt-advisor" do
url "https://files.pythonhosted.org/packages/f2/fb/bca29d83a2062c7d977742189195d669fd5983017fddb464c90f07adaac0/azure-mgmt-advisor-2.0.1.zip"
sha256 "1929d6d5ba49d055fdc806e981b93cf75ea42ba35f78222aaf42d8dcf29d4ef3"
end
resource "azure-mgmt-apimanagement" do
url "https://files.pythonhosted.org/packages/ec/bf/8b960e78095793b60f4c56e0f9979436250e22e12fca95344a319f1a593e/azure-mgmt-apimanagement-0.1.0.zip"
sha256 "5d45d3438c6a11bae6bb8d4d5173cdb44b85683695f9f3433f22f45aecc47819"
end
resource "azure-mgmt-appconfiguration" do
url "https://files.pythonhosted.org/packages/05/ab/e62ff2d9728717ec41554f41f8376b139074eba066049edd33cbc58e37bf/azure-mgmt-appconfiguration-0.3.0.zip"
sha256 "3a6045bec1f57aeaa9498c7dfef628506a2fb1a8b628c9be68e733de661cf4c5"
end
resource "azure-mgmt-applicationinsights" do
url "https://files.pythonhosted.org/packages/83/ad/27c3e2c618c08ea451a80d6a0dc5b73b8c8c2392706909f297c37389766f/azure-mgmt-applicationinsights-0.1.1.zip"
sha256 "f10229eb9e3e9d0ad20188b8d14d67055e86f3815b43b75eedf96b654bee2a9b"
end
resource "azure-mgmt-authorization" do
url "https://files.pythonhosted.org/packages/14/d2/9a6cf1dd65feaddf43f30ddd89bce7da74ced856d459b11a6a1d5ada0f4e/azure-mgmt-authorization-0.52.0.zip"
sha256 "16a618c4357c11e96de376856c396f09e76a56473920cdf7a66735fabaa2a70c"
end
resource "azure-mgmt-batch" do
url "https://files.pythonhosted.org/packages/7b/e5/4457999b46dc09512e7920c0f527838afc464f20afee4ea2b4cedf66e25a/azure-mgmt-batch-7.0.0.zip"
sha256 "16c5b652b439b1a0a20366558f5c06858a3052d50b16a470bb80cd30f97abca1"
end
resource "azure-mgmt-batchai" do
url "https://files.pythonhosted.org/packages/fa/7f/0a9e5aa22ea91db0771c267c4815396516177702a4a4eea389eed7af47dd/azure-mgmt-batchai-2.0.0.zip"
sha256 "f1870b0f97d5001cdb66208e5a236c9717a0ed18b34dbfdb238a828f3ca2a683"
end
resource "azure-mgmt-billing" do
url "https://files.pythonhosted.org/packages/24/35/3b9da47363a300203c324b572a1ae3c096dc031905d582d5a27bd59a8d4e/azure-mgmt-billing-0.2.0.zip"
sha256 "85f73bb3808a7d0d2543307e8f41e5b90a170ad6eeedd54fe7fcaac61b5b22d2"
end
resource "azure-mgmt-botservice" do
url "https://files.pythonhosted.org/packages/eb/8e/f523bf5c10abd10c945d0911a6988b9ee347464939d8b9cf769721bdbcb3/azure-mgmt-botservice-0.2.0.zip"
sha256 "b21d8858e69aa16d25b908c40116a1f773c127ec4dd602cbb8542ebf39a55d83"
end
resource "azure-mgmt-cdn" do
url "https://files.pythonhosted.org/packages/a4/0f/203e44ea1aab1368b73bd70438945ee5e3493405db38a74d74bb1420b498/azure-mgmt-cdn-3.1.0.zip"
sha256 "0cdbe0914aec544884ef681e31950efa548d9bec6d6dc354e00c3dbdab9e76e3"
end
resource "azure-mgmt-cognitiveservices" do
url "https://files.pythonhosted.org/packages/02/9c/747b7e7f5286a0498d99cc9445a6d96a87d52411db54193fc8703eb0b90c/azure-mgmt-cognitiveservices-5.0.0.zip"
sha256 "06245a7200bd1ed8a7a8ad3bce282011d6ba2faaae847f337cafd53d5d1efbd4"
end
resource "azure-mgmt-compute" do
url "https://files.pythonhosted.org/packages/29/f9/ab33d99f9d6474a61d80477099f2033661d04c7cd143e4d62a648c482b7b/azure-mgmt-compute-10.0.0.zip"
sha256 "cfdf35722d5d7ccee8d32a81f6734b210298dfaed10f7299efadf06ea7e96be8"
end
resource "azure-mgmt-consumption" do
url "https://files.pythonhosted.org/packages/8c/f0/e2d94b246e2dce71eff8d362836a1979f02b4185f5403a13e4fb26c07ccb/azure-mgmt-consumption-2.0.0.zip"
sha256 "9a85a89f30f224d261749be20b4616a0eb8948586f7f0f20573b8ea32f265189"
end
resource "azure-mgmt-containerinstance" do
url "https://files.pythonhosted.org/packages/95/3b/513362470b6b61fe2a5067e5426351a40fe8d6bd7197a5355d2957928b4d/azure-mgmt-containerinstance-1.5.0.zip"
sha256 "b055386f04ba8433112b0df7fcbc260b5208828d7bb8c057e760fe596aa7a8cd"
end
resource "azure-mgmt-containerregistry" do
url "https://files.pythonhosted.org/packages/4a/f5/1113c8f9f8d23f9e59e5c3ee0831607698371185e52e405cc9cfabc87ed9/azure-mgmt-containerregistry-3.0.0rc7.zip"
sha256 "b79ff461c22b901cdc58c26a8ff27cc74cdb3b10f9d1056cb7b01914febaeeaf"
end
resource "azure-mgmt-containerservice" do
url "https://files.pythonhosted.org/packages/d2/16/d9db0418a06044a954460173a8da0a9ded4446f45af0e14194204485ab45/azure-mgmt-containerservice-8.0.0.zip"
sha256 "8fa3d3ac8a88ad6fd25f87966c27049864780d88b7b946d06da310d945a8772a"
end
resource "azure-mgmt-cosmosdb" do
url "https://files.pythonhosted.org/packages/93/1d/7b2fff8a634fec10b810aee377b5f117857d66864081bcfe980afc6daaf2/azure-mgmt-cosmosdb-0.8.0.zip"
sha256 "4b7b59132992bdf5d4ad211ca8e416402bfca5d7a90a80c20989879cc5ea5345"
end
resource "azure-mgmt-datalake-analytics" do
url "https://files.pythonhosted.org/packages/f4/c6/6b273e3b7bc17c13ef85c0ebc6bf7bbd8289a46892ee5bef1f0859aff11d/azure-mgmt-datalake-analytics-0.2.1.zip"
sha256 "4c7960d094f5847d9a456c18b8a3c8e60c428e3080a3905f1c943d81ba6351a4"
end
resource "azure-mgmt-datalake-nspkg" do
url "https://files.pythonhosted.org/packages/8e/0c/7b705f7c4a41bfeb0b6f3557f227c71aa3fa71555ed76ae934aa7db4b13e/azure-mgmt-datalake-nspkg-3.0.1.zip"
sha256 "deb192ba422f8b3ec272ce4e88736796f216f28ea5b03f28331d784b7a3f4880"
end
resource "azure-mgmt-datalake-store" do
url "https://files.pythonhosted.org/packages/00/13/037f0128bdfcd47253f69a3b4ca6a7ff7b673b35832bc48f7c74df24a9be/azure-mgmt-datalake-store-0.5.0.zip"
sha256 "9376d35495661d19f8acc5604f67b0bc59493b1835bbc480f9a1952f90017a4c"
end
resource "azure-mgmt-datamigration" do
url "https://files.pythonhosted.org/packages/69/0c/d876ab1ff8786deaf5bbf3b10c6823ae92c1d1ff576e262f4a6c681ffd39/azure-mgmt-datamigration-0.1.0.zip"
sha256 "e754928992743f54d999800a5e0679ee3e91d804d23a25f12c2e6f2f86cd05df"
end
resource "azure-mgmt-deploymentmanager" do
url "https://files.pythonhosted.org/packages/53/4a/2cb6f4dbd4f1510249ce5a93dfd78ba258fb562528f24d9621bf49f379cb/azure-mgmt-deploymentmanager-0.1.0.zip"
sha256 "398a6d38b658c4a790e1a6884921eb98a22a10d906340bb8c9fb3207d709703f"
end
resource "azure-mgmt-devtestlabs" do
url "https://files.pythonhosted.org/packages/1d/67/b3fad6c04240edf278d2afa71129b8a86f43803ee681c518beac5729e58b/azure-mgmt-devtestlabs-2.2.0.zip"
sha256 "d416a6d0883b0d33a63c524db6455ee90a01a72a9d8757653e446bf4d3f69796"
end
resource "azure-mgmt-dns" do
url "https://files.pythonhosted.org/packages/a2/0d/a36c123a1c978d39a1da747b9e8179f37441176d2a5276124d6d3312b2c4/azure-mgmt-dns-2.1.0.zip"
sha256 "3730b1b3f545a5aa43c0fff07418b362a789eb7d81286e2bed90ffef88bfa5d0"
end
resource "azure-mgmt-eventgrid" do
url "https://files.pythonhosted.org/packages/8d/e6/f805cd9731bdf21e4dba9a1b341b6ff3cd69747bdbd954164d8187af6deb/azure-mgmt-eventgrid-2.2.0.zip"
sha256 "c62058923ed20db35b04491cd1ad6a692f337244d05c377ecc14a53c06651cc3"
end
resource "azure-mgmt-eventhub" do
url "https://files.pythonhosted.org/packages/bc/f6/27bab11a5de855e95b639254c4abb83b27f9e64e352d00bcd80421b6b792/azure-mgmt-eventhub-2.6.0.zip"
sha256 "e86b20aa1f6f9c77a83d4af6e708823cc3593658bcea7b12228efc48e214d7da"
end
resource "azure-mgmt-hdinsight" do
url "https://files.pythonhosted.org/packages/8b/8a/e9e39cba508294a1a1a495bd395dbc0c230bb6a3529b6b54ac109e23a179/azure-mgmt-hdinsight-1.1.0.zip"
sha256 "76d277fb1a2fedc5181a7c738f058ebff8646bde5fb477cb53a43712166c4952"
end
resource "azure-mgmt-imagebuilder" do
url "https://files.pythonhosted.org/packages/0d/b2/d16fe769e12170e01b015bbef16f4a09e32c60dff2ba2818bcd7f02f056b/azure-mgmt-imagebuilder-0.2.1.zip"
sha256 "7e5efd9f641764884cbb6e1521c8e7ff67c5ff85ed367ebe8623dbd289df9457"
end
resource "azure-mgmt-iotcentral" do
url "https://files.pythonhosted.org/packages/cb/8b/878d6d5658cc224861f56d220834aeca794cc60c59e77bad643aa88c8ab7/azure-mgmt-iotcentral-1.0.0.zip"
sha256 "9aac88ed1f993965015f4e9986931fc08798e09d7b864928681a7cebff053de8"
end
resource "azure-mgmt-iothub" do
url "https://files.pythonhosted.org/packages/aa/7e/9b706f7de3eb7dbba3d133562bbddb008aca782f46e86f1bdad361d6e4ad/azure-mgmt-iothub-0.8.2.zip"
sha256 "388be0ed9f7ec8e7e450c7677aa4f823773a99df78ecac9ae4d36653420b7c70"
end
resource "azure-mgmt-iothubprovisioningservices" do
url "https://files.pythonhosted.org/packages/7a/9e/179a404d2b3d999cf2dbdbec51c849e92625706e8eff6bd6d02df3ad2ab7/azure-mgmt-iothubprovisioningservices-0.2.0.zip"
sha256 "8c37acfd1c33aba845f2e0302ef7266cad31cba503cc990a48684659acb7b91d"
end
resource "azure-mgmt-keyvault" do
url "https://files.pythonhosted.org/packages/ee/51/49aa83bc983020d69807ce5458d70009bff211e9f6e4f6bb081755e82af8/azure-mgmt-keyvault-1.1.0.zip"
sha256 "05a15327a922441d2ba32add50a35c7f1b9225727cbdd3eeb98bc656e4684099"
end
resource "azure-mgmt-kusto" do
url "https://files.pythonhosted.org/packages/0d/79/887c8f71d7ebd87e4f2359f6726a0a881f1c9369167bf075bf22ba39751c/azure-mgmt-kusto-0.3.0.zip"
sha256 "9eb8b7781fd4410ee9e207cd0c3983baf9e58414b5b4a18849d09856e36bacde"
end
resource "azure-mgmt-loganalytics" do
url "https://files.pythonhosted.org/packages/93/e2/6b47cc232357b05d0c8c788d6bbece67428ea997ba29d50e5cd90c1bd104/azure-mgmt-loganalytics-0.2.0.zip"
sha256 "c7315ff0ee4d618fb38dca68548ef4023a7a20ce00efe27eb2105a5426237d86"
end
resource "azure-mgmt-managedservices" do
url "https://files.pythonhosted.org/packages/f8/db/faab14079c628202d771a2bc33016326de6d194d1460fd8e531a59664371/azure-mgmt-managedservices-1.0.0.zip"
sha256 "fed8399fc6773aada37c1d0496a46f59410d77c9494d0ca5967c531c3376ad19"
end
resource "azure-mgmt-managementgroups" do
url "https://files.pythonhosted.org/packages/f2/03/30442b6025da7a730b24b5d208119740382e2c5135ec0b96a1003b3c86fe/azure-mgmt-managementgroups-0.2.0.zip"
sha256 "3d5237947458dc94b4a392141174b1c1258d26611241ee104e9006d1d798f682"
end
resource "azure-mgmt-maps" do
url "https://files.pythonhosted.org/packages/58/99/735fc6f274d2f2a493071b4bc3e6ec2bc3d0d6caf1425eb903647785532c/azure-mgmt-maps-0.1.0.zip"
sha256 "c120e210bb61768da29de24d28b82f8d42ae24e52396eb6569b499709e22f006"
end
resource "azure-mgmt-marketplaceordering" do
url "https://files.pythonhosted.org/packages/e9/90/1bf9d50614acee60ba5447bc9db6d63930f1559182fa8266ccac60a96dd3/azure-mgmt-marketplaceordering-0.2.1.zip"
sha256 "dc765cde7ec03efe456438c85c6207c2f77775a8ce8a7adb19b0df5c5dc513c2"
end
resource "azure-mgmt-media" do
url "https://files.pythonhosted.org/packages/77/41/b410828ce54066c2686d4c76da8178741d7a6638d9f07dd75f208e27a0ee/azure-mgmt-media-1.1.1.zip"
sha256 "5d0c6b3a0f882dde8ae3d42467f03ea6c4e3f62613936087d54c67e6f504939b"
end
resource "azure-mgmt-monitor" do
url "https://files.pythonhosted.org/packages/ea/bb/ad413dbbcdb6f37ca9f674cd94ecad2c86b24e8a8f1f5f3d06b23ded6beb/azure-mgmt-monitor-0.7.0.zip"
sha256 "8216ab3ec57994303c47a0977e853a8a3fff4778e08dc3575e669522cadcf9de"
end
resource "azure-mgmt-msi" do
url "https://files.pythonhosted.org/packages/da/6f/60f92469f93e2820949f967b8c722fe0c04f03e4cc9a6332ffaf5e9f405b/azure-mgmt-msi-0.2.0.zip"
sha256 "8622bc9a164169a0113728ebe7fd43a88189708ce6e10d4507247d6907987167"
end
resource "azure-mgmt-netapp" do
url "https://files.pythonhosted.org/packages/77/5c/2bf12596d381c73e32cdd622b666d192380c327b138be0df524493596c35/azure-mgmt-netapp-0.7.0.zip"
sha256 "239bf4dde9990681f76c23330c5144ddcd6f84a9fd3c8e25f95ef8b2ecbcc431"
end
resource "azure-mgmt-network" do
url "https://files.pythonhosted.org/packages/2a/3f/a8a9c85e12c9179a67f554daa835de40a4669d07d730b22add9c8c79a2fa/azure-mgmt-network-7.0.0.zip"
sha256 "32ce90691b96ecdaa974ecb4d35063377c8fd21fd05984164507b63113f3456b"
end
resource "azure-mgmt-nspkg" do
url "https://files.pythonhosted.org/packages/c4/d4/a9a140ee15abd8b0a542c0d31b7212acf173582c10323b09380c79a1178b/azure-mgmt-nspkg-3.0.2.zip"
sha256 "8b2287f671529505b296005e6de9150b074344c2c7d1c805b3f053d081d58c52"
end
resource "azure-mgmt-policyinsights" do
url "https://files.pythonhosted.org/packages/e4/48/d9b552a1c5b169610d139e87cb201afff56eb141c5656f24a1c05484323b/azure-mgmt-policyinsights-0.3.1.zip"
sha256 "b27f5ac367b69e225ab02fa2d1ea20cbbfe948ff43b0af4698cd8cbde0063908"
end
resource "azure-mgmt-privatedns" do
url "https://files.pythonhosted.org/packages/1b/88/bd382d401e58b87df086f0218af94e7defd78f7cb300427eee3d25a9d7a1/azure-mgmt-privatedns-0.1.0.zip"
sha256 "d29cfd8cec806e06673d9382d3f5766fc65d9a9de75b424705094a34a7db8d23"
end
resource "azure-mgmt-rdbms" do
url "https://files.pythonhosted.org/packages/62/3a/e55b068d95104c7096c84cb53b0680b28a5fbc7aa07a03717ede5a0f890b/azure-mgmt-rdbms-1.9.0.zip"
sha256 "d9d4090010cbb64176ce094603f1298af7368ddb3a0cb606d5e972331285216d"
end
resource "azure-mgmt-recoveryservices" do
url "https://files.pythonhosted.org/packages/7c/67/9857c8741d0ccbc4bd22af3350df974631c2b04a62e4fcbdb704bc05dae3/azure-mgmt-recoveryservices-0.4.0.zip"
sha256 "e1e794760232239f8a9328d5de1740565ff70d1612a2921c9609746ba5671e6c"
end
resource "azure-mgmt-recoveryservicesbackup" do
url "https://files.pythonhosted.org/packages/d4/99/ac5caee2dfd1e2f85d8768127184b86fee3f01a3e4a8cf8d3489fdddaa68/azure-mgmt-recoveryservicesbackup-0.4.0.zip"
sha256 "511cc92f2a12fd233c920f64ee4ea2d2d39adea7393145bbfa7e881bdbdf5a7f"
end
resource "azure-mgmt-redis" do
url "https://files.pythonhosted.org/packages/d0/d2/5f42ae10ee738da5cfaffa082fdd1ef07e1ccd546d72953f87f15f878e57/azure-mgmt-redis-6.0.0.zip"
sha256 "db999e104edeee3a13a8ceb1881e15196fe03a02635e0e20855eb52c1e2ecca1"
end
resource "azure-mgmt-relay" do
url "https://files.pythonhosted.org/packages/df/76/f4673094df467c1198dfd944f8a800a25d0ed7f4bbd7c73e9e2605874576/azure-mgmt-relay-0.1.0.zip"
sha256 "d9f987cf2998b8a354f331b2a71082c049193f1e1cd345812e14b9b821365acb"
end
resource "azure-mgmt-reservations" do
url "https://files.pythonhosted.org/packages/97/0a/eb194a08fd35bda1e6b27ef227241ac36c8abb3bf3a201772c2777a74caf/azure-mgmt-reservations-0.6.0.zip"
sha256 "83a9a70d6fd78b8b3e92ca64bbc1fde8d1bc5e2efea54076052c51c946b4cc9b"
end
resource "azure-mgmt-resource" do
url "https://files.pythonhosted.org/packages/46/3d/b9be31f106e3f042e90a341287c03971d293d6cec61a5b12217a3d038567/azure-mgmt-resource-4.0.0.zip"
sha256 "2b909f137469c7bfa541554c3d22eb918e9191c07667a42f2c6fc684e24ac83f"
end
resource "azure-mgmt-search" do
url "https://files.pythonhosted.org/packages/e7/9d/6aae72f83c1a30d6b0fb9b7892ddf150b8e6bc0f01a82e53c675877944aa/azure-mgmt-search-2.1.0.zip"
sha256 "92a40a1a7a9e3a82b6fa302042799e8d5a67d3996c20835af72afc14f1610501"
end
resource "azure-mgmt-security" do
url "https://files.pythonhosted.org/packages/30/14/191a5a9887eacb94eb314a6b4124e9b4d563c8736061edea8bb32ca158fb/azure-mgmt-security-0.1.0.zip"
sha256 "1d42ced0690d10ebe5f83bf20be835e1a424d81463e59857cc402f218e3164b1"
end
resource "azure-mgmt-servicebus" do
url "https://files.pythonhosted.org/packages/8f/bd/fdb9db085a1590ef13e683f3aa6462c6fe70fb1e61e69212017abe58b419/azure-mgmt-servicebus-0.6.0.zip"
sha256 "f20920b8fb119ef4abeda4d2dac765a4fc48cd0bcf30c27f8c4cc6d890bc08b1"
end
resource "azure-mgmt-servicefabric" do
url "https://files.pythonhosted.org/packages/13/cd/996d5887c207c175eb1be0936b994db3382d0e2998e58baaf5255e53ddc2/azure-mgmt-servicefabric-0.2.0.zip"
sha256 "b2bf2279b8ff8450c35e78e226231655021482fdbda27db09975ebfc983398ad"
end
resource "azure-mgmt-signalr" do
url "https://files.pythonhosted.org/packages/6b/70/26d29a7a31a4c95ac2de05358c2beec0c033033de23bc0ef6452af3fb626/azure-mgmt-signalr-0.3.0.zip"
sha256 "0a6c876434f1a51bfbf1c4cea3f7167329f4ea86c12ba8ce8123d8f4b9896221"
end
resource "azure-mgmt-sql" do
url "https://files.pythonhosted.org/packages/03/78/acd3633c206dd4f7a7500c9b0f0da6f8ae945b866ccb58696fbbd8f481ca/azure-mgmt-sql-0.15.0.zip"
sha256 "89f93b26c044fafe4460a9aae7981dbd10f0a1fd9b91d160a362dbaf72476563"
end
resource "azure-mgmt-sqlvirtualmachine" do
url "https://files.pythonhosted.org/packages/c1/3c/317e8717336e963dcb40997803a5790ae465738cdc56ac72129c4458dbcc/azure-mgmt-sqlvirtualmachine-0.4.0.zip"
sha256 "95718425df713e87700e21207f2695ea26b91fe2ddd89918ca7c76bfe58cb5cb"
end
resource "azure-mgmt-storage" do
url "https://files.pythonhosted.org/packages/c4/9f/aea6c2350818ada490fa6ed82e385711aa5fbdcfb67d74bc700b3ed215ae/azure-mgmt-storage-5.0.0.zip"
sha256 "dd27ffc8c763932100dff1cda3d5a72dc2348686093732747f68396b1dd3fabf"
end
resource "azure-mgmt-trafficmanager" do
url "https://files.pythonhosted.org/packages/14/98/6fb3bc67bb862b7bac2ea43108aa1648f72c8fa63de22ab1e58278224c43/azure-mgmt-trafficmanager-0.51.0.zip"
sha256 "fc8ae77022cfe52fda4379a2f31e0b857574d536e41291a7b569b5c0f4104186"
end
resource "azure-mgmt-web" do
url "https://files.pythonhosted.org/packages/d0/08/4e0b371a3f63c00db90d93b99ee019a212f64bb570ce31758dfe436ec08e/azure-mgmt-web-0.42.0.zip"
sha256 "b6ddc3020cd44d1cc64331c9f4fe71478c83a5c911e3a3cf67be70a55204e46e"
end
resource "azure-multiapi-storage" do
url "https://files.pythonhosted.org/packages/7a/5f/2e01d7f11fd32ebc1f15473d5e2b80b3dc2d9dc3da96bb0f59a8e3415a17/azure-multiapi-storage-0.2.4.tar.gz"
sha256 "2f5e9d3aaef82c6b0e5a1e735bd02548063be6f4d2951ad94a0d9bde08bb0a7f"
end
resource "azure-nspkg" do
url "https://files.pythonhosted.org/packages/39/31/b24f494eca22e0389ac2e81b1b734453f187b69c95f039aa202f6f798b84/azure-nspkg-3.0.2.zip"
sha256 "e7d3cea6af63e667d87ba1ca4f8cd7cb4dfca678e4c55fc1cedb320760e39dd0"
end
resource "azure-storage-blob" do
url "https://files.pythonhosted.org/packages/52/2e/21691005508ab03b88b99ad13b52275543623be9acfc96f4ce162b6a35e3/azure-storage-blob-1.5.0.tar.gz"
sha256 "f187a878e7a191f4e098159904f72b4146cf70e1aabaf6484ab4ba72fc6f252c"
end
resource "azure-storage-common" do
url "https://files.pythonhosted.org/packages/ae/45/0d21c1543afd3a97c416298368e06df158dfb4740da0e646a99dab6080de/azure-storage-common-1.4.2.tar.gz"
sha256 "4ec87c7537d457ec95252e0e46477e2c1ccf33774ffefd05d8544682cb0ae401"
end
resource "bcrypt" do
url "https://files.pythonhosted.org/packages/fa/aa/025a3ab62469b5167bc397837c9ffc486c42a97ef12ceaa6699d8f5a5416/bcrypt-3.1.7.tar.gz"
sha256 "0b0069c752ec14172c5f78208f1863d7ad6755a6fae6fe76ec2c80d13be41e42"
end
resource "certifi" do
url "https://files.pythonhosted.org/packages/62/85/7585750fd65599e88df0fed59c74f5075d4ea2fe611deceb95dd1c2fb25b/certifi-2019.9.11.tar.gz"
sha256 "e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/2d/bf/960e5a422db3ac1a5e612cb35ca436c3fc985ed4b7ed13a1b4879006f450/cffi-1.13.2.tar.gz"
sha256 "599a1e8ff057ac530c9ad1778293c665cb81a791421f46922d80a86473c13346"
end
resource "chardet" do
url "https://files.pythonhosted.org/packages/fc/bb/a5768c230f9ddb03acc9ef3f0d4a3cf93462473795d18e9535498c8f929d/chardet-3.0.4.tar.gz"
sha256 "84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"
end
resource "colorama" do
url "https://files.pythonhosted.org/packages/76/53/e785891dce0e2f2b9f4b4ff5bc6062a53332ed28833c7afede841f46a5db/colorama-0.4.1.tar.gz"
sha256 "05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d"
end
resource "cryptography" do
url "https://files.pythonhosted.org/packages/be/60/da377e1bed002716fb2d5d1d1cab720f298cb33ecff7bf7adea72788e4e4/cryptography-2.8.tar.gz"
sha256 "3cda1f0ed8747339bbdf71b9f38ca74c7b592f24f65cdb3ab3765e4b02871651"
end
resource "fabric" do
url "https://files.pythonhosted.org/packages/f9/14/e81b9425d450de0f34d8c49b46133aa5554a7f4f1f1f2e7857e66dfa270b/fabric-2.5.0.tar.gz"
sha256 "24842d7d51556adcabd885ac3cf5e1df73fc622a1708bf3667bf5927576cdfa6"
end
resource "humanfriendly" do
url "https://files.pythonhosted.org/packages/26/71/e7daf57e819a70228568ff5395fdbc4de81b63067b93167e07825fcf0bcf/humanfriendly-4.18.tar.gz"
sha256 "33ee8ceb63f1db61cce8b5c800c531e1a61023ac5488ccde2ba574a85be00a85"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/ad/13/eb56951b6f7950cadb579ca166e448ba77f9d24efc03edd7e55fa57d04b7/idna-2.8.tar.gz"
sha256 "c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"
end
resource "invoke" do
url "https://files.pythonhosted.org/packages/bc/c6/2b1d2ec1b30e570c548fa841ab729ddb83bddf08100082d263c080faa5c3/invoke-1.3.0.tar.gz"
sha256 "c52274d2e8a6d64ef0d61093e1983268ea1fc0cd13facb9448c4ef0c9a7ac7da"
end
resource "isodate" do
url "https://files.pythonhosted.org/packages/b1/80/fb8c13a4cd38eb5021dc3741a9e588e4d1de88d895c1910c6fc8a08b7a70/isodate-0.6.0.tar.gz"
sha256 "2e364a3d5759479cdb2d37cce6b9376ea504db2ff90252a2e5b7cc89cc9ff2d8"
end
resource "javaproperties" do
url "https://files.pythonhosted.org/packages/db/43/58b89453727acdcf07298fe0f037e45b3988e5dcc78af5dce6881d0d2c5e/javaproperties-0.5.1.tar.gz"
sha256 "2b0237b054af4d24c74f54734b7d997ca040209a1820e96fb4a82625f7bd40cf"
end
resource "Jinja2" do
url "https://files.pythonhosted.org/packages/7b/db/1d037ccd626d05a7a47a1b81ea73775614af83c2b3e53d86a0bb41d8d799/Jinja2-2.10.3.tar.gz"
sha256 "9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de"
end
resource "jmespath" do
url "https://files.pythonhosted.org/packages/2c/30/f0162d3d83e398c7a3b70c91eef61d409dea205fb4dc2b47d335f429de32/jmespath-0.9.4.tar.gz"
sha256 "bde2aef6f44302dfb30320115b17d030798de8c4110e28d5cf6cf91a7a31074c"
end
resource "jsmin" do
url "https://files.pythonhosted.org/packages/17/73/615d1267a82ed26cd7c124108c3c61169d8e40c36d393883eaee3a561852/jsmin-2.2.2.tar.gz"
sha256 "b6df99b2cd1c75d9d342e4335b535789b8da9107ec748212706ef7bbe5c2553b"
end
resource "jsondiff" do
url "https://files.pythonhosted.org/packages/64/5c/2b4b0ae4d42cb1b0b1a89ab1c4d9fe02c72461e33a5d02009aa700574943/jsondiff-1.2.0.tar.gz"
sha256 "34941bc431d10aa15828afe1cbb644977a114e75eef6cc74fb58951312326303"
end
resource "knack" do
url "https://files.pythonhosted.org/packages/ef/a7/12ce7ee160923677d15f1d85f60a2615c848e3157d5dd7f99494ef5328f6/knack-0.6.3.tar.gz"
sha256 "b1ac92669641b902e1aef97138666a21b8852f65d83cbde03eb9ddebf82ce121"
end
resource "MarkupSafe" do
url "https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz"
sha256 "29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"
end
resource "mock" do
url "https://files.pythonhosted.org/packages/0c/53/014354fc93c591ccc4abff12c473ad565a2eb24dcd82490fae33dbf2539f/mock-2.0.0.tar.gz"
sha256 "b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba"
end
resource "msrest" do
url "https://files.pythonhosted.org/packages/08/5d/e22f815bc27ae7f9ae9eb22c0cd35f103ac481aa7d0cf644b7ea10f368f3/msrest-0.6.10.tar.gz"
sha256 "f5153bfe60ee757725816aedaa0772cbfe0bddb52cd2d6db4cb8b4c3c6c6f928"
end
resource "msrestazure" do
url "https://files.pythonhosted.org/packages/49/47/2d0c09401619b74a04eff1cdcbc56066aaa9cc8d5ff8b4e158a4952f27ff/msrestazure-0.6.2.tar.gz"
sha256 "fecb6a72a3eb5483e4deff38210d26ae42d3f6d488a7a275bd2423a1a014b22c"
end
resource "oauthlib" do
url "https://files.pythonhosted.org/packages/fc/c7/829c73c64d3749da7811c06319458e47f3461944da9d98bb4df1cb1598c2/oauthlib-3.1.0.tar.gz"
sha256 "bee41cc35fcca6e988463cacc3bcb8a96224f470ca547e697b604cc697b2f889"
end
resource "paramiko" do
url "https://files.pythonhosted.org/packages/54/68/dde7919279d4ecdd1607a7eb425a2874ccd49a73a5a71f8aa4f0102d3eb8/paramiko-2.6.0.tar.gz"
sha256 "f4b2edfa0d226b70bd4ca31ea7e389325990283da23465d572ed1f70a7583041"
end
resource "pbr" do
url "https://files.pythonhosted.org/packages/99/f1/7807d3409c79905a907f1c616d910c921b2a8e73c17b2969930318f44777/pbr-5.4.3.tar.gz"
sha256 "2c8e420cd4ed4cec4e7999ee47409e876af575d4c35a45840d59e8b5f3155ab8"
end
resource "portalocker" do
url "https://files.pythonhosted.org/packages/26/2b/b9388a8747452c5e387d39424480b9833bf6dad0152d184dbc45b600be76/portalocker-1.5.2.tar.gz"
sha256 "dac62e53e5670cb40d2ee4cdc785e6b829665932c3ee75307ad677cf5f7d2e9f"
end
resource "psutil" do
url "https://files.pythonhosted.org/packages/03/9a/95c4b3d0424426e5fd94b5302ff74cea44d5d4f53466e1228ac8e73e14b4/psutil-5.6.5.tar.gz"
sha256 "d051532ac944f1be0179e0506f6889833cf96e466262523e57a871de65a15147"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz"
sha256 "a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
end
resource "pydocumentdb" do
url "https://files.pythonhosted.org/packages/cf/53/310ef5bd836e54f8a8c3d4da8c9a8c9b21c6bb362665e018eb27c41a1518/pydocumentdb-2.3.3.tar.gz"
sha256 "77c8da2b50920442da42f13b2cb9ff0a4062a982b26d9381ba30b12bcc1b97b9"
end
resource "Pygments" do
url "https://files.pythonhosted.org/packages/7e/ae/26808275fc76bf2832deb10d3a3ed3107bc4de01b85dcccbe525f2cd6d1e/Pygments-2.4.2.tar.gz"
sha256 "881c4c157e45f30af185c1ffe8d549d48ac9127433f2c380c24b84572ad66297"
end
resource "PyJWT" do
url "https://files.pythonhosted.org/packages/2f/38/ff37a24c0243c5f45f5798bd120c0f873eeed073994133c084e1cf13b95c/PyJWT-1.7.1.tar.gz"
sha256 "8d59a976fb773f3e6a39c85636357c4f0e242707394cadadd9814f5cbaa20e96"
end
resource "PyNaCl" do
url "https://files.pythonhosted.org/packages/61/ab/2ac6dea8489fa713e2b4c6c5b549cc962dd4a842b5998d9e80cf8440b7cd/PyNaCl-1.3.0.tar.gz"
sha256 "0c6100edd16fefd1557da078c7a31e7b7d7a52ce39fdca2bec29d4f7b6e7600c"
end
resource "pyOpenSSL" do
url "https://files.pythonhosted.org/packages/0d/1d/6cc4bd4e79f78be6640fab268555a11af48474fac9df187c3361a1d1d2f0/pyOpenSSL-19.1.0.tar.gz"
sha256 "9a24494b2602aaf402be5c9e30a0b82d4a5c67528fe8fb475e3f3bc00dd69507"
end
resource "python-dateutil" do
url "https://files.pythonhosted.org/packages/be/ed/5bbc91f03fa4c839c4c7360375da77f9659af5f7086b7a7bdda65771c8e0/python-dateutil-2.8.1.tar.gz"
sha256 "73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"
end
resource "pytz" do
url "https://files.pythonhosted.org/packages/df/d5/3e3ff673e8f3096921b3f1b79ce04b832e0100b4741573154b72b756a681/pytz-2019.1.tar.gz"
sha256 "d747dd3d23d77ef44c6a3526e274af6efeb0a6f1afd5a69ba4d5be4098c8e141"
end
resource "PyYAML" do
url "https://files.pythonhosted.org/packages/e3/e8/b3212641ee2718d556df0f23f78de8303f068fe29cdaa7a91018849582fe/PyYAML-5.1.2.tar.gz"
sha256 "01adf0b6c6f61bd11af6e10ca52b7d4057dd0be0343eb9283c878cf3af56aee4"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/01/62/ddcf76d1d19885e8579acb1b1df26a852b03472c0e46d2b959a714c90608/requests-2.22.0.tar.gz"
sha256 "11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4"
end
resource "requests-oauthlib" do
url "https://files.pythonhosted.org/packages/23/eb/68fc8fa86e0f5789832f275c8289257d8dc44dbe93fce7ff819112b9df8f/requests-oauthlib-1.3.0.tar.gz"
sha256 "b4261601a71fd721a8bd6d7aa1cc1d6a8a93b4a9f5e96626f8e4d91e8beeaa6a"
end
resource "scp" do
url "https://files.pythonhosted.org/packages/05/e0/ac4169e773e12a08d941ca3c006cb8c91bee9d6d80328a15af850b5e7480/scp-0.13.2.tar.gz"
sha256 "ef9d6e67c0331485d3db146bf9ee9baff8a48f3eb0e6c08276a8584b13bf34b3"
end
resource "six" do
url "https://files.pythonhosted.org/packages/94/3e/edcf6fef41d89187df7e38e868b2dd2182677922b600e880baad7749c865/six-1.13.0.tar.gz"
sha256 "30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"
end
resource "sshtunnel" do
url "https://files.pythonhosted.org/packages/c5/5c/4b320d7ec4b0d5d4d6df1fdf66a5799625b3623d0ce4efe81719c6f8dfb3/sshtunnel-0.1.5.tar.gz"
sha256 "c813fdcda8e81c3936ffeac47cb69cfb2d1f5e77ad0de656c6dab56aeebd9249"
end
resource "tabulate" do
url "https://files.pythonhosted.org/packages/c4/41/523f6a05e6dc3329a5660f6a81254c6cd87e5cfb5b7482bae3391d86ec3a/tabulate-0.8.6.tar.gz"
sha256 "5470cc6687a091c7042cee89b2946d9235fe9f6d49c193a4ae2ac7bf386737c8"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/ad/fc/54d62fa4fc6e675678f9519e677dfc29b8964278d75333cf142892caf015/urllib3-1.25.7.tar.gz"
sha256 "f3c5fd51747d450d4dcf6f923c81f78f811aab8205fda64b0aba34a4e48b0745"
end
resource "vsts" do
url "https://files.pythonhosted.org/packages/ce/fa/4405cdb2a6b0476a94b24254cdfb1df7ff43138a91ccc79cd6fc877177af/vsts-0.1.25.tar.gz"
sha256 "da179160121f5b38be061dbff29cd2b60d5d029b2207102454d77a7114e64f97"
end
resource "vsts-cd-manager" do
url "https://files.pythonhosted.org/packages/fc/cd/29c798a92d5f7a718711e4beace03612c93ad7ec2121aea606d8abae38ee/vsts-cd-manager-1.0.2.tar.gz"
sha256 "0bb09059cd553e1c206e92ef324cb0dcf92334846d646c44c684f6256b86447b"
end
resource "websocket_client" do
url "https://files.pythonhosted.org/packages/c5/01/8c9c7de6c46f88e70b5a3276c791a2be82ae83d8e0d0cc030525ee2866fd/websocket_client-0.56.0.tar.gz"
sha256 "1fd5520878b68b84b5748bb30e592b10d0a91529d5383f74f4964e72b297fd3a"
end
resource "wheel" do
url "https://files.pythonhosted.org/packages/fa/b4/f9886517624a4dcb81a1d766f68034344b7565db69f13d52697222daeb72/wheel-0.30.0.tar.gz"
sha256 "9515fe0a94e823fd90b08d22de45d7bde57c90edce705b22f5e1ecf7e1b653c8"
end
resource "xmltodict" do
url "https://files.pythonhosted.org/packages/58/40/0d783e14112e064127063fbf5d1fe1351723e5dfe9d6daad346a305f6c49/xmltodict-0.12.0.tar.gz"
sha256 "50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"
end
def install
# Work around Xcode 11 clang bug
# https://code.videolan.org/videolan/libbluray/issues/20
ENV.append_to_cflags "-fno-stack-check" if DevelopmentTools.clang_build_version >= 1010
venv = virtualenv_create(libexec, "python3")
venv.pip_install resources
# Get the CLI components we'll install
components = [
buildpath/"src/azure-cli",
buildpath/"src/azure-cli-telemetry",
buildpath/"src/azure-cli-core",
buildpath/"src/azure-cli-nspkg",
buildpath/"src/azure-cli-command_modules-nspkg",
]
# Install CLI
components.each do |item|
cd item do
venv.pip_install item
end
end
(bin/"az").write <<~EOS
#!/usr/bin/env bash
#{libexec}/bin/python -m azure.cli \"$@\"
EOS
bash_completion.install "az.completion" => "az"
end
test do
json_text = shell_output("#{bin}/az cloud show --name AzureCloud")
azure_cloud = JSON.parse(json_text)
assert_equal azure_cloud["name"], "AzureCloud"
assert_equal azure_cloud["endpoints"]["management"], "https://management.core.windows.net/"
assert_equal azure_cloud["endpoints"]["resourceManager"], "https://management.azure.com/"
end
end
| 50.646132 | 164 | 0.826313 |
edc2c937e5c76da098ecc58f2bdc0b863fb9ff35 | 2,456 | require 'rails_helper'
RSpec.describe Course, type: :model do
let(:course) {
Course.create(
name: "Puppy Beginner",
length: 8,
cost: 250
)
}
it "is valid with a name, length, and cost" do
expect(course).to be_valid
end
it "has many tricks through trick_course" do
trick = Trick.create(name: "sit", description: "Dog sits down", difficulty_rating: 1)
course.tricks << trick
expect(course.tricks.first).to eq(trick)
end
it "has many training_sessions" do
trainer = Trainer.create(username: "WD_Train", first_name: "Walt", last_name: "Disney", phone_number: "555-555-5556", email: "[email protected]", certification: "Top Dog Academy", password: "password")
training_session = TrainingSession.create(starts_at: Time.current, location: "Fischer Park", trainer_id: trainer.id, course_id: course.id)
expect(course.training_sessions.first).to eq(training_session)
end
it "has many dogs through training sessions" do
trainer = Trainer.create(username: "WD_Train", first_name: "Walt", last_name: "Disney", phone_number: "555-555-5556", email: "[email protected]", certification: "Top Dog Academy", password: "password")
training_session = TrainingSession.create(starts_at: Time.current, location: "Fischer Park", trainer_id: trainer.id, course_id: course.id)
user = User.create(username: "Alex", first_name: "Alex", last_name: "Rodriguez", phone_number: "123-456-7890", email: "[email protected]", password: "password")
dog = Dog.create(name: "Pitty", breed: "pitbull", age: 2, shots: true, last_shot_date: Time.current, user_id: user.id)
training_session.dogs << dog
expect(course.dogs.first).to eq(dog)
end
it "has many users through dogs" do
trainer = Trainer.create(username: "WD_Train", first_name: "Walt", last_name: "Disney", phone_number: "555-555-5556", email: "[email protected]", certification: "Top Dog Academy", password: "password")
training_session = TrainingSession.create(starts_at: Time.current, location: "Fischer Park", trainer_id: trainer.id, course_id: course.id)
user = User.create(username: "Alex", first_name: "Alex", last_name: "Rodriguez", phone_number: "123-456-7890", email: "[email protected]", password: "password")
dog = Dog.create(name: "Pitty", breed: "pitbull", age: 2, shots: true, last_shot_date: Time.current, user_id: user.id)
training_session.dogs << dog
expect(course.dogs.first.user).to eq(user)
end
end
| 52.255319 | 203 | 0.710505 |
1aa0a1e0e93a7044ace06c7167d7e63a98fc078c | 96 | module Accela
class SalutationTranslator < Translator
include SimpleTranslation
end
end
| 16 | 41 | 0.802083 |
6a47a3210f5bf056627320e1a2bd2d640d80c441 | 926 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
require "arrow"
require "gandiva/version"
require "gandiva/loader"
module Gandiva
class Error < StandardError
end
Loader.load
end
| 30.866667 | 62 | 0.768898 |
2820cfae19df484235b246da16aab8fd392da7ab | 564 | module Admin
class ArticlesController < Admin::ApplicationController
# To customize the behavior of this controller,
# simply overwrite any of the RESTful actions. For example:
#
# def index
# super
# @resources = Article.all.paginate(10, params[:page])
# end
# Define a custom finder by overriding the `find_resource` method:
# def find_resource(param)
# Article.find_by!(slug: param)
# end
# See https://administrate-docs.herokuapp.com/customizing_controller_actions
# for more information
end
end
| 28.2 | 80 | 0.691489 |
bfbc1c4a95c288fed8e78909fa944e49be68833f | 74 | #! /usr/bin/env ruby
require_relative 'core'
grexec('./hello-world.sh')
| 12.333333 | 26 | 0.689189 |
62fc438f20d45dc9f38192431690420a8a4d9870 | 1,396 | # Copyright:: 2016 Nordstrom, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
describe command('git --version') do
# git version 2.8.1.windows.1
its(:stdout) { should match(/git version \d+\.\d+\.\d+/) }
end
describe command('git config --get credential.helper') do
# verify git-credential-manager-for-windows is configured correctly
its(:stdout) { should match(/manager/) }
end
git_credential_mgr = File.join(
'$env:LOCALAPPDATA',
'Programs\Microsoft Git Credential Manager for Windows',
'git-credential-manager.exe'
)
describe command("& \"#{git_credential_mgr}\" version") do
its(:exit_status) { should eq(0) }
its(:stderr) { should eq '' } unless ENV['APPVEYOR']
its(:stderr) { should_not include('CommandNotFound') } if ENV['APPVEYOR']
end
# PoshGit
# describe powershell('test-path Function:\PoshGitPrompt') do
# its(:stdout) { should match(/^True\R/) }
# end
| 34.04878 | 75 | 0.722063 |
5d0434f9a246dc98c7151dd7d087dddc70d62201 | 1,110 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Mrw
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
end
end
| 41.111111 | 99 | 0.732432 |
d54f9371fbe8e4e605a99eee9c072a8772060d1f | 3,144 | require 'mime/types'
module API
# Projects API
class Branches < Grape::API
before { authenticate! }
before { authorize! :download_code, user_project }
resource :projects do
# Get a project repository branches
#
# Parameters:
# id (required) - The ID of a project
# Example Request:
# GET /projects/:id/repository/branches
get ":id/repository/branches" do
present user_project.repo.heads.sort_by(&:name), with: Entities::RepoObject, project: user_project
end
# Get a single branch
#
# Parameters:
# id (required) - The ID of a project
# branch (required) - The name of the branch
# Example Request:
# GET /projects/:id/repository/branches/:branch
get ':id/repository/branches/:branch', requirements: { branch: /.*/ } do
@branch = user_project.repo.heads.find { |item| item.name == params[:branch] }
not_found!("Branch does not exist") if @branch.nil?
present @branch, with: Entities::RepoObject, project: user_project
end
# Protect a single branch
#
# Parameters:
# id (required) - The ID of a project
# branch (required) - The name of the branch
# Example Request:
# PUT /projects/:id/repository/branches/:branch/protect
put ':id/repository/branches/:branch/protect',
requirements: { branch: /.*/ } do
authorize_admin_project
@branch = user_project.repository.find_branch(params[:branch])
not_found! unless @branch
protected_branch = user_project.protected_branches.find_by(name: @branch.name)
user_project.protected_branches.create(name: @branch.name) unless protected_branch
present @branch, with: Entities::RepoObject, project: user_project
end
# Unprotect a single branch
#
# Parameters:
# id (required) - The ID of a project
# branch (required) - The name of the branch
# Example Request:
# PUT /projects/:id/repository/branches/:branch/unprotect
put ':id/repository/branches/:branch/unprotect',
requirements: { branch: /.*/ } do
authorize_admin_project
@branch = user_project.repository.find_branch(params[:branch])
not_found! unless @branch
protected_branch = user_project.protected_branches.find_by(name: @branch.name)
protected_branch.destroy if protected_branch
present @branch, with: Entities::RepoObject, project: user_project
end
# Create branch
#
# Parameters:
# id (required) - The ID of a project
# branch_name (required) - The name of the branch
# ref (required) - Create branch from commit sha or existing branch
# Example Request:
# POST /projects/:id/repository/branches
post ":id/repository/branches" do
authorize_push_project
@branch = CreateBranchService.new.execute(user_project, params[:branch_name], params[:ref], current_user)
present @branch, with: Entities::RepoObject, project: user_project
end
end
end
end
| 34.933333 | 113 | 0.643448 |
3979f36757294eda698298161f11cf66dda9d8cc | 463 | def upgrade ta, td, a, d
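  # Fill in the two keys from ta when the node's attributes do not have them yet.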
unless a.key? "domain_specific_drivers"
a["domain_specific_drivers"] = ta["domain_specific_drivers"]
end
unless a.key? "domain_config_dir"
a["domain_config_dir"] = ta["domain_config_dir"]
end
return a, d
end
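# Reverse of upgrade: delete the two keys again when ta no longer defines them.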
def downgrade ta, td, a, d
unless ta.key? "domain_specific_drivers"
a.delete("domain_specific_drivers")
end
unless ta.key? "domain_config_dir"
a.delete("domain_config_dir")
end
return a, d
end
| 23.15 | 64 | 0.712743 |
333f97604073e0ddb633ce73d4150e99a6b4d1c2 | 3,366 | # Copyright (c) 2009 ngmoco:)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require 'tests_helper'
class TestPlistReading < Test::Unit::TestCase
context "PList decoding" do
should "be able to open a basic string plist." do
assert_equal 'Just a String', RubyCF::PList.parse(File.read('test/test_assets/String.plist'))
end
should "be able to open a basic Number plist" do
assert_equal 1337, RubyCF::PList.parse_file('test/test_assets/Integer.plist')
assert_equal 1337.4, RubyCF::PList.parse_file(File.open('test/test_assets/Float.plist'))
end
should "be able to open a basic Date plist" do
# This isn't 100% accurate. If you need subsecond accuracy, cfdate.c needs to be reworked.
assert_equal Time.mktime(2008, "dec", 25).ctime, RubyCF::PList.parse(File.read('test/test_assets/Date.plist')).ctime
end
should "be able to open a basic Boolean plist" do
assert_equal true, RubyCF::PList.parse(File.read('test/test_assets/Boolean.plist'))
end
should "be able to open a Data plist" do
assert_equal RubyCF::Data.from_file('test/test_assets/photo.jpg'), RubyCF::PList.parse_file('test/test_assets/Data.plist')
end
should "be able to open a Dictionary plist" do
assert_equal({'STRING' => 'A', 'INT' => 2, 'BOOL' => false, 'FLOAT' => 10.1}, RubyCF::PList.parse(File.read('test/test_assets/Dictionary.plist')))
end
should "be able to open an Array plist" do
assert_equal([false, 10.1, 2, 'A'], RubyCF::PList.parse(File.read('test/test_assets/Array.plist')))
end
should "be able to open a plist with multiple, nested structures." do
struct = {
'array' => [1, 2, 3, 4],
'hash' => {'foo' => 'bar', 'baz' => 10.4}
}
assert_equal(struct, RubyCF::PList.parse(File.read('test/test_assets/Complex.plist')))
end
should "be marked tainted" do
assert(RubyCF::PList.parse(File.read('test/test_assets/Array.plist')).tainted?, 'Array')
assert(RubyCF::PList.parse(File.read('test/test_assets/Data.plist')).tainted?, 'Data')
assert(RubyCF::PList.parse(File.read('test/test_assets/Dictionary.plist')).tainted?, 'Dictionary')
assert(RubyCF::PList.parse(File.read('test/test_assets/String.plist')).tainted?, 'String')
end
end
end | 44.88 | 152 | 0.699346 |
114509229c5d23c07dfcb34d9906aaf72d8cf24e | 1,858 | # frozen_string_literal: true
require 'spec_helper'
describe Projects::MoveLfsObjectsProjectsService do
let!(:user) { create(:user) }
let!(:project_with_lfs_objects) { create(:project, namespace: user.namespace) }
let!(:target_project) { create(:project, namespace: user.namespace) }
subject { described_class.new(target_project, user) }
before do
create_list(:lfs_objects_project, 3, project: project_with_lfs_objects)
end
describe '#execute' do
it 'links the lfs objects from existent in source project' do
expect(target_project.lfs_objects.count).to eq 0
subject.execute(project_with_lfs_objects)
expect(project_with_lfs_objects.reload.lfs_objects.count).to eq 0
expect(target_project.reload.lfs_objects.count).to eq 3
end
it 'does not link existent lfs_object in the current project' do
target_project.lfs_objects << project_with_lfs_objects.lfs_objects.first(2)
expect(target_project.lfs_objects.count).to eq 2
subject.execute(project_with_lfs_objects)
expect(target_project.lfs_objects.count).to eq 3
end
it 'rollbacks changes if transaction fails' do
allow(subject).to receive(:success).and_raise(StandardError)
expect { subject.execute(project_with_lfs_objects) }.to raise_error(StandardError)
expect(project_with_lfs_objects.lfs_objects.count).to eq 3
expect(target_project.lfs_objects.count).to eq 0
end
context 'when remove_remaining_elements is false' do
let(:options) { { remove_remaining_elements: false } }
it 'does not remove remaining lfs objects' do
target_project.lfs_objects << project_with_lfs_objects.lfs_objects.first(2)
subject.execute(project_with_lfs_objects, options)
expect(project_with_lfs_objects.lfs_objects.count).not_to eq 0
end
end
end
end
| 32.034483 | 88 | 0.743811 |
e9dadefec24a6f5e0842a683a8b907341da0b098 | 2,076 | $:.unshift File.dirname(__FILE__)
require 'bayes_utils'
module Weka
module Classifier
#This module stores the classifiers from the 'weka.classifiers.bayes' package
module Bayes
java_import "weka.classifiers.bayes.NaiveBayes"
java_import "weka.classifiers.bayes.BayesianLogisticRegression"
java_import "weka.classifiers.bayes.AODE"
java_import "weka.classifiers.bayes.ComplementNaiveBayes"
java_import "weka.classifiers.bayes.WAODE"
class NaiveBayes
include Bayes_utils
class Base < NaiveBayes
def initialize(&block)
super
if block_given?
init_instance_classifier(&block)
else
init_classifier
end
end
end
end
class AODE
include Bayes_utils
class Base < AODE
def initialize(&block)
super
if block_given?
init_instance_classifier(&block)
else
init_classifier
end
end
end
end
class BayesianLogisticRegression
include Bayes_utils
class Base < BayesianLogisticRegression
def initialize(&block)
super
if block_given?
init_instance_classifier(&block)
else
init_classifier
end
end
end
end
class ComplementNaiveBayes
include Bayes_utils
class Base < ComplementNaiveBayes
def initialize(&block)
super
if block_given?
init_instance_classifier(&block)
else
init_classifier
end
end
end
end
class WAODE
include Bayes_utils
class Base < WAODE
def initialize(&block)
super
if block_given?
init_instance_classifier(&block)
else
init_classifier
end
end
end
end
end
end
end | 24.139535 | 81 | 0.55106 |
e26cf05a6f47df8eb8152c938f5d8aa43adc0fbd | 18,538 | require 'ratistics/collection'
require 'ratistics/math'
require 'ratistics/sort'
module Ratistics
# Various probability computation functions.
module Probability
extend self
# Calculates the statistical frequency.
#
# When a block is given the block will be applied to every element
# in the data set. Using a block in this way allows computation against
# a specific field in a data set of hashes or objects.
#
# The return value is a hash where the keys are the data elements
# from the sample and the values are the corresponding frequencies.
# When the *:as* option is set to *:array* the return value will
# be an array of arrays. Each element of the outer array will be
# a two-element array with the sample value at index 0 and the
# corresponding frequency at index 1.
#
# @example
# sample = [13, 18, 13, 14, 13, 16, 14, 21, 13]
# Ratistics.frequency(sample) #=> {13=>4, 18=>1, 14=>2, 16=>1, 21=>1}
# Ratistics.frequency(sample, :as => :array) #=> [[13, 4], [18, 1], [14, 2], [16, 1], [21, 1]]
#
# @yield iterates over each element in the data set
# @yieldparam item each element in the data set
#
# @param [Enumerable] data the data to perform the calculation against
# @param [Hash] opts processing options
#
# @option opts [Symbol] :as sets the output to :hash/:map or
# :array/:catalog/:catalogue (default :hash)
#
# @return [Hash, Array, nil] the statistical frequency of the given
# data set or nil if the data set is empty
def frequency(data, opts={})
return nil if data.nil? || data.empty?
freq = data.reduce({}) do |memo, datum|
datum = yield(datum) if block_given?
memo[datum] = memo[datum].to_i + 1
memo
end
if (opts[:as] == :array || opts[:as] == :catalog || opts[:as] == :catalogue)
freq = Collection.catalog_hash(freq)
end
return freq
end
# Calculates the statistical probability.
#
# When a block is given the block will be applied to every element
# in the data set. Using a block in this way allows computation against
# a specific field in a data set of hashes or objects.
#
# @example
# sample = [13, 18, 13, 14, 13, 16, 14, 21, 13]
# Ratistics.probability(sample) #=> {13=>0.4444444444444444, 18=>0.1111111111111111, 14=>0.2222222222222222, 16=>0.1111111111111111, 21=>0.1111111111111111}
# Ratistics.probability(sample, :as => :array) #=> [[13, 0.4444444444444444], [18, 0.1111111111111111], [14, 0.2222222222222222], [16, 0.1111111111111111], [21, 0.1111111111111111]]
# Ratistics.probability(sample, :as => :catalog) #=> [[13, 0.4444444444444444], [18, 0.1111111111111111], [14, 0.2222222222222222], [16, 0.1111111111111111], [21, 0.1111111111111111]]
#
# @yield iterates over each element in the data set
# @yieldparam item each element in the data set
#
# @param [Enumerable] data the data to perform the calculation against
# @param [Hash] opts processing options
# @param [Block] block optional block for per-item processing
#
# @option opts [Symbol] :from describes the nature of the data.
# :sample indicates *data* is a raw data sample, :frequency
# (or :freq) indicates *data* is a frequency distribution
# created from the #frequency function. (default :sample)
#
# @option opts [Symbol] :as sets the output to :hash or :array
# (default :hash)
#
# @return [Array, Hash, nil] the statistical probability of the given data set
# or nil if the data set is empty
#
# @see #frequency
def probability(data, opts={}, &block)
return nil if data.nil? || data.empty?
from_frequency = (opts[:from] == :frequency || opts[:from] == :freq)
if from_frequency
count = data.reduce(0) do |n, key, value|
key, value = key if key.is_a? Array
key = yield(key) if block_given?
n + value
end
else
count = data.count
data = frequency(data, &block)
end
prob = data.reduce({}) do |memo, key, value|
key, value = key if key.is_a? Array
key = yield(key) if from_frequency && block_given?
memo[key] = value.to_f / count.to_f
memo
end
if opts[:inc] || opts[:increment] || opts[:incremental]
base = 0
prob.keys.sort.each do |key|
prob[key] = base = prob[key] + base
end
end
if (opts[:as] == :array || opts[:as] == :catalog || opts[:as] == :catalogue)
prob = Collection.catalog_hash(prob)
end
return prob
end
alias :pmf :probability
# Normalize a probability distribution sample.
#
# The data set must be formatted as output by the #probability
# method. Specifically, a hash where each hash key is a datum from
# the original data set and each hash value is the probability
# associated with that datum. A probability hash may become
# denormalized when performing conditional probability.
#
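    # An illustrative example (the input hash below is assumed, and the printed
    # result is approximate since the exact floats depend on the arithmetic):
    #
    # @example
    #   pmf = {13 => 0.6, 14 => 0.2, 16 => 0.1}  # sums to 0.9, so it is denormalized
    #   Ratistics.normalize_probability(pmf) #=> roughly {13=>0.667, 14=>0.222, 16=>0.111}
    #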
# @see #probability
#
# @param [Enumerable] pmf the data to perform the calculation against
#
# @return [Hash] a new, normalized probability distribution.
def normalize_probability(pmf, opts={})
total = pmf.values.reduce(0.0){|n, value| n + value}
return { pmf.keys.first => 1 } if pmf.count == 1
return pmf if Math.delta(total, 1.0) < 0.01
factor = 1.0 / total.to_f
normalized = pmf.reduce({}) do |memo, pair|
memo[pair[0]] = pair[1] * factor
memo
end
return normalized
end
alias :normalize_pmf :normalize_probability
# Calculates the statistical mean of a probability distribution.
# Accepts a block for processing individual items.
#
# When a block is given the block will be applied to every element
# in the data set. Using a block in this way allows computation against
# a specific field in a data set of hashes or objects.
#
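    # An illustrative example (the sample is reused from the #frequency example
    # above; the printed result is approximate):
    #
    # @example
    #   sample = [13, 18, 13, 14, 13, 16, 14, 21, 13]
    #   Ratistics.probability_mean(sample) #=> roughly 15.0
    #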
# @yield iterates over each element in the data set
# @yieldparam item each element in the data set
#
# @param [Enumerable] data the data to perform the calculation against
# @param [Hash] opts processing options
# @param [Block] block optional block for per-item processing
#
# @option opts [Symbol] :from describes the nature of the data.
# :sample indicates *data* is a raw data sample, :frequency
# (or :freq) indicates *data* is a frequency distribution
# created from the #frequency function, and :probability
# (or :prob) indicates the data is a probability distribution
# created by the #probability function. (default :sample)
#
# @return [Float, 0] the statistical mean of the given data set
# or zero if the data set is empty
#
# @see #frequency
# @see #probability
def probability_mean(data, opts={}, &block)
return 0 if data.nil? || data.empty?
from_probability = (opts[:from] == :probability || opts[:from] == :prob)
unless from_probability
data = probability(data, :from => opts[:from], &block)
end
mean = data.reduce(0.0) do |n, key, value|
key, value = key if key.is_a? Array
key = yield(key) if from_probability and block_given?
n + (key * value)
end
return mean
end
alias :pmf_mean :probability_mean
alias :frequency_mean :probability_mean
# Calculates the statistical variance of a probability distribution.
# Accepts a block for processing individual items in a raw data
# sample (:from => :sample).
#
# When a block is given the block will be applied to every element
# in the data set. Using a block in this way allows computation against
# a specific field in a data set of hashes or objects.
#
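    # An illustrative example (the sample is reused from the #frequency example
    # above; the printed result is approximate):
    #
    # @example
    #   sample = [13, 18, 13, 14, 13, 16, 14, 21, 13]
    #   Ratistics.probability_variance(sample) #=> roughly 7.11
    #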
# @yield iterates over each element in the data set
# @yieldparam item each element in the data set
#
# @param [Enumerable] data the data to perform the calculation against
# @param [Hash] opts processing options
# @param [Block] block optional block for per-item processing
#
# @option opts [Symbol] :from describes the nature of the data.
# :sample indicates *data* is a raw data sample, :frequency
# (or :freq) indicates *data* is a frequency distribution
# created from the #frequency function, and :probability
# (or :prob) indicates the data is a probability distribution
# created by the #probability function. (default :sample)
#
# @return [Float, 0] the statistical variance of the given data set
# or zero if the data set is empty
#
# @see #probability
def probability_variance(data, opts={}, &block)
return 0 if data.nil? || data.empty?
if opts[:from] == :probability || opts[:from] == :prob
from_probability = true
else
data = probability(data, :from => opts[:from], &block)
from_probability = false
end
mean = data.reduce(0.0) do |n, key, value|
key, value = key if key.is_a? Array
key = yield(key) if from_probability && block_given?
n + (key * value)
end
variance = data.reduce(0.0) do |n, key, value|
key, value = key if key.is_a? Array
key = yield(key) if from_probability && block_given?
n + (value * ((key - mean) ** 2))
end
return variance
end
alias :pmf_variance :probability_variance
# Calculate the probability that a random variable will be at or below
# a given value based on the given sample (aka cumulative distribution
# function, CDF).
#
# 0 <= P <= 1
#
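    # An illustrative example (the sample is reused from the #frequency example
    # above; 6 of the 9 values are at or below 14, so the result is about 6/9):
    #
    # @example
    #   sample = [13, 18, 13, 14, 13, 16, 14, 21, 13]
    #   Ratistics.cumulative_distribution_function(sample, 14) #=> roughly 0.667
    #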
# Accepts a block for processing individual items in a raw data
# sample (:from => :sample).
#
# When a block is given the block will be applied to every element in
# the data set. Using a block in this way allows probability to be
# computed against a specific field in a data set of hashes or objects.
#
# @yield iterates over each element in the data set
# @yieldparam item each element in the data set
#
# @param [Enumerable] data the data to perform the calculation against
# @param [Hash] opts processing options
#
# @option opts [Symbol] :from describes the nature of the data.
# :sample (the default) indicates *data* is a raw data sample,
# :frequency (or :freq) indicates *data* is a frequency distribution
# created from the #frequency function.
#
# @return [0, Float, 1] the probability of a random variable being at
# or below the given value. Returns zero if the value is lower than
# the lowest value in the sample and one if the value is higher than
# the highest value in the sample. Returns zero for a nil or empty
# sample.
#
# @see #frequency
# @see #cumulative_distribution_function_value
#
# @see http://www.cumulativedistributionfunction.com/
# @see http://en.wikipedia.org/wiki/Cumulative_distribution_function
def cumulative_distribution_function(data, value, opts={})
return 0 if data.nil? || data.empty?
count = 0
if opts[:from] == :frequency || opts[:from] == :freq
size = 0
data.each do |datum, freq|
datum = yield(datum) if block_given?
count = count + freq if datum <= value
size = size + freq
end
else
data.each do |datum|
datum = yield(datum) if block_given?
count = count + 1 if datum <= value
end
size = data.size
end
return 0 if count == 0
return 1 if count == size
return count / size.to_f
end
alias :cdf :cumulative_distribution_function
alias :cumulative_distribution :cumulative_distribution_function
# Inverse of the #cumulative_distribution_function function. For the
# given data sample, return the highest value for a given probability.
#
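    # An illustrative example (the sample is reused from the #frequency example
    # above; the 0.5 case assumes the usual leftmost-insertion bisect behaviour):
    #
    # @example
    #   sample = [13, 18, 13, 14, 13, 16, 14, 21, 13]
    #   Ratistics.cdf_value(sample, 0.0) #=> 13
    #   Ratistics.cdf_value(sample, 1.0) #=> 21
    #   Ratistics.cdf_value(sample, 0.5) #=> 14
    #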
# Accepts a block for processing individual items in a raw data
# sample (:from => :sample).
#
# When a block is given the block will be applied to every element in
# the data set. Using a block in this way allows probability to be
# computed against a specific field in a data set of hashes or objects.
#
# @yield iterates over each element in the data set
# @yieldparam item each element in the data set
#
# @param [Enumerable] data the data to perform the calculation against
# @param [Hash] opts processing options
#
# Will sort the data set using natural sort order unless
# the :sorted option is true or a block is given.
#
    # @option opts [true, false] :sorted indicates if the data is already sorted
#
# @option opts [Symbol] :from describes the nature of the data.
# :sample (the default) indicates *data* is a raw data sample,
# :frequency (or :freq) indicates *data* is a frequency distribution
# created from the #frequency function.
#
# @return [Object] the highest value in the sample for the given probability
#
# @see #frequency
# @see #cumulative_distribution_function
#
# @see http://www.cumulativedistributionfunction.com/
# @see http://en.wikipedia.org/wiki/Cumulative_distribution_function
def cumulative_distribution_function_value(data, prob, opts={}, &block)
return nil if data.nil? || data.empty? || prob < 0 || prob > 1
if (opts[:from].nil? || opts[:from] == :sample) && !(block_given? || opts[:sorted] == true)
data = data.sort
end
if opts[:from].nil? || opts[:from] == :sample
return (block_given? ? yield(data[0]) : data[0]) if prob == 0
return (block_given? ? yield(data[-1]) : data[-1]) if prob == 1
else
return Math.min(data.keys, &block) if prob == 0
return Math.max(data.keys, &block) if prob == 1
end
if opts[:from] == :freq || opts[:from] == :frequency
ps = probability(data, :as => :array, :inc => true, :from => :freq, &block)
else
ps = probability(data, :as => :array, :inc => true, &block)
end
ps = Sort.insertion_sort!(ps){|item| item.first}
index = Collection.bisect_left(ps, prob){|item| item.last}
index = index-1 if prob == ps[index-1].first
return ps[index].first
end
alias :cdf_value :cumulative_distribution_function_value
alias :cumulative_distribution_value :cumulative_distribution_function_value
# Resamples the given sample with replacement (aka bootstrap).
# The resample will have the same number of elements as the original
# sample unless the :size (or :length, :count) option is given.
#
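    # An illustrative sketch (the resample is random, so the values shown are
    # just one possible draw from the sample's values):
    #
    # @example
    #   sample = [13, 18, 13, 14, 13, 16, 14, 21, 13]
    #   Ratistics.sample_with_replacement(sample)             # e.g. [13, 21, 13, 14, 16, 13, 13, 18, 14]
    #   Ratistics.sample_with_replacement(sample, :size => 4) # e.g. [14, 13, 13, 21]
    #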
# @yield iterates over each element in the data set
# @yieldparam item each element in the data set
#
# @param [Enumerable] data the data to perform the calculation against
# @param [Hash] opts processing options
#
# Will sort the data set using natural sort order unless
# the :sorted option is true or a block is given.
#
# @option opts [Integer] :size the size of the resample
#
    # @option opts [true, false] :sorted indicates if the data is already sorted
#
# @option opts [Symbol] :from describes the nature of the data.
# :sample (the default) indicates *data* is a raw data sample,
# :frequency (or :freq) indicates *data* is a frequency distribution
# created from the #frequency function.
#
    # @return [Array] the new resample, drawn with replacement from the given sample
#
# @see #frequency
# @see #cumulative_distribution_function
#
# @see http://en.wikipedia.org/wiki/Resampling_(statistics)
# @see http://en.wikipedia.org/wiki/Bootstrapping_(statistics)
def sample_with_replacement(data, opts={}, &block)
return [] if data.nil? || data.empty?
if (opts[:from].nil? || opts[:from] == :sample) && !(block_given? || opts[:sorted] == true)
data = data.sort
end
if opts[:from] == :freq || opts[:from] == :frequency
ps = probability(data, :as => :array, :inc => true, :from => :freq, &block)
length = data.reduce(0){|length, item| length += item.last }
else
ps = probability(data, :as => :array, :inc => true, &block)
length = opts[:length] || opts[:size] || opts[:count] || data.length
end
ps = Sort.insertion_sort!(ps){|item| item.first}
resample = []
length.times do
prob = rand()
index = Collection.bisect_left(ps, prob){|item| item.last}
index = index-1 if prob == ps[index-1].first
resample << ps[index].first
end
return resample
end
alias :resample_with_replacement :sample_with_replacement
alias :bootstrap :sample_with_replacement
# Resamples the given sample without replacement (aka jackknife).
# The resample will have one half the number of elements as the original
# sample unless the :size (or :length, :count) option is given.
#
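    # An illustrative sketch (the resample is random; without replacement no
    # value is drawn more often than it appears in the original sample):
    #
    # @example
    #   sample = [13, 18, 13, 14, 13, 16, 14, 21, 13]
    #   Ratistics.sample_without_replacement(sample)             # e.g. [16, 13, 21, 14]
    #   Ratistics.sample_without_replacement(sample, :size => 2) # e.g. [13, 18]
    #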
# @yield iterates over each element in the data set
# @yieldparam item each element in the data set
#
# @param [Enumerable] data the data to perform the calculation against
# @param [Hash] opts processing options
#
# @option opts [Integer] :size the size of the resample
#
# @option opts [Symbol] :from describes the nature of the data.
# :sample (the default) indicates *data* is a raw data sample,
# :frequency (or :freq) indicates *data* is a frequency distribution
# created from the #frequency function.
#
    # @return [Array] the new resample, drawn without replacement from the given sample
#
# @see #frequency
# @see #cumulative_distribution_function
#
# @see http://en.wikipedia.org/wiki/Resampling_(statistics)
def sample_without_replacement(data, opts={}, &block)
return [] if data.nil? || data.empty?
if opts[:from] == :freq || opts[:from] == :frequency
data = data.inject([]) do |memo, item|
item.last.times{ memo << item.first }
memo
end
else
data = Collection.collect(data, &block).shuffle!
end
length = opts[:length] || opts[:size] || opts[:count] || (data.length / 2)
return data.slice!(0, length)
end
alias :resample_without_replacement :sample_without_replacement
alias :jackknife :sample_without_replacement
end
end
| 38.380952 | 189 | 0.639335 |
384d1b2a4880411be72988c7c74849e852e9771f | 217 | Rails.application.config.middleware.use OmniAuth::Builder do
# provider :github, ENV['GITHUB_KEY'], ENV['GITHUB_SECRET']
provider :github, 'e34d8260405f64d20dc1', 'cefe52df15831f2eb8e8a12346c633bc4885847d'
end | 54.25 | 88 | 0.792627 |
edb892d518373edeb05a153e8a09db3dc6d17bca | 1,317 | require 'bundler/setup'
require 'open-uri'
require 'rubygems'
require 'nokogiri'
require_relative 'document_info'
class DocumentInfoController
def initialize()
@url = nil
@charset = nil
end
def get (url, username = "", password = "")
#get html
html = open(url, http_basic_authentication: [username, password]) do |f|
f.read
end
@charset = html.encoding.to_s
return html
end
def parse (get)
#parse
doc = Nokogiri::HTML.parse(get, nil, @charset)
doc_t = doc.css('tr')
doc_gr=[]
i = 2
while i < doc_t.length
doc_i=[]
doc_i = doc_t[i].css('td')
doc_g = doc_i[5].children.to_s
doc_r = doc_i[7].children.to_s
doc_g = doc_g.gsub("\n", "")
doc_r = doc_r.gsub("\n", "")
      #if doc_g.include?("講義資料") # "lecture materials"
      if doc_r.scan("-").length != 4 || doc_i[4].children.to_s.include?("議事録") # skip rows marked "議事録" (meeting minutes)
i = i + 1
else
doc_gr.push(DocumentInfo.new(doc_g, doc_r))
i = i + 1
end
end
return doc_gr
end
def url ()
#url
return @url
end
def update_url(url)
#update_url
@url = url
end
end
| 23.517857 | 84 | 0.497342 |
e9452e8f3ce9e0a3e1d9d2fddba3d93a0ef4a8ad | 2,949 | #
# Copyright:: 2018, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class Chef
class Resource
class ChocolateyConfig < Chef::Resource
preview_resource true
resource_name :chocolatey_config
description "Use the chocolatey_config resource to add or remove Chocolatey configuration keys."
introduced "14.3"
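      # A hypothetical recipe-level sketch (the config key and value here are
      # assumed for illustration, not taken from this cookbook):
      #
      #   chocolatey_config 'cacheLocation' do
      #     value 'C:\temp\choco'
      #     action :set
      #   end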
property :config_key, String, name_property: true,
description: "The name of the config. The resource's name will be used if this isn't provided."
property :value, String,
description: "The value to set."
load_current_value do
current_val = fetch_config_element(config_key)
current_value_does_not_exist! if current_val.nil?
config_key config_key
value current_val
end
# @param [String] id the config name
# @return [String] the element's value field
def fetch_config_element(id)
require "rexml/document"
config_file = "#{ENV['ALLUSERSPROFILE']}\\chocolatey\\config\\chocolatey.config"
raise "Could not find the Chocolatey config at #{config_file}!" unless ::File.exist?(config_file)
contents = REXML::Document.new(::File.read(config_file))
data = REXML::XPath.first(contents, "//config/add[@key=\"#{id}\"]")
data ? data.attribute("value").to_s : nil # REXML just returns nil if it can't find anything so avoid an undefined method error
end
action :set do
description "Sets a Chocolatey config value."
raise "#{new_resource}: When adding a Chocolatey config you must pass the 'value' property!" unless new_resource.value
converge_if_changed do
shell_out!(choco_cmd("set"))
end
end
action :unset do
description "Unsets a Chocolatey config value."
if current_resource
converge_by("unset Chocolatey config '#{new_resource.config_key}'") do
shell_out!(choco_cmd("unset"))
end
end
end
action_class do
# @param [String] action the name of the action to perform
# @return [String] the choco config command string
def choco_cmd(action)
cmd = "#{ENV['ALLUSERSPROFILE']}\\chocolatey\\bin\\choco config #{action} --name #{new_resource.config_key}"
cmd << " --value #{new_resource.value}" if action == "set"
cmd
end
end
end
end
end
| 35.107143 | 135 | 0.665649 |
1a2e6762ed1e2db2432fb968cacf6bc35af192e9 | 918 | # RSpec Exercise 2 - From App Academy Open Full Stack Course
# https://open.appacademy.io/learn/full-stack-online/software-engineering-foundations/rspec-exercise-2
# Solution by: Alex Boutilier
#
# Part of 'bootler/fullstack-solutions' repository
# https://github.com/bootler/fullstack-solutions
def partition(arr,num)
return [arr.select { |ele| ele < num },arr.select { |ele| ele >= num }]
end
def merge(h1,h2)
return {**h1,**h2}
end
def censor(sent,curses)
words = sent.split(" ")
words.each do |word|
if curses.any? { |curse| curse == word.downcase }
word.each_char.with_index do |ch,idx|
if "aeiou".include?(ch.downcase)
word[idx] = '*'
end
end
end
end
return words.join(" ")
end
def power_of_two?(num)
if Math.log2(num) % 1 == 0
return true
else
return false
end
end | 26.228571 | 102 | 0.608932 |
f825b8d1dc7d18392e95c22e436a71e53935c30a | 16,117 | namespace :prune_db do
desc "Fixes a mis-match between a vote's prompt_id and its appearance's prompt_id. Sets the appearance prompt_id to match the vote's prompt_id"
task :fix_promptid_mismatch => :environment do
bad_records = Vote.connection.select_all "
SELECT
votes.prompt_id, appearances.id appearance_id,
appearances.prompt_id appearance_prompt_id
FROM votes LEFT JOIN appearances
ON (votes.id = appearances.answerable_id
AND appearances.answerable_type = 'Vote')
WHERE votes.prompt_id <> appearances.prompt_id"
bad_records.each do |record|
Appearance.update_all("prompt_id = #{record["prompt_id"]}", "id = #{record["appearance_id"]} AND prompt_id = #{record["appearance_prompt_id"]}")
end
end
desc "Generate density information for each question - should be run nightly"
task(:generate_density_information => :environment) do
# calculating densities is expensive, so only do it for questions with new data
question_ids = Vote.count(:conditions => ['date(created_at) = ?', Date.yesterday], :group => 'question_id').keys()
Question.find(:all, :conditions => {:id => question_ids}).each do |q|
q.save_densities!
end
# we can just copy the previous night's data for remaining questions
Question.find(:all, :conditions => ['id NOT IN (?)', question_ids]).each do |q|
densities = q.densities.find(:all, :conditions => ['date(created_at) = ?', Date.yesterday])
densities.each do |d|
new_d = d.clone
new_d.created_at = new_d.updated_at = Time.now
new_d.save!
end
if densities.blank?
#fallback in case there wasn't a successful run yesterday
q.save_densities!
end
end
end
desc "Generate appearances for any votes that have no current appearance, should only need to be run once"
task(:generate_appearances_for_existing_votes => :environment) do
votes = Vote.all
count = 0
votes.each do |v|
if v.appearance.nil?
print "."
a = Appearance.create(:voter_id => v.voter_id, :site_id => v.site_id, :prompt_id => v.prompt_id, :question_id => v.question_id, :created_at => v.created_at, :updated_at => v.updated_at)
v.appearance = a
v.save
count += 1
end
end
print count
end
desc "Don't run unless you know what you are doing"
task(:generate_lots_of_votes => :environment) do
if Rails.env.production?
print "You probably don't want to run this in production as it will falsify a bunch of random votes"
end
current_user = User.first
1000.times do |n|
puts "#{n} votes completed" if n % 100 == 0
question = Question.find(214) # test question change as needed
@prompt = question.catchup_choose_prompt(1).first
@appearance = current_user.record_appearance(current_user.default_visitor, @prompt)
direction = (rand(2) == 0) ? "left" : "right"
current_user.record_vote(:prompt => @prompt, :direction => direction, :appearance_lookup => @appearance.lookup)
end
end
desc "Dump votes of a question by left vs right id"
task(:make_csv => :environment) do
q = Question.find(214)
the_prompts = q.prompts_hash_by_choice_ids
#hash_of_choice_ids_from_left_to_right_to_votes
the_hash = {}
q.choices.each do |l|
q.choices.each do |r|
next if l.id == r.id
if not the_hash.has_key?(l.id)
the_hash[l.id] = {}
the_hash[l.id][l.id] = 0
end
p = the_prompts["#{l.id}, #{r.id}"]
if p.nil?
the_hash[l.id][r.id] = 0
else
the_hash[l.id][r.id] = p.appearances.size
end
end
end
the_hash.sort.each do |xval, row|
rowarray = []
row.sort.each do |yval, cell|
rowarray << cell
end
puts rowarray.join(", ")
end
end
desc "Should only need to be run once"
task(:generate_all_possible_prompts => :environment) do
Question.find(:all).each do |q|
choices = q.choices
if q.prompts.size > choices.size**2 - choices.size
print "ERROR: #{q.id}\n"
next
elsif q.prompts.size == choices.size**2 - choices.size
print "#{q.id} has enough prompts, skipping...\n"
next
else
print "#{q.id} should add #{(choices.size ** 2 - choices.size) - q.prompts.size}\n"
end
created_timestring = q.created_at.to_s(:db)
updated_timestring = Time.now.to_s(:db) #isn't rails awesome?
promptscount=0
inserts = []
the_prompts = Prompt.find(:all, :select => 'id, left_choice_id, right_choice_id', :conditions => {:question_id => q.id})
the_prompts_hash = {}
the_prompts.each do |p|
the_prompts_hash["#{p.left_choice_id},#{p.right_choice_id}"] = 1
end
choices.each do |l|
choices.each do |r|
if l.id == r.id
next
else
#p = the_prompts.find{|o| o.left_choice_id == l.id && o.right_choice_id == r.id}
keystring = "#{l.id},#{r.id}"
p = the_prompts_hash[keystring]
if p.nil?
inserts.push("(NULL, #{q.id}, NULL, #{l.id}, '#{created_timestring}', '#{updated_timestring}', NULL, 0, #{r.id}, NULL, NULL)")
promptscount+=1
end
end
end
end
print "Added #{promptscount} to #{q.id}\n"
sql = "INSERT INTO `prompts` (`algorithm_id`, `question_id`, `voter_id`, `left_choice_id`, `created_at`, `updated_at`, `tracking`, `votes_count`, `right_choice_id`, `active`, `randomkey`) VALUES #{inserts.join(', ')}"
unless inserts.empty?
ActiveRecord::Base.connection.execute(sql)
end
Question.update_counters(q.id, :prompts_count => promptscount)
end
end
desc "Generate past density information"
task(:generate_past_densities => :environment) do
#this is not elegant, but should only be run once, so quick and dirty wins
start_date = Vote.find(:all, :conditions => 'loser_choice_id IS NOT NULL', :order => :created_at, :limit => 1).first.created_at.to_date
start_date.upto(Date.today) do |the_date|
questions = Question.find(:all, :conditions => ['created_at < ?', the_date])
print the_date.to_s
questions.each do |q|
puts q.id
relevant_choices = q.choices.find(:all, :conditions => ['created_at < ?', the_date])
seed_choices = 0
if relevant_choices == 0
next
#this question had not been created yet
end
relevant_choices.each do |c|
if !c.user_created
seed_choices+=1
end
end
nonseed_choices = relevant_choices.size - seed_choices
seed_seed_total = seed_choices **2 - seed_choices
nonseed_nonseed_total = nonseed_choices **2 - nonseed_choices
seed_nonseed_total = seed_choices * nonseed_choices
nonseed_seed_total = seed_choices * nonseed_choices
seed_seed_sum = 0
seed_nonseed_sum= 0
nonseed_seed_sum= 0
nonseed_nonseed_sum= 0
q.appearances.find_each(:conditions => ['prompt_id IS NOT NULL AND created_at < ?', the_date]) do |a|
p = a.prompt
if p.left_choice.user_created == false && p.right_choice.user_created == false
seed_seed_sum += 1
elsif p.left_choice.user_created == false && p.right_choice.user_created == true
seed_nonseed_sum += 1
elsif p.left_choice.user_created == true && p.right_choice.user_created == false
nonseed_seed_sum += 1
elsif p.left_choice.user_created == true && p.right_choice.user_created == true
nonseed_nonseed_sum += 1
end
end
densities = {}
densities[:seed_seed] = seed_seed_sum.to_f / seed_seed_total.to_f
densities[:seed_nonseed] = seed_nonseed_sum.to_f / seed_nonseed_total.to_f
densities[:nonseed_seed] = nonseed_seed_sum.to_f / nonseed_seed_total.to_f
densities[:nonseed_nonseed] = nonseed_nonseed_sum.to_f / nonseed_nonseed_total.to_f
densities.each do |type, average|
d = Density.new
d.created_at = the_date
d.question_id = q.id
d.prompt_type = type.to_s
d.value = average.nan? ? nil : average
d.save!
end
puts "Seed_seed sum: #{seed_seed_sum}, seed_seed total num: #{seed_seed_total}"
puts "Seed_nonseed sum: #{seed_nonseed_sum}, seed_nonseed total num: #{seed_nonseed_total}"
puts "Nonseed_seed sum: #{nonseed_seed_sum}, nonseed_seed total num: #{nonseed_seed_total}"
puts "Nonseed_nonseed sum: #{nonseed_nonseed_sum}, nonseed_nonseed total num: #{nonseed_nonseed_total}"
end
end
end
desc "Invalidates votes with bad response times"
task :invalidate_votes_with_bad_response_times, [:start_vote_id] => [:environment] do |t, args|
args.with_defaults(:start_vote_id => 0)
STDOUT.sync = true
Vote.find_each(:batch_size => 10000, :include => :appearance, :conditions => ["votes.id >= ?", args[:start_vote_id]]) do |v|
next if v.nil? || v.appearance.nil?
server_response_time = v.created_at.to_f - v.appearance.created_at.to_f
if v.time_viewed && v.time_viewed/1000 > server_response_time
v.time_viewed = nil
v.missing_response_time_exp = "invalid"
v.save!
print "."
end
end
print "\n"
end
task :associate_skips_with_appearances => :environment do
skips_to_fix = Skip.find(:all, :conditions => {:appearance_id => nil})
skips_to_fix.each do |skip|
puts "Skip #{skip.id} : "
possible_appearances = skip.skipper.appearances.find(:all, :conditions => {:prompt_id => skip.prompt_id})
if possible_appearances.nil? || possible_appearances.empty?
puts " I couldn't find any matches!"
skip.delete
next
end
if possible_appearances.size > 1
puts " More than one possible appearance"
possible_appearances.delete_if{|a| a.answered?}
if possible_appearances.size > 1 || possible_appearances.size == 0
puts" And I couldn't narrow it down.... moving on"
skip.delete
next
end
end
possible_appearance = possible_appearances.first
if possible_appearance.answered?
puts " This appearance has been answered already! Moving on"
skip.delete
else
puts " MATCH"
skip.appearance_id = possible_appearance.id
skip.save!
end
end
end
task(:move_vote_and_skip_ids_to_appearance => :environment) do
#Vote.find_each do |v|
# @appearance = Appearance.find(v.appearance_id)
# @appearance.answerable = v
# @appearance.save
# if v.id % 1000 == 0
# puts v.id
# end
# end
Skip.find_each do |s|
if s.appearance_id
@appearance = Appearance.find(s.appearance_id)
if @appearance.answerable
puts "Appearance #{@appearance.id} has more than one skip!"
else
@appearance.answerable = s
@appearance.save
end
end
end
end
task(:remove_double_counted_votes_with_same_appearance => :environment) do
votes_with_no_appearance = []
Vote.find_each(:include => :appearance) do |v|
puts v.id if v.id % 1000 == 0
votes_with_no_appearance << v if v.appearance.nil?
end
skips_with_no_appearance = []
Skip.find_each(:include => :appearance) do |s|
puts s.id if s.id % 1000 == 0
skips_with_no_appearance << s if s.appearance.nil?
end
puts "#{votes_with_no_appearance.size} Votes"
puts "#{skips_with_no_appearance.size} Skips"
votes_with_no_appearance.each do |v|
v.valid_record = false
v.validity_information = "No associated appearance object"
v.save!
end
skips_with_no_appearance.each do |s|
s.valid_record = false
s.validity_information = "No associated appearance object"
s.save!
end
end
#call this by doing rake prune_db:populate_seed_ideas['blahblah',questionnum], where blahblah is the filename
task(:populate_seed_ideas, [:args1, :args2,] => [:environment]) do | task, arguments|
filename = arguments[:args1]
question_num = arguments[:args2]
puts filename
puts question_num
q = Question.find(question_num)
creator_id = q.creator_id
File.open(filename, "r") do |infile|
while( data= infile.gets)
c = Choice.new(:creator_id => creator_id,
:question_id => q.id,
:active => true,
:data => data.chomp)
c.save
end
end
end
desc "Searches questions for orphaned votes (votes with no appearance) and marks them as invalid"
task :invalidate_orphaned_votes => :environment do
question_ids = ENV["question_ids"].split(/[\s,]+/)
question_ids.each do |question_id|
question = Question.find(question_id)
orphaned_votes = Vote.find(:all,
:select => "votes.id",
:joins => "LEFT JOIN appearances ON (votes.id = appearances.answerable_id AND answerable_type <> 'Skip')",
:conditions => ["answerable_id IS NULL AND votes.valid_record = 1 AND votes.question_id = ?", question.id])
puts "Question ##{question.id} has #{orphaned_votes.count} orphaned votes"
orphaned_votes.each do |orphaned_vote_id|
orphaned_vote = Vote.find(orphaned_vote_id.id)
# attempt to find sibling vote
# sibling vote is one that is valid has the same voter and prompt,
# is associated with an appearance, and created within 10 seconds
sibling_vote = nil
votes = Vote.find(:all, :conditions => {:voter_id => orphaned_vote.voter_id, :prompt_id => orphaned_vote.prompt_id})
votes.each do |vote|
next if vote.id == orphaned_vote.id
next if vote.created_at > orphaned_vote.created_at + 5.seconds
next if vote.created_at < orphaned_vote.created_at - 5.seconds
next if vote.appearance == nil
sibling_vote = vote
break
end
info = "Appearance XXXX already answered"
if sibling_vote
info = "Appearance #{sibling_vote.appearance.id} already answered"
end
orphaned_vote.update_attributes!(:valid_record => false, :validity_information => info)
end
end
end
desc "Updates cached values for losses and wins for choices."
task :update_cached_losses_wins => :environment do
Question.all.each do |question|
question.choices.each do |choice|
choice.reload
true_losses = question.votes.count(:conditions => {:loser_choice_id => choice.id})
true_wins = choice.votes.count
Choice.update_counters choice.id,
:losses => (true_losses - choice.losses),
:wins => (true_wins - choice.wins)
choice.reload
choice.score = choice.compute_score
choice.save(false)
end
end
end
desc "Update cached values for prompts on left and right for choices."
task :update_cached_prompts_on_left_right => :environment do
question_ids = ENV["question_ids"].split(/[\s,]+/) if ENV["question_ids"]
if !question_ids.blank?
questions = Question.find(question_ids)
else
questions = Question.all
end
questions.each do |question|
question.choices.each do |choice|
choice.reload
Choice.update_counters choice.id,
:prompts_on_the_left_count => choice.prompts_on_the_left.count - choice.prompts_on_the_left_count,
:prompts_on_the_right_count => choice.prompts_on_the_right.count - choice.prompts_on_the_right_count
end
end
end
desc "Recomputes scores for all choices."
task :recompute_scores => :environment do
Choice.find_each do |choice|
choice.reload
choice.score = choice.compute_score
choice.save(false)
end
end
end
| 33.78826 | 223 | 0.637588 |
f7150c4400c910afe949e8af13a2444b3260d379 | 187 | class AddAdCategoryToPresentations < ActiveRecord::Migration
def change
add_column :presentations, :ad_category, :string
add_column :sessions, :ad_category, :string
end
end
| 26.714286 | 60 | 0.770053 |
edf1475772cc7359debaf8fc36bcb9d8ea903273 | 550 | desc "update created time stamps on existing listings"
task update_created: :environment do
puts "Updating timestamps..."
xml = Nokogiri::XML(open(Movie::HBO_XML_URL))
hbo_features = Hash.from_xml(xml.to_s)['response']['body']['productResponses']['featureResponse']
count = 0
hbo_features.each do |feature|
@movie = Movie.where(title: feature['title']).first
if @movie
puts "Updating time stamp for: #{@movie.title}"
@movie.created_at = feature['startDate']
@movie.save
count += 1
end
end
p count
end | 32.352941 | 99 | 0.683636 |
1d2ba53f9d96fc1e4a90812f0e4ca7ffd70ce4b7 | 297 | module Locomotive
module Wagon
module Liquid
module Drops
class SessionProxy < ::Liquid::Drop
def before_method(meth)
request = @context.registers[:request]
request.session[meth.to_sym]
end
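          # In a Liquid template this drop resolves any unknown key to the
          # matching session value, e.g. (assuming the drop is registered as
          # `session` and that a `cart_id` key exists):
          #
          #   {{ session.cart_id }}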
end
end
end
end
end | 16.5 | 50 | 0.569024 |
e212cab198e2d1fd8f1d28cbba073c0a4e4118de | 358 |
require 'bblib' unless defined?(BBLib)
require 'socket'
require 'json'
require 'yaml'
require 'securerandom'
require 'openssl'
require 'digest/sha1'
require_relative 'ava/version'
require_relative 'replicant/replicant'
require_relative 'replicant/chained_replicant'
require_relative 'client/client'
require_relative 'controller/controller'
module Ava
end
| 19.888889 | 46 | 0.818436 |
21302f2b1ec626d51ecbd1d7f388673db2643705 | 527 | # frozen_string_literal: true
module CaseGen
class ExpectRule
include ComboMatcher
def initialize(rule_data)
@rule_data = rule_data
end
def apply(combos)
combos.each do |combo|
expect_keys = combo.names.select { |name| combo.send(name) == :expect }
next if expect_keys.none?
next unless matches_criteria(combo, expect_keys)
expect_keys.each do |expect_key|
combo.send("#{expect_key}=", @rule_data[expect_key])
end
end
end
end
end
| 21.08 | 79 | 0.643264 |
f8f07d114b2e9632d27e99ba6cbb6ab976db589b | 98 | # desc "Explaining what the task does"
# task :jsonapi_parser_rails do
# # Task goes here
# end
| 19.6 | 38 | 0.714286 |
110f5fcf1a58398a2960e14c816194482fc71199 | 755 | require 'chefspec'
describe 'zypper_package::purge' do
let(:chef_run) { ChefSpec::SoloRunner.new(platform: 'opensuse', version: '42.3').converge(described_recipe) }
it 'purges a zypper_package with an explicit action' do
expect(chef_run).to purge_zypper_package('explicit_action')
expect(chef_run).to_not purge_zypper_package('not_explicit_action')
end
it 'purges a zypper_package with attributes' do
expect(chef_run).to purge_zypper_package('with_attributes').with(version: '1.0.0')
expect(chef_run).to_not purge_zypper_package('with_attributes').with(version: '1.2.3')
end
it 'purges a zypper_package when specifying the identity attribute' do
expect(chef_run).to purge_zypper_package('identity_attribute')
end
end
| 37.75 | 111 | 0.766887 |
5d62b78a075fac1cb908c0e0035872824c9d79b0 | 3,394 | require 'spec_helper'
describe 'sudo::default_entry' do
context 'supported operating systems' do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
let(:facts){ os_facts }
let(:title) { 'default_entry_spec'}
context 'default parameters' do
let(:params) { {:content => ['first', 'second']} }
it { is_expected.to compile.with_all_deps }
it do
is_expected.to create_concat__fragment("sudo_default_entry_#{title}")
.with_content("Defaults first, second\n")
end
end
context 'def_type = host and target specified' do
let(:params) { {:content => ['first', 'second'], :def_type => 'host',
:target => 'some_host_target'} }
it { is_expected.to compile.with_all_deps }
it do
is_expected.to create_concat__fragment("sudo_default_entry_#{title}")
.with_content("Defaults@some_host_target first, second\n")
end
end
context 'def_type = cmnd and target specified' do
let(:params) { {:content => ['first', 'second'], :def_type => 'cmnd',
:target => 'some_cmnd_target'} }
it { is_expected.to compile.with_all_deps }
it do
is_expected.to create_concat__fragment("sudo_default_entry_#{title}")
.with_content("Defaults!some_cmnd_target first, second\n")
end
end
context 'def_type = user' do
let(:params) { {:content => ['first', 'second'], :def_type => 'user'} }
it { is_expected.to compile.with_all_deps }
it do
is_expected.to create_concat__fragment("sudo_default_entry_#{title}")
.with_content("Defaults: first, second\n")
end
end
context 'def_type = runas' do
let(:params) { {:content => ['first', 'second'], :def_type => 'runas'} }
it { is_expected.to compile.with_all_deps }
it do
is_expected.to create_concat__fragment("sudo_default_entry_#{title}")
.with_content("Defaults> first, second\n")
end
end
context 'def_type = cmnd' do
let(:params) { {:content => ['first', 'second'], :def_type => 'cmnd'} }
it { is_expected.to compile.with_all_deps }
it do
is_expected.to create_concat__fragment("sudo_default_entry_#{title}")
.with_content("Defaults! first, second\n")
end
end
# Test cve 2019-14287 mitigation
context 'test cve mitigation ' do
let(:params) { {:content => ['%ALL', '!%wheel'], :def_type => 'runas'} }
context 'sudo version < 1.8.28' do
let(:facts) { os_facts.merge( { :sudo_version => '1.8.0' })}
it do
is_expected.to create_concat__fragment("sudo_default_entry_#{title}")
.with_content("Defaults> %ALL, !%wheel, !%#-1\n")
end
end
context 'sudo version >= 1.8.28' do
let(:facts) { os_facts.merge( { :sudo_version => '1.8.30' })}
it do
is_expected.to create_concat__fragment("sudo_default_entry_#{title}")
.with_content("Defaults> %ALL, !%wheel\n")
end
end
end
end
end
end
end
| 35.726316 | 84 | 0.553329 |
26da61bfd8458755dc15d7784bba65e67dbfba17 | 1,678 | # frozen_string_literal: true
module Resolvers
  # Base resolver class includes everything you need for sorting and filtering entities
class BaseResolver < GraphQL::Schema::Resolver
# override in your resolver to allow order by attributes
def allowed_filter_attributes
raise 'Return an array with your allowed filter attributes.'
end
# apply_filter recursively loops through "OR" branches
def apply_filter(scope, value)
branches = normalize_filters(value).reduce { |a, b| a.or(b) }
scope.merge branches
end
def normalize_filters(value, branches = [])
scope = resources
allowed_filter_attributes.each do |filter_attr|
if value[filter_attr.to_sym].present?
scope = scope.where("#{filter_attr} LIKE ?", "%#{value[filter_attr.to_sym]}%")
end
end
branches << scope
value[:OR].reduce(branches) { |s, v| normalize_filters(v, s) } if value[:OR].present?
branches
end
# override in your resolver to allow order by attributes
def allowed_order_attributes
raise 'Return an array with your allowed order attributes.'
end
# apply order_by
def apply_order_by(scope, value)
direction = 'asc'
if value[:attribute].present? &&
allowed_order_attributes.include?(value[:attribute])
direction = value[:direction] if value[:direction].present? && %w[asc desc].include?(value[:direction].downcase)
scope = scope.order("#{value[:attribute]} #{direction}")
end
scope
end
def current_ability
Ability.new(context[:current_user])
end
end
end
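# A hypothetical concrete resolver (sketch only; the model and attribute names
# below are assumed). A subclass supplies the two allow-lists used above plus a
# #resources scope for the filters and ordering to start from:
#
#   class PostsResolver < Resolvers::BaseResolver
#     def allowed_filter_attributes
#       %w[title body]
#     end
#
#     def allowed_order_attributes
#       %w[created_at title]
#     end
#
#     def resources
#       Post.all
#     end
#   end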
| 33.56 | 121 | 0.655542 |
ab932aed7e921ed4cf6145617a7657c3d98d72a9 | 1,646 | require 'spec_helper'
feature "Creating new resources" do
scenario "Creating a resource in a collection" do
datacenters = Abiquo::Resource("http://abiquo.example.com/api/admin/datacenters", auth)
stub_auth_request(:options, "http://admin:[email protected]/api/admin/datacenters").
to_return(:headers => {'Allow' => 'GET, POST, OPTIONS'})
stub_auth_request(:post, "http://admin:[email protected]/api/admin/datacenters").with do |req|
# we parse because comparing strings is too fragile because of order changing, different indentations, etc.
# we're expecting something very close to this:
# <datacenter>
# <name>Wadus</name>
# </datacenter>
Nokogiri.parse(req.body).at_xpath("/datacenter/name").text == "Wadus"
end.to_return(:body => %q{
<datacenter>
<name>Wadus</name>
<link rel="edit" href="http://abiquo.example.com/api/admin/datacenters/1"/>
</datacenter>
})
datacenter = datacenters.create(:name => "Wadus")
datacenter.should be_a(Abiquo::Resource)
datacenter.name.should == "Wadus"
stub_auth_request(:options, "http://admin:[email protected]/api/admin/datacenters/1").
to_return(:headers => {'Allow' => 'GET, PUT, OPTIONS'})
stub_auth_request(:get, "http://admin:[email protected]/api/admin/datacenters/1").to_return(:body => %q{
<datacenter>
<name>Wadus</name>
<link rel="edit" href="http://abiquo.example.com/api/admin/datacenters/1"/>
</datacenter>
})
    datacenter.name.should == Abiquo::Resource(datacenter.url, auth).name
end
end
| 37.409091 | 115 | 0.659781 |
bf47411ab2bb7d3cfd12d862aeca122ec3e32657 | 1,580 | module Fog
module Compute
class OpenStack
class Real
def list_usages(date_start = nil, date_end = nil, detailed=false)
params = Hash.new
params[:start] = date_start.iso8601.gsub(/\+.*/, '') if date_start
params[:end] = date_end.iso8601.gsub(/\+.*/, '') if date_end
params[:detailed] = (detailed ? '1' : '0') if detailed
request(
:expects => [200, 203],
:method => 'GET',
:path => 'os-simple-tenant-usage',
:query => params
)
end
end
class Mock
def list_usages(date_start = nil, date_end = nil, detailed=false)
params = Hash.new
response = Excon::Response.new
response.status = 200
response.body = {"tenant_usages"=>[{
"total_memory_mb_usage" => 0.00036124444444444445,
"total_vcpus_usage" => 7.055555555555556e-07,
"start" => "2012-03-06 05:05:56.349001",
"tenant_id" => "b97c8abba0c44a0987c63b858a4823e5",
"stop" => "2012-03-06 05:05:56.349255",
"total_hours" => 7.055555555555556e-07,
"total_local_gb_usage" => 0.0
}
]
}
response
end
end
end
end
end
| 37.619048 | 93 | 0.427848 |
33fb6b8ef0d1c2e479ad8ae602a48e6a0cc56fe0 | 469 | # frozen_string_literal: true
class AddEmailHeaderAndFooterEnabledFlagToAppearancesTable < ActiveRecord::Migration[5.0]
include Gitlab::Database::MigrationHelpers
disable_ddl_transaction!
DOWNTIME = false
def up
add_column_with_default(:appearances, :email_header_and_footer_enabled, :boolean, default: false) # rubocop:disable Migration/AddColumnWithDefault
end
def down
remove_column(:appearances, :email_header_and_footer_enabled)
end
end
| 26.055556 | 150 | 0.812367 |
180a43b70757eba754e3e3217f5857447a44ce1b | 70 | # print a literal greeting
puts "Hello, World!"
# print the same greeting via string interpolation
text = "Hello, World!"
print "#{text}"
| 8.75 | 22 | 0.528571 |
286a985f7c31dce3434cead215d59df9fa098ea8 | 19,405 | #
# DO NOT MODIFY!!!!
# This file is automatically generated by Racc 1.4.14
# from Racc grammer file "".
#
require 'racc/parser.rb'
require "lexer/lexer"
require "parser/nodes"
class Parser < Racc::Parser
module_eval(<<'...end grammar.y/module_eval...', 'grammar.y', 144)
def parse(code, show_tokens=false)
@tokens = Lexer.new.tokenize(code) # Tokenize the code using our lexer
puts @tokens.inspect if show_tokens
do_parse # Kickoff the parsing process
end
def next_token
@tokens.shift
end
...end grammar.y/module_eval...
##### State transition tables begin ###
racc_action_table = [
27, 25, 26, 16, 18, 19, 20, 21, 22, 23,
24, 27, 25, 26, 16, 18, 19, 20, 21, 22,
23, 24, 30, 72, 41, 42, 39, 40, 28, 15,
30, 17, 41, 42, 39, 40, 30, 16, 41, 42,
15, 74, 17, 27, 25, 26, 47, 18, 19, 20,
21, 22, 23, 24, 27, 25, 26, 48, 18, 19,
20, 21, 22, 23, 24, 17, 16, 30, 49, 41,
42, 51, 15, 68, 30, 84, 41, 42, 39, 40,
27, 25, 26, 15, 18, 19, 20, 21, 22, 23,
24, 27, 25, 26, 17, 18, 19, 20, 21, 22,
23, 24, 46, 79, 45, 78, 27, 25, 26, 15,
18, 19, 20, 21, 22, 23, 24, 27, 25, 26,
15, 18, 19, 20, 21, 22, 23, 24, 86, 53,
85, 72, 27, 25, 26, 15, 18, 19, 20, 21,
22, 23, 24, 27, 25, 26, 15, 18, 19, 20,
21, 22, 23, 24, 45, 30, 30, 82, 27, 25,
26, 15, 18, 19, 20, 21, 22, 23, 24, 27,
25, 26, 15, 18, 19, 20, 21, 22, 23, 24,
72, 88, nil, nil, 27, 25, 26, 15, 18, 19,
20, 21, 22, 23, 24, 27, 25, 26, 15, 18,
19, 20, 21, 22, 23, 24, nil, nil, nil, nil,
27, 25, 26, 15, 18, 19, 20, 21, 22, 23,
24, 27, 25, 26, 15, 18, 19, 20, 21, 22,
23, 24, nil, nil, nil, nil, 27, 25, 26, 15,
18, 19, 20, 21, 22, 23, 24, 27, 25, 26,
15, 18, 19, 20, 21, 22, 23, 24, nil, nil,
nil, nil, 27, 25, 26, 15, 18, 19, 20, 21,
22, 23, 24, 27, 25, 26, 15, 18, 19, 20,
21, 22, 23, 24, nil, nil, nil, nil, 27, 25,
26, 15, 18, 19, 20, 21, 22, 23, 24, nil,
nil, 30, 15, 41, 42, 39, 40, 35, 36, 37,
38, 33, 34, 32, 31, nil, nil, 15, 66, 72,
nil, 30, nil, 41, 42, 39, 40, 35, 36, 37,
38, 33, 34, 32, 31, 30, nil, 41, 42, 39,
40, 35, 36, 37, 38, 33, 34, 32, 31, 30,
nil, 41, 42, 39, 40, 35, 36, 37, 38, 33,
34, 32, 31, 30, nil, 41, 42, 39, 40, 35,
36, 37, 38, 33, 34, 32, 31, 30, nil, 41,
42, 39, 40, 35, 36, 37, 38, 33, 34, 32,
31, 30, nil, 41, 42, 39, 40, 35, 36, 37,
38, 33, 34, 32, 31, 30, nil, 41, 42, 39,
40, 35, 36, 37, 38, 33, 34, 32, 31, 30,
nil, 41, 42, 39, 40, 35, 36, 37, 38, 33,
34, 32, 30, nil, 41, 42, 39, 40, 35, 36,
37, 38, 33, 34, 30, nil, 41, 42, 39, 40,
35, 36, 37, 38, 30, nil, 41, 42, 39, 40,
35, 36, 37, 38, 30, nil, 41, 42, 39, 40 ]
racc_action_check = [
0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 72, 72, 72, 72, 72, 72, 72, 72, 72,
72, 72, 58, 48, 58, 58, 58, 58, 1, 0,
59, 0, 59, 59, 59, 59, 62, 2, 62, 62,
72, 48, 72, 45, 45, 45, 24, 45, 45, 45,
45, 45, 45, 45, 15, 15, 15, 25, 15, 15,
15, 15, 15, 15, 15, 2, 80, 63, 26, 63,
63, 28, 45, 45, 60, 80, 60, 60, 60, 60,
27, 27, 27, 15, 27, 27, 27, 27, 27, 27,
27, 29, 29, 29, 80, 29, 29, 29, 29, 29,
29, 29, 23, 69, 23, 69, 31, 31, 31, 27,
31, 31, 31, 31, 31, 31, 31, 32, 32, 32,
29, 32, 32, 32, 32, 32, 32, 32, 81, 30,
81, 49, 33, 33, 33, 31, 33, 33, 33, 33,
33, 33, 33, 34, 34, 34, 32, 34, 34, 34,
34, 34, 34, 34, 53, 64, 65, 74, 35, 35,
35, 33, 35, 35, 35, 35, 35, 35, 35, 36,
36, 36, 34, 36, 36, 36, 36, 36, 36, 36,
85, 86, nil, nil, 37, 37, 37, 35, 37, 37,
37, 37, 37, 37, 37, 38, 38, 38, 36, 38,
38, 38, 38, 38, 38, 38, nil, nil, nil, nil,
39, 39, 39, 37, 39, 39, 39, 39, 39, 39,
39, 40, 40, 40, 38, 40, 40, 40, 40, 40,
40, 40, nil, nil, nil, nil, 41, 41, 41, 39,
41, 41, 41, 41, 41, 41, 41, 42, 42, 42,
40, 42, 42, 42, 42, 42, 42, 42, nil, nil,
nil, nil, 46, 46, 46, 41, 46, 46, 46, 46,
46, 46, 46, 47, 47, 47, 42, 47, 47, 47,
47, 47, 47, 47, nil, nil, nil, nil, 79, 79,
79, 46, 79, 79, 79, 79, 79, 79, 79, nil,
nil, 43, 47, 43, 43, 43, 43, 43, 43, 43,
43, 43, 43, 43, 43, nil, nil, 79, 43, 50,
nil, 50, nil, 50, 50, 50, 50, 50, 50, 50,
50, 50, 50, 50, 50, 3, nil, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 52,
nil, 52, 52, 52, 52, 52, 52, 52, 52, 52,
52, 52, 52, 67, nil, 67, 67, 67, 67, 67,
67, 67, 67, 67, 67, 67, 67, 70, nil, 70,
70, 70, 70, 70, 70, 70, 70, 70, 70, 70,
70, 71, nil, 71, 71, 71, 71, 71, 71, 71,
71, 71, 71, 71, 71, 83, nil, 83, 83, 83,
83, 83, 83, 83, 83, 83, 83, 83, 83, 54,
nil, 54, 54, 54, 54, 54, 54, 54, 54, 54,
54, 54, 55, nil, 55, 55, 55, 55, 55, 55,
55, 55, 55, 55, 56, nil, 56, 56, 56, 56,
56, 56, 56, 56, 57, nil, 57, 57, 57, 57,
57, 57, 57, 57, 61, nil, 61, 61, 61, 61 ]
racc_action_pointer = [
-2, 28, 32, 320, nil, nil, nil, nil, nil, nil,
nil, nil, nil, nil, nil, 52, nil, nil, nil, nil,
nil, nil, nil, 73, 17, 46, 56, 78, 71, 89,
118, 104, 115, 130, 141, 156, 167, 182, 193, 208,
219, 234, 245, 286, nil, 41, 260, 271, 10, 118,
306, nil, 334, 123, 404, 417, 429, 439, 7, 15,
59, 449, 21, 52, 140, 141, nil, 348, nil, 73,
362, 376, 9, nil, 146, nil, nil, nil, nil, 286,
61, 98, nil, 390, nil, 167, 170, nil, nil ]
racc_action_default = [
-1, -56, -2, -3, -6, -7, -8, -9, -10, -11,
-12, -13, -14, -15, -16, -56, -18, -19, -20, -21,
-22, -23, -24, -46, -44, -56, -56, -56, -56, -5,
-56, -56, -56, -56, -56, -56, -56, -56, -56, -56,
-56, -56, -56, -56, -25, -56, -56, -56, -56, -56,
-56, 89, -4, -27, -32, -33, -34, -35, -36, -37,
-38, -39, -40, -41, -42, -43, -17, -30, -28, -56,
-47, -45, -56, -49, -51, -54, -55, -26, -29, -56,
-56, -56, -52, -31, -48, -56, -56, -50, -53 ]
racc_goto_table = [
29, 2, 43, 73, 75, 76, 44, 1, 69, 81,
nil, nil, nil, nil, 50, nil, 52, nil, 54, 55,
56, 57, 58, 59, 60, 61, 62, 63, 64, 65,
nil, nil, 67, 70, 71, nil, 77, nil, nil, nil,
87, nil, nil, nil, nil, nil, nil, nil, nil, nil,
nil, nil, nil, nil, nil, nil, nil, nil, nil, nil,
nil, nil, nil, nil, nil, nil, 83, nil, nil, nil,
nil, nil, nil, 80, nil, nil, nil, nil, 29 ]
racc_goto_check = [
4, 2, 3, 17, 17, 17, 15, 1, 16, 18,
nil, nil, nil, nil, 3, nil, 3, nil, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3, 3, 3,
nil, nil, 3, 3, 3, nil, 15, nil, nil, nil,
17, nil, nil, nil, nil, nil, nil, nil, nil, nil,
nil, nil, nil, nil, nil, nil, nil, nil, nil, nil,
nil, nil, nil, nil, nil, nil, 3, nil, nil, nil,
nil, nil, nil, 2, nil, nil, nil, nil, 4 ]
racc_goto_pointer = [
nil, 7, 1, -13, -2, nil, nil, nil, nil, nil,
nil, nil, nil, nil, nil, -17, -37, -45, -65 ]
racc_goto_default = [
nil, nil, nil, 3, 4, 5, 6, 7, 8, 9,
10, 11, 12, 13, 14, nil, nil, nil, nil ]
racc_reduce_table = [
0, 0, :racc_error,
0, 35, :_reduce_1,
1, 35, :_reduce_2,
1, 36, :_reduce_3,
3, 36, :_reduce_4,
2, 36, :_reduce_5,
1, 36, :_reduce_6,
1, 37, :_reduce_none,
1, 37, :_reduce_none,
1, 37, :_reduce_none,
1, 37, :_reduce_none,
1, 37, :_reduce_none,
1, 37, :_reduce_none,
1, 37, :_reduce_none,
1, 37, :_reduce_none,
1, 37, :_reduce_none,
1, 37, :_reduce_none,
3, 37, :_reduce_17,
1, 38, :_reduce_none,
1, 38, :_reduce_none,
1, 39, :_reduce_20,
1, 39, :_reduce_21,
1, 39, :_reduce_22,
1, 39, :_reduce_23,
1, 39, :_reduce_24,
2, 40, :_reduce_25,
4, 40, :_reduce_26,
3, 40, :_reduce_27,
2, 49, :_reduce_28,
3, 49, :_reduce_29,
1, 50, :_reduce_30,
3, 50, :_reduce_31,
3, 41, :_reduce_32,
3, 41, :_reduce_33,
3, 41, :_reduce_34,
3, 41, :_reduce_35,
3, 41, :_reduce_36,
3, 41, :_reduce_37,
3, 41, :_reduce_38,
3, 41, :_reduce_39,
3, 41, :_reduce_40,
3, 41, :_reduce_41,
3, 41, :_reduce_42,
3, 41, :_reduce_43,
1, 42, :_reduce_44,
3, 43, :_reduce_45,
1, 44, :_reduce_46,
3, 45, :_reduce_47,
3, 51, :_reduce_48,
3, 46, :_reduce_49,
6, 46, :_reduce_50,
0, 52, :_reduce_51,
1, 52, :_reduce_52,
3, 52, :_reduce_53,
3, 47, :_reduce_54,
3, 48, :_reduce_55 ]
racc_reduce_n = 56
racc_shift_n = 89
racc_token_table = {
false => 0,
:error => 1,
:IF => 2,
:DEF => 3,
:CLASS => 4,
:NEWLINE => 5,
:NUMBER => 6,
:STRING => 7,
:TRUE => 8,
:FALSE => 9,
:NIL => 10,
:IDENTIFIER => 11,
:CONSTANT => 12,
:INDENT => 13,
:DEDENT => 14,
"." => 15,
"!" => 16,
"*" => 17,
"/" => 18,
"+" => 19,
"-" => 20,
">" => 21,
">=" => 22,
"<" => 23,
"<=" => 24,
"==" => 25,
"!=" => 26,
"&&" => 27,
"||" => 28,
"=" => 29,
"," => 30,
"(" => 31,
")" => 32,
";" => 33 }
racc_nt_base = 34
racc_use_result_var = true
Racc_arg = [
racc_action_table,
racc_action_check,
racc_action_default,
racc_action_pointer,
racc_goto_table,
racc_goto_check,
racc_goto_default,
racc_goto_pointer,
racc_nt_base,
racc_reduce_table,
racc_token_table,
racc_shift_n,
racc_reduce_n,
racc_use_result_var ]
Racc_token_to_s_table = [
"$end",
"error",
"IF",
"DEF",
"CLASS",
"NEWLINE",
"NUMBER",
"STRING",
"TRUE",
"FALSE",
"NIL",
"IDENTIFIER",
"CONSTANT",
"INDENT",
"DEDENT",
"\".\"",
"\"!\"",
"\"*\"",
"\"/\"",
"\"+\"",
"\"-\"",
"\">\"",
"\">=\"",
"\"<\"",
"\"<=\"",
"\"==\"",
"\"!=\"",
"\"&&\"",
"\"||\"",
"\"=\"",
"\",\"",
"\"(\"",
"\")\"",
"\";\"",
"$start",
"Program",
"Expressions",
"Expression",
"Terminator",
"Literal",
"Call",
"Operator",
"GetConstant",
"SetConstant",
"GetLocal",
"SetLocal",
"Def",
"Class",
"If",
"Arguments",
"ArgList",
"Block",
"ParamList" ]
Racc_debug_parser = false
##### State transition tables end #####
# reduce 0 omitted
module_eval(<<'.,.,', 'grammar.y', 28)
def _reduce_1(val, _values, result)
result = Nodes.new([])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 29)
def _reduce_2(val, _values, result)
result = val[0]
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 33)
def _reduce_3(val, _values, result)
result = Nodes.new(val)
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 34)
def _reduce_4(val, _values, result)
result = val[0] << val[2]
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 35)
def _reduce_5(val, _values, result)
result = val[0]
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 36)
def _reduce_6(val, _values, result)
result = Nodes.new([])
result
end
.,.,
# reduce 7 omitted
# reduce 8 omitted
# reduce 9 omitted
# reduce 10 omitted
# reduce 11 omitted
# reduce 12 omitted
# reduce 13 omitted
# reduce 14 omitted
# reduce 15 omitted
# reduce 16 omitted
module_eval(<<'.,.,', 'grammar.y', 50)
def _reduce_17(val, _values, result)
result = val[1]
result
end
.,.,
# reduce 18 omitted
# reduce 19 omitted
module_eval(<<'.,.,', 'grammar.y', 59)
def _reduce_20(val, _values, result)
result = NumberNode.new(val[0])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 60)
def _reduce_21(val, _values, result)
result = StringNode.new(val[0])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 61)
def _reduce_22(val, _values, result)
result = TrueNode.new
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 62)
def _reduce_23(val, _values, result)
result = FalseNode.new
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 63)
def _reduce_24(val, _values, result)
result = NilNode.new
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 67)
def _reduce_25(val, _values, result)
result = CallNode.new(nil, val[0], val[1])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 69)
def _reduce_26(val, _values, result)
result = CallNode.new(val[0], val[2], val[3])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 70)
def _reduce_27(val, _values, result)
result = CallNode.new(val[0], val[2], [])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 74)
def _reduce_28(val, _values, result)
result = []
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 75)
def _reduce_29(val, _values, result)
result = val[1]
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 79)
def _reduce_30(val, _values, result)
result = val
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 80)
def _reduce_31(val, _values, result)
result = val[0] << val[2]
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 84)
def _reduce_32(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 85)
def _reduce_33(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 86)
def _reduce_34(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 87)
def _reduce_35(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 88)
def _reduce_36(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 89)
def _reduce_37(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 90)
def _reduce_38(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 91)
def _reduce_39(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 92)
def _reduce_40(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 93)
def _reduce_41(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 94)
def _reduce_42(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 95)
def _reduce_43(val, _values, result)
result = CallNode.new(val[0], val[1], [val[2]])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 99)
def _reduce_44(val, _values, result)
result = GetConstantNode.new(val[0])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 103)
def _reduce_45(val, _values, result)
result = SetConstantNode.new(val[0], val[2])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 107)
def _reduce_46(val, _values, result)
result = GetLocalNode.new(val[0])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 111)
def _reduce_47(val, _values, result)
result = SetLocalNode.new(val[0], val[2])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 115)
def _reduce_48(val, _values, result)
result = val[1]
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 119)
def _reduce_49(val, _values, result)
result = DefNode.new(val[1], [], val[2])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 121)
def _reduce_50(val, _values, result)
result = DefNode.new(val[1], val[3], val[5])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 125)
def _reduce_51(val, _values, result)
result = []
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 126)
def _reduce_52(val, _values, result)
result = val
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 127)
def _reduce_53(val, _values, result)
result = val[0] << val[2]
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 131)
def _reduce_54(val, _values, result)
result = ClassNode.new(val[1], val[2])
result
end
.,.,
module_eval(<<'.,.,', 'grammar.y', 135)
def _reduce_55(val, _values, result)
result = IfNode.new(val[1], val[2])
result
end
.,.,
def _reduce_none(val, _values, result)
val[0]
end
end # class Parser
| 28.369883 | 74 | 0.44035 |
ffa0a311b058195a7e7fbe446186313b81c9e870 | 78 | Rails.application.routes.draw do
mount PdfMerge::Engine => "/pdf_merge"
end
| 19.5 | 40 | 0.75641 |
e84d971871ffd12a30f2a903fc4a8783ccca5a5e | 877 | require 'puppet/indirector/status'
require 'puppet/indirector/rest'
class Puppet::Indirector::Status::Rest < Puppet::Indirector::REST
desc "Get puppet master's status via REST. Useful because it tests the health
of both the web server and the indirector."
def find(request)
return super unless use_http_client?
session = Puppet.lookup(:http_session)
api = session.route_to(:puppet)
_, status = api.get_status(request.key)
status
rescue Puppet::HTTP::ResponseError => e
if e.response.code == 404
return nil unless request.options[:fail_on_404]
_, body = parse_response(e.response.nethttp)
msg = _("Find %{uri} resulted in 404 with the message: %{body}") % { uri: elide(e.response.url.path, 100), body: body }
raise Puppet::Error, msg
else
raise convert_to_http_error(e.response.nethttp)
end
end
end
| 31.321429 | 125 | 0.700114 |
62141941bc7fa668aa1c31a16052216ad4b6fd18 | 635 | require 'net/http'
require 'uri'
module Rss
extend ActiveSupport::Concern
class_methods do
def getRssContent(url)
uri = URI.parse(url)
response = Net::HTTP.get(uri)
hash = Hash.from_xml(response)
rssList = Array.new
hash["rss"]["channel"]["item"].each { |item|
eachItem = {
"title" => item["title"],
"link" => item["link"],
"pubDate" => DateTime.parse(item["pubDate"]).strftime("%Y-%m-%d"),
"description" => item["description"],
}
rssList.push(eachItem)
}
rssList
end
end
end
| 22.678571 | 79 | 0.519685 |
1c83a9a40e75cfa77263bcc59154d10348660219 | 1,547 | # :nodoc:
#
# Copyright (C) 2014-2020 Authlete, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Authlete
module Model
module Request
class UserInfoRequest < Authlete::Model::Request::Base
attr_accessor :token
attr_accessor :clientCertificate
alias_method :client_certificate, :clientCertificate
alias_method :client_certificate=, :clientCertificate=
attr_accessor :dpop
attr_accessor :htu
attr_accessor :htm
private
def defaults
{
token: nil,
clientCertificate: nil,
dpop: nil,
htu: nil,
htm: nil
}
end
def set_params(hash)
@token = hash[:token]
@clientCertificate = hash[:clientCertificate]
@dpop = hash[:dpop]
@htu = hash[:htu]
@htm = hash[:htm]
end
end
end
end
end
| 26.672414 | 74 | 0.586296 |
f850a2330f35f48aa72d9f549448c062c7c39fb8 | 1,432 | class Opal::RSpec::UpstreamTests::Runner
def initialize(gem_name = ::RSpec.current_example.metadata[:gem_name])
@gem_name = gem_name
@config = Opal::RSpec::UpstreamTests::Config.new(gem_name)
end
def run
@config.stubs.each { |f| ::Opal::Config.stubbed_files << f }
output, exit_status = StdoutCapturingRunner.run { opal_rspec_runner.run }
Opal::RSpec::UpstreamTests::Result.new(
exit_status,
output,
JSON.parse(File.read("/tmp/#{@gem_name}-results.json"), symbolize_names: true),
)
end
module StdoutCapturingRunner
def self.run
output_io = StringIO.new
previous_stdout = $stdout
previous_stderr = $stderr
$stdout = output_io
$stderr = output_io
begin
exit_status = yield
ensure
$stdout = previous_stdout
$stderr = previous_stderr
end
output_io.rewind
[output_io.read, exit_status]
end
end
private
def opal_rspec_runner
::Opal::RSpec::Runner.new do |server, task|
# A lot of specs, can take longer on slower machines
# task.timeout = 80000
task.files = @config.files_to_run
task.default_path = "#{@gem_name}/upstream/spec"
@config.load_paths.each do |path|
server.append_path(path)
end
server.debug = ENV['OPAL_DEBUG']
task.requires.unshift(File.join(@config.submodule_root, 'spec/requires.rb'))
end
end
end
| 25.571429 | 85 | 0.653631 |
f75f705ee9adfa77d322807aa75efb8a3fb75838 | 47 | module CharacterTitles
VERSION = '0.1.1'
end
| 11.75 | 22 | 0.723404 |
5df67b088a614d6750a8a2d93c0225ab9f68e2da | 1,188 | Pod::Spec.new do |spec|
spec.name = "AUKit"
spec.version = "3.0.20"
spec.summary = "Internal framework of AppUnite."
spec.homepage = "http://appunite.com/"
spec.license = 'Apache 2.0'
spec.author = { "emil.wojtaszek" => "[email protected]" }
spec.source = { :git => 'https://review.appunite.com/iosaukit.git', :commit => 'd28a21e' }
spec.requires_arc = true
spec.ios.deployment_target = '5.0'
#spec.source = { :git => "http://EXAMPLE/AUKit.git", :tag => "0.0.1" }
spec.frameworks = ['Foundation', 'UIKit', 'CoreData', 'QuartzCore', 'Twitter', 'Accounts', 'AudioToolbox', 'Security', 'CoreLocation']
spec.subspec 'Core' do |ss|
ss.platform = :ios
ss.source_files = 'Classes/**/*.{h,m}', 'Classes'
ss.dependency 'AFNetworking', '~> 1.3'
ss.dependency 'SSKeychain', '~> 1.0.2'
ss.dependency 'UIDeviceAddition', '~> 1.0'
ss.dependency 'Facebook-iOS-SDK', '~> 3.6.0'
ss.dependency 'NSData+Base64'
ss.dependency 'ODRefreshControl-appunite'
ss.dependency 'AFOAuth1Client-appunite'
ss.dependency 'SBJson', '~> 3.2'
#ss.resources = "Resources/AUKit.bundle/"
end
end
| 38.322581 | 138 | 0.611111 |
e2d15c1f53e2125ac5279afea8af08fc5022152c | 21,772 | module ActiveMerchant #:nodoc:
class InvalidCountryCodeError < StandardError
end
class CountryCodeFormatError < StandardError
end
class CountryCode
attr_reader :value, :format
def initialize(value)
@value = value.to_s.upcase
detect_format
end
def to_s
value
end
private
def detect_format
case @value
when /^[[:alpha:]]{2}$/
@format = :alpha2
when /^[[:alpha:]]{3}$/
@format = :alpha3
when /^[[:digit:]]{3}$/
@format = :numeric
else
raise CountryCodeFormatError, "The country code is not formatted correctly #{@value}"
end
end
end
class Country
include RequiresParameters
attr_reader :name
def initialize(options = {})
requires!(options, :name, :alpha2, :alpha3, :numeric)
@name = options.delete(:name)
@codes = options.collect{|k,v| CountryCode.new(v)}
end
def code(format)
@codes.select{|c| c.format == format}
end
def to_s
@name
end
COUNTRIES = [
{ :alpha2 => 'AF', :name => 'Afghanistan', :alpha3 => 'AFG', :numeric => '004' },
{ :alpha2 => 'AL', :name => 'Albania', :alpha3 => 'ALB', :numeric => '008' },
{ :alpha2 => 'DZ', :name => 'Algeria', :alpha3 => 'DZA', :numeric => '012' },
{ :alpha2 => 'AS', :name => 'American Samoa', :alpha3 => 'ASM', :numeric => '016' },
{ :alpha2 => 'AD', :name => 'Andorra', :alpha3 => 'AND', :numeric => '020' },
{ :alpha2 => 'AO', :name => 'Angola', :alpha3 => 'AGO', :numeric => '024' },
{ :alpha2 => 'AI', :name => 'Anguilla', :alpha3 => 'AIA', :numeric => '660' },
{ :alpha2 => 'AG', :name => 'Antigua and Barbuda', :alpha3 => 'ATG', :numeric => '028' },
{ :alpha2 => 'AR', :name => 'Argentina', :alpha3 => 'ARG', :numeric => '032' },
{ :alpha2 => 'AM', :name => 'Armenia', :alpha3 => 'ARM', :numeric => '051' },
{ :alpha2 => 'AW', :name => 'Aruba', :alpha3 => 'ABW', :numeric => '533' },
{ :alpha2 => 'AU', :name => 'Australia', :alpha3 => 'AUS', :numeric => '036' },
{ :alpha2 => 'AT', :name => 'Austria', :alpha3 => 'AUT', :numeric => '040' },
      { :alpha2 => 'AX', :name => 'Åland Islands', :alpha3 => 'ALA', :numeric => '248' },
{ :alpha2 => 'AZ', :name => 'Azerbaijan', :alpha3 => 'AZE', :numeric => '031' },
{ :alpha2 => 'BS', :name => 'Bahamas', :alpha3 => 'BHS', :numeric => '044' },
{ :alpha2 => 'BH', :name => 'Bahrain', :alpha3 => 'BHR', :numeric => '048' },
{ :alpha2 => 'BD', :name => 'Bangladesh', :alpha3 => 'BGD', :numeric => '050' },
{ :alpha2 => 'BB', :name => 'Barbados', :alpha3 => 'BRB', :numeric => '052' },
{ :alpha2 => 'BY', :name => 'Belarus', :alpha3 => 'BLR', :numeric => '112' },
{ :alpha2 => 'BE', :name => 'Belgium', :alpha3 => 'BEL', :numeric => '056' },
{ :alpha2 => 'BZ', :name => 'Belize', :alpha3 => 'BLZ', :numeric => '084' },
{ :alpha2 => 'BJ', :name => 'Benin', :alpha3 => 'BEN', :numeric => '204' },
{ :alpha2 => 'BM', :name => 'Bermuda', :alpha3 => 'BMU', :numeric => '060' },
{ :alpha2 => 'BT', :name => 'Bhutan', :alpha3 => 'BTN', :numeric => '064' },
{ :alpha2 => 'BO', :name => 'Bolivia', :alpha3 => 'BOL', :numeric => '068' },
{ :alpha2 => 'BA', :name => 'Bosnia and Herzegovina', :alpha3 => 'BIH', :numeric => '070' },
{ :alpha2 => 'BW', :name => 'Botswana', :alpha3 => 'BWA', :numeric => '072' },
{ :alpha2 => 'BR', :name => 'Brazil', :alpha3 => 'BRA', :numeric => '076' },
{ :alpha2 => 'BN', :name => 'Brunei Darussalam', :alpha3 => 'BRN', :numeric => '096' },
{ :alpha2 => 'BG', :name => 'Bulgaria', :alpha3 => 'BGR', :numeric => '100' },
{ :alpha2 => 'BF', :name => 'Burkina Faso', :alpha3 => 'BFA', :numeric => '854' },
{ :alpha2 => 'BI', :name => 'Burundi', :alpha3 => 'BDI', :numeric => '108' },
{ :alpha2 => 'KH', :name => 'Cambodia', :alpha3 => 'KHM', :numeric => '116' },
{ :alpha2 => 'CM', :name => 'Cameroon', :alpha3 => 'CMR', :numeric => '120' },
{ :alpha2 => 'CA', :name => 'Canada', :alpha3 => 'CAN', :numeric => '124' },
{ :alpha2 => 'CV', :name => 'Cape Verde', :alpha3 => 'CPV', :numeric => '132' },
{ :alpha2 => 'KY', :name => 'Cayman Islands', :alpha3 => 'CYM', :numeric => '136' },
{ :alpha2 => 'CF', :name => 'Central African Republic', :alpha3 => 'CAF', :numeric => '140' },
{ :alpha2 => 'TD', :name => 'Chad', :alpha3 => 'TCD', :numeric => '148' },
{ :alpha2 => 'CL', :name => 'Chile', :alpha3 => 'CHL', :numeric => '152' },
{ :alpha2 => 'CN', :name => 'China', :alpha3 => 'CHN', :numeric => '156' },
{ :alpha2 => 'CO', :name => 'Colombia', :alpha3 => 'COL', :numeric => '170' },
{ :alpha2 => 'KM', :name => 'Comoros', :alpha3 => 'COM', :numeric => '174' },
{ :alpha2 => 'CG', :name => 'Congo', :alpha3 => 'COG', :numeric => '178' },
{ :alpha2 => 'CD', :name => 'Congo, the Democratic Republic of the', :alpha3 => 'COD', :numeric => '180' },
{ :alpha2 => 'CK', :name => 'Cook Islands', :alpha3 => 'COK', :numeric => '184' },
{ :alpha2 => 'CR', :name => 'Costa Rica', :alpha3 => 'CRI', :numeric => '188' },
{ :alpha2 => 'CI', :name => 'Cote D\'Ivoire', :alpha3 => 'CIV', :numeric => '384' },
{ :alpha2 => 'HR', :name => 'Croatia', :alpha3 => 'HRV', :numeric => '191' },
{ :alpha2 => 'CU', :name => 'Cuba', :alpha3 => 'CUB', :numeric => '192' },
{ :alpha2 => 'CY', :name => 'Cyprus', :alpha3 => 'CYP', :numeric => '196' },
{ :alpha2 => 'CZ', :name => 'Czech Republic', :alpha3 => 'CZE', :numeric => '203' },
{ :alpha2 => 'DK', :name => 'Denmark', :alpha3 => 'DNK', :numeric => '208' },
{ :alpha2 => 'DJ', :name => 'Djibouti', :alpha3 => 'DJI', :numeric => '262' },
{ :alpha2 => 'DM', :name => 'Dominica', :alpha3 => 'DMA', :numeric => '212' },
{ :alpha2 => 'DO', :name => 'Dominican Republic', :alpha3 => 'DOM', :numeric => '214' },
{ :alpha2 => 'EC', :name => 'Ecuador', :alpha3 => 'ECU', :numeric => '218' },
{ :alpha2 => 'EG', :name => 'Egypt', :alpha3 => 'EGY', :numeric => '818' },
{ :alpha2 => 'SV', :name => 'El Salvador', :alpha3 => 'SLV', :numeric => '222' },
{ :alpha2 => 'GQ', :name => 'Equatorial Guinea', :alpha3 => 'GNQ', :numeric => '226' },
{ :alpha2 => 'ER', :name => 'Eritrea', :alpha3 => 'ERI', :numeric => '232' },
{ :alpha2 => 'EE', :name => 'Estonia', :alpha3 => 'EST', :numeric => '233' },
{ :alpha2 => 'ET', :name => 'Ethiopia', :alpha3 => 'ETH', :numeric => '231' },
{ :alpha2 => 'FK', :name => 'Falkland Islands (Malvinas)', :alpha3 => 'FLK', :numeric => '238' },
{ :alpha2 => 'FO', :name => 'Faroe Islands', :alpha3 => 'FRO', :numeric => '234' },
{ :alpha2 => 'FJ', :name => 'Fiji', :alpha3 => 'FJI', :numeric => '242' },
{ :alpha2 => 'FI', :name => 'Finland', :alpha3 => 'FIN', :numeric => '246' },
{ :alpha2 => 'FR', :name => 'France', :alpha3 => 'FRA', :numeric => '250' },
{ :alpha2 => 'GF', :name => 'French Guiana', :alpha3 => 'GUF', :numeric => '254' },
{ :alpha2 => 'PF', :name => 'French Polynesia', :alpha3 => 'PYF', :numeric => '258' },
{ :alpha2 => 'GA', :name => 'Gabon', :alpha3 => 'GAB', :numeric => '266' },
{ :alpha2 => 'GM', :name => 'Gambia', :alpha3 => 'GMB', :numeric => '270' },
{ :alpha2 => 'GE', :name => 'Georgia', :alpha3 => 'GEO', :numeric => '268' },
{ :alpha2 => 'DE', :name => 'Germany', :alpha3 => 'DEU', :numeric => '276' },
{ :alpha2 => 'GH', :name => 'Ghana', :alpha3 => 'GHA', :numeric => '288' },
{ :alpha2 => 'GI', :name => 'Gibraltar', :alpha3 => 'GIB', :numeric => '292' },
{ :alpha2 => 'GR', :name => 'Greece', :alpha3 => 'GRC', :numeric => '300' },
{ :alpha2 => 'GL', :name => 'Greenland', :alpha3 => 'GRL', :numeric => '304' },
{ :alpha2 => 'GD', :name => 'Grenada', :alpha3 => 'GRD', :numeric => '308' },
{ :alpha2 => 'GP', :name => 'Guadeloupe', :alpha3 => 'GLP', :numeric => '312' },
{ :alpha2 => 'GU', :name => 'Guam', :alpha3 => 'GUM', :numeric => '316' },
{ :alpha2 => 'GT', :name => 'Guatemala', :alpha3 => 'GTM', :numeric => '320' },
{ :alpha2 => 'GN', :name => 'Guinea', :alpha3 => 'GIN', :numeric => '324' },
{ :alpha2 => 'GW', :name => 'Guinea-Bissau', :alpha3 => 'GNB', :numeric => '624' },
{ :alpha2 => 'GY', :name => 'Guyana', :alpha3 => 'GUY', :numeric => '328' },
{ :alpha2 => 'GG', :name => 'Guernsey', :alpha3 => 'GGY', :numeric => '831' },
{ :alpha2 => 'HT', :name => 'Haiti', :alpha3 => 'HTI', :numeric => '332' },
{ :alpha2 => 'VA', :name => 'Holy See (Vatican City State)', :alpha3 => 'VAT', :numeric => '336' },
{ :alpha2 => 'HN', :name => 'Honduras', :alpha3 => 'HND', :numeric => '340' },
{ :alpha2 => 'HK', :name => 'Hong Kong', :alpha3 => 'HKG', :numeric => '344' },
{ :alpha2 => 'HU', :name => 'Hungary', :alpha3 => 'HUN', :numeric => '348' },
{ :alpha2 => 'IS', :name => 'Iceland', :alpha3 => 'ISL', :numeric => '352' },
{ :alpha2 => 'IN', :name => 'India', :alpha3 => 'IND', :numeric => '356' },
{ :alpha2 => 'ID', :name => 'Indonesia', :alpha3 => 'IDN', :numeric => '360' },
{ :alpha2 => 'IR', :name => 'Iran, Islamic Republic of', :alpha3 => 'IRN', :numeric => '364' },
{ :alpha2 => 'IQ', :name => 'Iraq', :alpha3 => 'IRQ', :numeric => '368' },
{ :alpha2 => 'IE', :name => 'Ireland', :alpha3 => 'IRL', :numeric => '372' },
{ :alpha2 => 'IL', :name => 'Israel', :alpha3 => 'ISR', :numeric => '376' },
{ :alpha2 => 'IT', :name => 'Italy', :alpha3 => 'ITA', :numeric => '380' },
{ :alpha2 => 'JM', :name => 'Jamaica', :alpha3 => 'JAM', :numeric => '388' },
{ :alpha2 => 'JP', :name => 'Japan', :alpha3 => 'JPN', :numeric => '392' },
{ :alpha2 => 'JO', :name => 'Jordan', :alpha3 => 'JOR', :numeric => '400' },
{ :alpha2 => 'KZ', :name => 'Kazakhstan', :alpha3 => 'KAZ', :numeric => '398' },
{ :alpha2 => 'KE', :name => 'Kenya', :alpha3 => 'KEN', :numeric => '404' },
{ :alpha2 => 'KI', :name => 'Kiribati', :alpha3 => 'KIR', :numeric => '296' },
{ :alpha2 => 'KP', :name => 'Korea, Democratic People\'s Republic of', :alpha3 => 'PRK', :numeric => '408' },
{ :alpha2 => 'KR', :name => 'Korea, Republic of', :alpha3 => 'KOR', :numeric => '410' },
{ :alpha2 => 'KW', :name => 'Kuwait', :alpha3 => 'KWT', :numeric => '414' },
{ :alpha2 => 'KG', :name => 'Kyrgyzstan', :alpha3 => 'KGZ', :numeric => '417' },
{ :alpha2 => 'LA', :name => 'Lao People\'s Democratic Republic', :alpha3 => 'LAO', :numeric => '418' },
{ :alpha2 => 'LV', :name => 'Latvia', :alpha3 => 'LVA', :numeric => '428' },
{ :alpha2 => 'LB', :name => 'Lebanon', :alpha3 => 'LBN', :numeric => '422' },
{ :alpha2 => 'LS', :name => 'Lesotho', :alpha3 => 'LSO', :numeric => '426' },
{ :alpha2 => 'LR', :name => 'Liberia', :alpha3 => 'LBR', :numeric => '430' },
{ :alpha2 => 'LY', :name => 'Libyan Arab Jamahiriya', :alpha3 => 'LBY', :numeric => '434' },
{ :alpha2 => 'LI', :name => 'Liechtenstein', :alpha3 => 'LIE', :numeric => '438' },
{ :alpha2 => 'LT', :name => 'Lithuania', :alpha3 => 'LTU', :numeric => '440' },
{ :alpha2 => 'LU', :name => 'Luxembourg', :alpha3 => 'LUX', :numeric => '442' },
{ :alpha2 => 'MO', :name => 'Macao', :alpha3 => 'MAC', :numeric => '446' },
{ :alpha2 => 'MK', :name => 'Macedonia, the Former Yugoslav Republic of', :alpha3 => 'MKD', :numeric => '807' },
{ :alpha2 => 'MG', :name => 'Madagascar', :alpha3 => 'MDG', :numeric => '450' },
{ :alpha2 => 'MW', :name => 'Malawi', :alpha3 => 'MWI', :numeric => '454' },
{ :alpha2 => 'MY', :name => 'Malaysia', :alpha3 => 'MYS', :numeric => '458' },
{ :alpha2 => 'MV', :name => 'Maldives', :alpha3 => 'MDV', :numeric => '462' },
{ :alpha2 => 'ML', :name => 'Mali', :alpha3 => 'MLI', :numeric => '466' },
{ :alpha2 => 'MT', :name => 'Malta', :alpha3 => 'MLT', :numeric => '470' },
{ :alpha2 => 'MH', :name => 'Marshall Islands', :alpha3 => 'MHL', :numeric => '584' },
{ :alpha2 => 'MQ', :name => 'Martinique', :alpha3 => 'MTQ', :numeric => '474' },
{ :alpha2 => 'MR', :name => 'Mauritania', :alpha3 => 'MRT', :numeric => '478' },
{ :alpha2 => 'MU', :name => 'Mauritius', :alpha3 => 'MUS', :numeric => '480' },
{ :alpha2 => 'MX', :name => 'Mexico', :alpha3 => 'MEX', :numeric => '484' },
{ :alpha2 => 'FM', :name => 'Micronesia, Federated States of', :alpha3 => 'FSM', :numeric => '583' },
{ :alpha2 => 'MD', :name => 'Moldova, Republic of', :alpha3 => 'MDA', :numeric => '498' },
{ :alpha2 => 'MC', :name => 'Monaco', :alpha3 => 'MCO', :numeric => '492' },
{ :alpha2 => 'MN', :name => 'Mongolia', :alpha3 => 'MNG', :numeric => '496' },
{ :alpha2 => 'MS', :name => 'Montserrat', :alpha3 => 'MSR', :numeric => '500' },
{ :alpha2 => 'MA', :name => 'Morocco', :alpha3 => 'MAR', :numeric => '504' },
{ :alpha2 => 'MZ', :name => 'Mozambique', :alpha3 => 'MOZ', :numeric => '508' },
{ :alpha2 => 'MM', :name => 'Myanmar', :alpha3 => 'MMR', :numeric => '104' },
{ :alpha2 => 'NA', :name => 'Namibia', :alpha3 => 'NAM', :numeric => '516' },
{ :alpha2 => 'NR', :name => 'Nauru', :alpha3 => 'NRU', :numeric => '520' },
{ :alpha2 => 'NP', :name => 'Nepal', :alpha3 => 'NPL', :numeric => '524' },
{ :alpha2 => 'NL', :name => 'Netherlands', :alpha3 => 'NLD', :numeric => '528' },
{ :alpha2 => 'AN', :name => 'Netherlands Antilles', :alpha3 => 'ANT', :numeric => '530' },
{ :alpha2 => 'NC', :name => 'New Caledonia', :alpha3 => 'NCL', :numeric => '540' },
{ :alpha2 => 'NZ', :name => 'New Zealand', :alpha3 => 'NZL', :numeric => '554' },
{ :alpha2 => 'NI', :name => 'Nicaragua', :alpha3 => 'NIC', :numeric => '558' },
{ :alpha2 => 'NE', :name => 'Niger', :alpha3 => 'NER', :numeric => '562' },
{ :alpha2 => 'NG', :name => 'Nigeria', :alpha3 => 'NGA', :numeric => '566' },
{ :alpha2 => 'NU', :name => 'Niue', :alpha3 => 'NIU', :numeric => '570' },
{ :alpha2 => 'NF', :name => 'Norfolk Island', :alpha3 => 'NFK', :numeric => '574' },
{ :alpha2 => 'MP', :name => 'Northern Mariana Islands', :alpha3 => 'MNP', :numeric => '580' },
{ :alpha2 => 'NO', :name => 'Norway', :alpha3 => 'NOR', :numeric => '578' },
{ :alpha2 => 'OM', :name => 'Oman', :alpha3 => 'OMN', :numeric => '512' },
{ :alpha2 => 'PK', :name => 'Pakistan', :alpha3 => 'PAK', :numeric => '586' },
{ :alpha2 => 'PW', :name => 'Palau', :alpha3 => 'PLW', :numeric => '585' },
{ :alpha2 => 'PA', :name => 'Panama', :alpha3 => 'PAN', :numeric => '591' },
{ :alpha2 => 'PG', :name => 'Papua New Guinea', :alpha3 => 'PNG', :numeric => '598' },
{ :alpha2 => 'PY', :name => 'Paraguay', :alpha3 => 'PRY', :numeric => '600' },
{ :alpha2 => 'PE', :name => 'Peru', :alpha3 => 'PER', :numeric => '604' },
{ :alpha2 => 'PH', :name => 'Philippines', :alpha3 => 'PHL', :numeric => '608' },
{ :alpha2 => 'PN', :name => 'Pitcairn', :alpha3 => 'PCN', :numeric => '612' },
{ :alpha2 => 'PL', :name => 'Poland', :alpha3 => 'POL', :numeric => '616' },
{ :alpha2 => 'PT', :name => 'Portugal', :alpha3 => 'PRT', :numeric => '620' },
{ :alpha2 => 'PR', :name => 'Puerto Rico', :alpha3 => 'PRI', :numeric => '630' },
{ :alpha2 => 'QA', :name => 'Qatar', :alpha3 => 'QAT', :numeric => '634' },
{ :alpha2 => 'RE', :name => 'Reunion', :alpha3 => 'REU', :numeric => '638' },
{ :alpha2 => 'RO', :name => 'Romania', :alpha3 => 'ROM', :numeric => '642' },
{ :alpha2 => 'RU', :name => 'Russian Federation', :alpha3 => 'RUS', :numeric => '643' },
{ :alpha2 => 'RW', :name => 'Rwanda', :alpha3 => 'RWA', :numeric => '646' },
{ :alpha2 => 'SH', :name => 'Saint Helena', :alpha3 => 'SHN', :numeric => '654' },
{ :alpha2 => 'KN', :name => 'Saint Kitts and Nevis', :alpha3 => 'KNA', :numeric => '659' },
{ :alpha2 => 'LC', :name => 'Saint Lucia', :alpha3 => 'LCA', :numeric => '662' },
{ :alpha2 => 'PM', :name => 'Saint Pierre and Miquelon', :alpha3 => 'SPM', :numeric => '666' },
{ :alpha2 => 'VC', :name => 'Saint Vincent and the Grenadines', :alpha3 => 'VCT', :numeric => '670' },
{ :alpha2 => 'WS', :name => 'Samoa', :alpha3 => 'WSM', :numeric => '882' },
{ :alpha2 => 'SM', :name => 'San Marino', :alpha3 => 'SMR', :numeric => '674' },
{ :alpha2 => 'ST', :name => 'Sao Tome and Principe', :alpha3 => 'STP', :numeric => '678' },
{ :alpha2 => 'SA', :name => 'Saudi Arabia', :alpha3 => 'SAU', :numeric => '682' },
{ :alpha2 => 'SN', :name => 'Senegal', :alpha3 => 'SEN', :numeric => '686' },
{ :alpha2 => 'SC', :name => 'Seychelles', :alpha3 => 'SYC', :numeric => '690' },
{ :alpha2 => 'SL', :name => 'Sierra Leone', :alpha3 => 'SLE', :numeric => '694' },
{ :alpha2 => 'SG', :name => 'Singapore', :alpha3 => 'SGP', :numeric => '702' },
{ :alpha2 => 'SK', :name => 'Slovakia', :alpha3 => 'SVK', :numeric => '703' },
{ :alpha2 => 'SI', :name => 'Slovenia', :alpha3 => 'SVN', :numeric => '705' },
{ :alpha2 => 'SB', :name => 'Solomon Islands', :alpha3 => 'SLB', :numeric => '090' },
{ :alpha2 => 'SO', :name => 'Somalia', :alpha3 => 'SOM', :numeric => '706' },
{ :alpha2 => 'ZA', :name => 'South Africa', :alpha3 => 'ZAF', :numeric => '710' },
{ :alpha2 => 'ES', :name => 'Spain', :alpha3 => 'ESP', :numeric => '724' },
{ :alpha2 => 'LK', :name => 'Sri Lanka', :alpha3 => 'LKA', :numeric => '144' },
{ :alpha2 => 'SD', :name => 'Sudan', :alpha3 => 'SDN', :numeric => '736' },
{ :alpha2 => 'SR', :name => 'Suriname', :alpha3 => 'SUR', :numeric => '740' },
{ :alpha2 => 'SJ', :name => 'Svalbard and Jan Mayen', :alpha3 => 'SJM', :numeric => '744' },
{ :alpha2 => 'SZ', :name => 'Swaziland', :alpha3 => 'SWZ', :numeric => '748' },
{ :alpha2 => 'SE', :name => 'Sweden', :alpha3 => 'SWE', :numeric => '752' },
{ :alpha2 => 'CH', :name => 'Switzerland', :alpha3 => 'CHE', :numeric => '756' },
{ :alpha2 => 'SY', :name => 'Syrian Arab Republic', :alpha3 => 'SYR', :numeric => '760' },
{ :alpha2 => 'TW', :name => 'Taiwan, Province of China', :alpha3 => 'TWN', :numeric => '158' },
{ :alpha2 => 'TJ', :name => 'Tajikistan', :alpha3 => 'TJK', :numeric => '762' },
{ :alpha2 => 'TZ', :name => 'Tanzania, United Republic of', :alpha3 => 'TZA', :numeric => '834' },
{ :alpha2 => 'TH', :name => 'Thailand', :alpha3 => 'THA', :numeric => '764' },
{ :alpha2 => 'TG', :name => 'Togo', :alpha3 => 'TGO', :numeric => '768' },
{ :alpha2 => 'TK', :name => 'Tokelau', :alpha3 => 'TKL', :numeric => '772' },
{ :alpha2 => 'TO', :name => 'Tonga', :alpha3 => 'TON', :numeric => '776' },
{ :alpha2 => 'TT', :name => 'Trinidad and Tobago', :alpha3 => 'TTO', :numeric => '780' },
{ :alpha2 => 'TN', :name => 'Tunisia', :alpha3 => 'TUN', :numeric => '788' },
{ :alpha2 => 'TR', :name => 'Turkey', :alpha3 => 'TUR', :numeric => '792' },
{ :alpha2 => 'TM', :name => 'Turkmenistan', :alpha3 => 'TKM', :numeric => '795' },
{ :alpha2 => 'TC', :name => 'Turks and Caicos Islands', :alpha3 => 'TCA', :numeric => '796' },
{ :alpha2 => 'TV', :name => 'Tuvalu', :alpha3 => 'TUV', :numeric => '798' },
{ :alpha2 => 'UG', :name => 'Uganda', :alpha3 => 'UGA', :numeric => '800' },
{ :alpha2 => 'UA', :name => 'Ukraine', :alpha3 => 'UKR', :numeric => '804' },
{ :alpha2 => 'AE', :name => 'United Arab Emirates', :alpha3 => 'ARE', :numeric => '784' },
{ :alpha2 => 'GB', :name => 'United Kingdom', :alpha3 => 'GBR', :numeric => '826' },
{ :alpha2 => 'US', :name => 'United States', :alpha3 => 'USA', :numeric => '840' },
{ :alpha2 => 'UY', :name => 'Uruguay', :alpha3 => 'URY', :numeric => '858' },
{ :alpha2 => 'UZ', :name => 'Uzbekistan', :alpha3 => 'UZB', :numeric => '860' },
{ :alpha2 => 'VU', :name => 'Vanuatu', :alpha3 => 'VUT', :numeric => '548' },
{ :alpha2 => 'VE', :name => 'Venezuela', :alpha3 => 'VEN', :numeric => '862' },
{ :alpha2 => 'VN', :name => 'Viet Nam', :alpha3 => 'VNM', :numeric => '704' },
{ :alpha2 => 'VG', :name => 'Virgin Islands, British', :alpha3 => 'VGB', :numeric => '092' },
{ :alpha2 => 'VI', :name => 'Virgin Islands, U.S.', :alpha3 => 'VIR', :numeric => '850' },
{ :alpha2 => 'WF', :name => 'Wallis and Futuna', :alpha3 => 'WLF', :numeric => '876' },
{ :alpha2 => 'EH', :name => 'Western Sahara', :alpha3 => 'ESH', :numeric => '732' },
{ :alpha2 => 'YE', :name => 'Yemen', :alpha3 => 'YEM', :numeric => '887' },
{ :alpha2 => 'ZM', :name => 'Zambia', :alpha3 => 'ZMB', :numeric => '894' },
{ :alpha2 => 'ZW', :name => 'Zimbabwe', :alpha3 => 'ZWE', :numeric => '716' }
]
def self.find(name)
raise InvalidCountryCodeError, "Cannot lookup country for an empty name" if name.blank?
case name.length
when 2, 3
upcase_name = name.upcase
country_code = CountryCode.new(name)
country = COUNTRIES.detect{|c| c[country_code.format] == upcase_name }
else
country = COUNTRIES.detect{|c| c[:name] == name }
end
raise InvalidCountryCodeError, "No country could be found for the country #{name}" if country.nil?
Country.new(country.dup)
end
end
end
| 72.573333 | 119 | 0.472396 |
1a25d55a4e40a34cdf1496fb1bec844f8d2ab103 | 7,638 | # -*- encoding: utf-8 -*-
#
# Author:: Fletcher Nichol (<[email protected]>)
#
# Copyright (C) 2014, Fletcher Nichol
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require_relative "../spec_helper"
require "kitchen"
describe Kitchen::Logger do
before do
@orig_stdout = $stdout
$stdout = StringIO.new
end
after do
$stdout = @orig_stdout
end
def colorize(*args)
Kitchen::Color.colorize(*args)
end
let(:opts) do
{ :color => :red }
end
let(:logger) do
Kitchen::Logger.new(opts)
end
it "sets the log level to :info by default" do
logger.level.must_equal Kitchen::Util.to_logger_level(:info)
logger.debug?.must_equal false
logger.info?.must_equal true
logger.error?.must_equal true
logger.warn?.must_equal true
logger.fatal?.must_equal true
end
it "sets a level at creation" do
opts[:level] = Kitchen::Util.to_logger_level(:warn)
logger.level.must_equal Kitchen::Util.to_logger_level(:warn)
logger.info?.must_equal false
logger.warn?.must_equal true
logger.fatal?.must_equal true
end
it "sets a level after creation" do
logger.level = Kitchen::Util.to_logger_level(:fatal)
logger.level.must_equal Kitchen::Util.to_logger_level(:fatal)
logger.warn?.must_equal false
logger.fatal?.must_equal true
end
it "datetime_format is nil by default" do
logger.datetime_format.must_equal nil
end
it "sets datetime_format after creation" do
logger.datetime_format = "smart?"
logger.datetime_format.must_equal "smart?"
end
it "sets progname to Kitchen by default" do
logger.progname.must_equal "Kitchen"
end
it "sets progname at creation" do
opts[:progname] = "Dream Theater"
logger.progname.must_equal "Dream Theater"
end
it "sets progname after creation" do
logger.progname = "MASTA"
logger.progname.must_equal "MASTA"
end
describe "stdout-based logger" do
let(:stdout) { StringIO.new }
before { opts[:stdout] = stdout }
it "sets up a simple STDOUT logger by default" do
opts.delete(:stdout)
logger.info("hello")
$stdout.string.must_equal colorize(" hello", opts[:color]) + "\n"
end
it "accepts a :stdout option to redirect output" do
logger.info("hello")
stdout.string.must_equal colorize(" hello", opts[:color]) + "\n"
end
describe "for severity" do
before { opts[:level] = Kitchen::Util.to_logger_level(:debug) }
it "logs to banner" do
logger.banner("yo")
stdout.string.must_equal colorize("-----> yo", opts[:color]) + "\n"
end
it "logs to debug" do
logger.debug("yo")
stdout.string.must_equal colorize("D yo", opts[:color]) + "\n"
end
it "logs to info" do
logger.info("yo")
stdout.string.must_equal colorize(" yo", opts[:color]) + "\n"
end
it "logs to error" do
logger.error("yo")
stdout.string.must_equal colorize(">>>>>> yo", opts[:color]) + "\n"
end
it "logs to warn" do
logger.warn("yo")
stdout.string.must_equal colorize("$$$$$$ yo", opts[:color]) + "\n"
end
it "logs to fatal" do
logger.fatal("yo")
stdout.string.must_equal colorize("!!!!!! yo", opts[:color]) + "\n"
end
it "logs to unknown" do
logger.unknown("yo")
stdout.string.must_equal colorize("?????? yo", opts[:color]) + "\n"
end
end
describe "#<<" do
it "message with a newline are logged on info" do
logger << "yo\n"
stdout.string.must_equal colorize(" yo", opts[:color]) + "\n"
end
it "message with multiple newlines are separately logged on info" do
logger << "yo\nheya\n"
stdout.string.must_equal(
colorize(" yo", opts[:color]) + "\n" +
colorize(" heya", opts[:color]) + "\n"
)
end
it "message with info, error, and banner lines will be preserved" do
logger << [
"-----> banner",
" info",
">>>>>> error",
"vanilla"
].join("\n").concat("\n")
stdout.string.must_equal(
colorize("-----> banner", opts[:color]) + "\n" +
colorize(" info", opts[:color]) + "\n" +
colorize(">>>>>> error", opts[:color]) + "\n" +
colorize(" vanilla", opts[:color]) + "\n"
)
end
end
end
describe "opened IO logdev-based logger" do
let(:logdev) { StringIO.new }
before { opts[:logdev] = logdev }
describe "for severity" do
before { opts[:level] = Kitchen::Util.to_logger_level(:debug) }
let(:ts) { "\\[[^\\]]+\\]" }
it "logs to banner" do
logger.banner("yo")
logdev.string.must_match(/^I, #{ts} INFO -- Kitchen: -----> yo$/)
end
it "logs to debug" do
logger.debug("yo")
logdev.string.must_match(/^D, #{ts} DEBUG -- Kitchen: yo$/)
end
it "logs to info" do
logger.info("yo")
logdev.string.must_match(/^I, #{ts} INFO -- Kitchen: yo$/)
end
it "logs to error" do
logger.error("yo")
logdev.string.must_match(/^E, #{ts} ERROR -- Kitchen: yo$/)
end
it "logs to warn" do
logger.warn("yo")
logdev.string.must_match(/^W, #{ts} WARN -- Kitchen: yo$/)
end
it "logs to fatal" do
logger.fatal("yo")
logdev.string.must_match(/^F, #{ts} FATAL -- Kitchen: yo$/)
end
it "logs to unknown" do
logger.unknown("yo")
logdev.string.must_match(/^A, #{ts} ANY -- Kitchen: yo$/)
end
end
end
describe "file IO logdev-based logger" do
let(:logfile) { Dir::Tmpname.make_tmpname(%w[kitchen .log], nil) }
before do
opts[:logdev] = logfile
FakeFS.activate!
FileUtils.mkdir_p("/tmp")
end
after do
FakeFS.deactivate!
FakeFS::FileSystem.clear
end
describe "for severity" do
before { opts[:level] = Kitchen::Util.to_logger_level(:debug) }
let(:ts) { "\\[[^\\]]+\\]" }
it "logs to banner" do
logger.banner("yo")
IO.read(logfile).must_match(/^I, #{ts} INFO -- Kitchen: -----> yo$/)
end
it "logs to debug" do
logger.debug("yo")
IO.read(logfile).must_match(/^D, #{ts} DEBUG -- Kitchen: yo$/)
end
it "logs to info" do
logger.info("yo")
IO.read(logfile).must_match(/^I, #{ts} INFO -- Kitchen: yo$/)
end
it "logs to error" do
logger.error("yo")
IO.read(logfile).must_match(/^E, #{ts} ERROR -- Kitchen: yo$/)
end
it "logs to warn" do
logger.warn("yo")
IO.read(logfile).must_match(/^W, #{ts} WARN -- Kitchen: yo$/)
end
it "logs to fatal" do
logger.fatal("yo")
IO.read(logfile).must_match(/^F, #{ts} FATAL -- Kitchen: yo$/)
end
it "logs to unknown" do
logger.unknown("yo")
IO.read(logfile).must_match(/^A, #{ts} ANY -- Kitchen: yo$/)
end
end
end
end
| 23.86875 | 77 | 0.581566 |
1d4f2ee5345a0d0c8aa0c82773c6823266b9e4b9 | 449 | # frozen_string_literal: true
require 'vk/api/responses'
module Vk
module API
class Notifications < Vk::Schema::Namespace
module Responses
# @see https://github.com/VKCOM/vk-api-schema/blob/master/objects.json
class MarkAsViewedResponse < Vk::Schema::Response
# @return [API::Base::BoolInt] Result
attribute :response, API::Base::BoolInt.optional.default(nil)
end
end
end
end
end
| 26.411765 | 78 | 0.663697 |
62ad39177d2b2d8f8a4af39d171e0219a539f5ab | 4,438 | # Note that x.even are stable releases, x.odd are devel releases
class Node010 < Formula
desc "Platform built on V8 to build network applications"
homepage "https://nodejs.org/"
url "https://nodejs.org/dist/v0.10.41/node-v0.10.41.tar.gz"
sha256 "79f694e2a5c42543b75d0c69f6860499d7593136d0f6b59e7163b9e66fb2c995"
head "https://github.com/nodejs/node.git", :branch => "v0.10-staging"
bottle do
sha256 "3b809332664b36e68e4080c0565ff8840ff7b4201133ebff6edca851a9b8953e" => :el_capitan
sha256 "20a72b8fd7efb91914746ffba064fbf3d2e3f69cb095d02ef7b77b7b05bf9138" => :yosemite
sha256 "567366769ce4b435d1edcdd06de06c64ed9a8c8fb8c78df9f696190cd113e4b3" => :mavericks
end
deprecated_option "enable-debug" => "with-debug"
option "with-debug", "Build with debugger hooks"
option "without-npm", "npm will not be installed"
option "without-completion", "npm bash completion will not be installed"
depends_on :python => :build
depends_on "openssl" => :optional
fails_with :llvm do
build 2326
end
resource "npm" do
url "https://registry.npmjs.org/npm/-/npm-2.14.4.tgz"
sha256 "c8b602de5d51f956aa8f9c34d89be38b2df3b7c25ff6588030eb8224b070db27"
end
conflicts_with "node",
:because => "Differing versions of the same formulae."
def install
args = %W[--prefix=#{prefix} --without-npm]
args << "--debug" if build.with? "debug"
if build.with? "openssl"
args << "--shared-openssl"
else
args << "--without-ssl2" << "--without-ssl3"
end
system "./configure", *args
system "make", "install"
if build.with? "npm"
resource("npm").stage buildpath/"npm_install"
# make sure npm can find node
ENV.prepend_path "PATH", bin
# make sure user prefix settings in $HOME are ignored
ENV["HOME"] = buildpath/".brew_home"
# set log level temporarily for npm's `make install`
ENV["NPM_CONFIG_LOGLEVEL"] = "verbose"
cd buildpath/"npm_install" do
system "./configure", "--prefix=#{libexec}/npm"
system "make", "install"
end
if build.with? "completion"
bash_completion.install \
buildpath/"npm_install/lib/utils/completion.sh" => "npm"
end
end
end
def post_install
return if build.without? "npm"
node_modules = HOMEBREW_PREFIX/"lib/node_modules"
node_modules.mkpath
npm_exec = node_modules/"npm/bin/npm-cli.js"
# Kill npm but preserve all other modules across node updates/upgrades.
rm_rf node_modules/"npm"
cp_r libexec/"npm/lib/node_modules/npm", node_modules
# This symlink doesn't hop into homebrew_prefix/bin automatically so
# remove it and make our own. This is a small consequence of our bottle
# npm make install workaround. All other installs **do** symlink to
# homebrew_prefix/bin correctly. We ln rather than cp this because doing
# so mimics npm's normal install.
ln_sf npm_exec, "#{HOMEBREW_PREFIX}/bin/npm"
# Let's do the manpage dance. It's just a jump to the left.
# And then a step to the right, with your hand on rm_f.
["man1", "man3", "man5", "man7"].each do |man|
# Dirs must exist first: https://github.com/Homebrew/homebrew/issues/35969
mkdir_p HOMEBREW_PREFIX/"share/man/#{man}"
rm_f Dir[HOMEBREW_PREFIX/"share/man/#{man}/{npm.,npm-,npmrc.}*"]
ln_sf Dir[libexec/"npm/lib/node_modules/npm/man/#{man}/npm*"], HOMEBREW_PREFIX/"share/man/#{man}"
end
npm_root = node_modules/"npm"
npmrc = npm_root/"npmrc"
npmrc.atomic_write("prefix = #{HOMEBREW_PREFIX}\n")
end
def caveats
s = ""
if build.without? "npm"
s += <<-EOS.undent
Homebrew has NOT installed npm. If you later install it, you should supplement
your NODE_PATH with the npm module folder:
#{HOMEBREW_PREFIX}/lib/node_modules
EOS
end
s
end
test do
path = testpath/"test.js"
path.write "console.log('hello');"
output = shell_output("#{bin}/node #{path}").strip
assert_equal "hello", output
if build.with? "npm"
# make sure npm can find node
ENV.prepend_path "PATH", opt_bin
assert_equal which("node"), opt_bin/"node"
assert (HOMEBREW_PREFIX/"bin/npm").exist?, "npm must exist"
assert (HOMEBREW_PREFIX/"bin/npm").executable?, "npm must be executable"
system "#{HOMEBREW_PREFIX}/bin/npm", "--verbose", "install", "npm@latest"
end
end
end
| 32.874074 | 103 | 0.677557 |
7953b6862bde5028247070cf587fd6a7c84d0cbc | 6,368 | require "spec_helper"
require_relative "shared_examples"
RSpec.describe OrcaApi::LockService, orca_api_mock: true do
let(:service) { described_class.new(orca_api) }
describe "#list" do
    context "when a lock is held" do
      it "retrieves the list of locks" do
expect_data = [
{
path: "/api21/medicalmodv37",
body: {
"=medicalv3req7" => {
"Request_Number" => "00",
"Karte_Uid" => orca_api.karte_uid,
}
},
result: "api21_medicalmodv37_00.json",
},
]
expect_orca_api_call(expect_data, binding)
result = service.list
expect(result.ok?).to be true
end
end
    context "when no lock is held" do
      it "does not raise an error and retrieves an empty list" do
expect_data = [
{
path: "/api21/medicalmodv37",
body: {
"=medicalv3req7" => {
"Request_Number" => "00",
"Karte_Uid" => orca_api.karte_uid,
}
},
result: "api21_medicalmodv37_00_E10.json",
},
]
expect_orca_api_call(expect_data, binding)
result = service.list
expect(result.ok?).to be true
expect(result.lock_information).to eq([])
expect(result["Lock_Information"]).to eq([])
end
end
end
describe "#unlock" do
    context "when a lock is held" do
      it "releases the lock" do
expect_data = [
{
path: "/api21/medicalmodv37",
body: {
"medicalv3req7" => {
"Request_Number" => "01",
"Karte_Uid" => orca_api.karte_uid,
"=Delete_Information" => {
"Delete_Karte_Uid" => "karte_uid",
"Delete_Orca_Uid" => "2204825e-c628-4747-8fc2-9e337b32125b",
},
}
},
result: "api21_medicalmodv37_01_one_S40.json",
},
{
path: "/api21/medicalmodv37",
body: {
"medicalv3req7" => {
"Request_Number" => "`prev.response_number`",
"Karte_Uid" => "`prev.karte_uid`",
"Orca_Uid" => "`prev.orca_uid`",
"=Delete_Information" => "`prev.delete_information`",
"Select_Answer" => "Ok",
}
},
result: "api21_medicalmodv37_01_one.json",
},
]
expect_orca_api_call(expect_data, binding)
result = service.unlock("karte_uid", "2204825e-c628-4747-8fc2-9e337b32125b")
expect(result.ok?).to be true
end
end
    context "when no lock is held" do
      it "returns an error" do
expect_data = [
{
path: "/api21/medicalmodv37",
body: {
"medicalv3req7" => {
"Request_Number" => "01",
"Karte_Uid" => orca_api.karte_uid,
"=Delete_Information" => {
"Delete_Karte_Uid" => "karte_uid",
"Delete_Orca_Uid" => "7b7c82a9-c703-4f5d-87a0-8312786f2dd5",
},
}
},
result: "api21_medicalmodv37_01_one_E13.json",
},
]
expect_orca_api_call(expect_data, binding)
result = service.unlock("karte_uid", "7b7c82a9-c703-4f5d-87a0-8312786f2dd5")
expect(result.ok?).to be false
end
end
end
describe "#unlock_all" do
    context "when a lock is held" do
      it "releases all locks" do
expect_data = [
{
path: "/api21/medicalmodv37",
body: {
"medicalv3req7" => {
"Request_Number" => "01",
"Karte_Uid" => orca_api.karte_uid,
"=Delete_Information" => {
"Delete_Class" => "All",
},
}
},
result: "api21_medicalmodv37_01_all_S40.json",
},
{
path: "/api21/medicalmodv37",
body: {
"medicalv3req7" => {
"Request_Number" => "`prev.response_number`",
"Karte_Uid" => "`prev.karte_uid`",
"Orca_Uid" => "`prev.orca_uid`",
"=Delete_Information" => "`prev.delete_information`",
"Select_Answer" => "Ok",
}
},
result: "api21_medicalmodv37_01_all.json",
},
]
expect_orca_api_call(expect_data, binding)
result = service.unlock_all
expect(result.ok?).to be true
end
end
    context "when no lock is held" do
      it "calls the ORCA API to release the locks" do
expect_data = [
{
path: "/api21/medicalmodv37",
body: {
"medicalv3req7" => {
"Request_Number" => "01",
"Karte_Uid" => orca_api.karte_uid,
"=Delete_Information" => {
"Delete_Class" => "All",
},
}
},
result: "api21_medicalmodv37_01_all_S40_empty.json",
},
{
body: {
"medicalv3req7" => {
"Request_Number" => "`prev.response_number`",
"Karte_Uid" => "`prev.karte_uid`",
"Orca_Uid" => "`prev.orca_uid`",
"=Delete_Information" => "`prev.delete_information`",
"Select_Answer" => "Ok",
}
},
result: "api21_medicalmodv37_01_all.json",
},
]
expect_orca_api_call(expect_data, binding)
result = service.unlock_all
expect(result.ok?).to be true
end
end
    context "error case: Karte_Uid is not set" do
let(:orca_api) { double("OrcaApi::Client", karte_uid: "") }
      it "returns an error" do
expect_data = [
{
path: "/api21/medicalmodv37",
body: {
"medicalv3req7" => {
"Request_Number" => "01",
"Karte_Uid" => "",
"=Delete_Information" => {
"Delete_Class" => "All",
},
}
},
result: "api21_medicalmodv37_01_all_E06.json",
},
]
expect_orca_api_call(expect_data, binding)
result = service.unlock_all
expect(result.ok?).to be false
end
end
end
end
| 27.929825 | 84 | 0.468279 |
3878a284199a8e04498b356ac7a2f8d6c0bf8757 | 341 | class Video
def initialize(pathname)
@pathname = pathname
@md5sum = Digest::MD5.file(full_path).hexdigest
end
def ext
@pathname.extname.downcase
end
def name
@pathname.basename
end
def full_path
@pathname.to_s
end
def md5sum
@md5sum
end
def creation_datetime
@pathname.mtime
end
end | 12.178571 | 51 | 0.671554 |
ed9e656717db998385bda6d90aa217991780ad35 | 13,906 | require 'test/unit'
require 'socket'
require 'thread'
require 'test/test_helper'
require 'ipaddr'
WINDOWS = RbConfig::CONFIG['host_os'] =~ /Windows|mswin/
class SocketTest < Test::Unit::TestCase
include TestHelper
# Should this work on windows? JRUBY-6665
if !WINDOWS
def test_multicast_send_and_receive
multicast_addr = "225.4.5.6"
port = 6789
multicast_msg = "Hello from automated JRuby test suite"
assert_nothing_raised do
socket = UDPSocket.new
ip = IPAddr.new(multicast_addr).hton + IPAddr.new("0.0.0.0").hton
socket.setsockopt(Socket::IPPROTO_IP, Socket::IP_ADD_MEMBERSHIP, ip)
socket.bind(Socket::INADDR_ANY, port)
socket.send(multicast_msg, 0, multicast_addr, port)
msg, info = socket.recvfrom(1024)
assert_equal(multicast_msg, msg)
assert_equal(multicast_msg.size, msg.size)
assert_equal(port, info[1])
socket.close
end
end
end
def test_tcp_socket_allows_nil_for_hostname
assert_nothing_raised do
server = TCPServer.new(nil, 7789)
t = Thread.new do
s = server.accept
s.close
end
client = TCPSocket.new(nil, 7789)
client.write ""
t.join
end
end
#JRUBY-3827
def test_nil_hostname_and_passive_returns_inaddr_any
assert_nothing_raised do
addrs = Socket::getaddrinfo(nil, 7789, Socket::AF_UNSPEC, Socket::SOCK_STREAM, 0, Socket::AI_PASSIVE)
assert_not_equal(0, addrs.size)
assert_equal("0.0.0.0", addrs[0][2])
assert_equal("0.0.0.0", addrs[0][3])
end
end
def test_nil_hostname_and_no_flags_returns_localhost
assert_nothing_raised do
addrs = Socket::getaddrinfo(nil, 7789, Socket::AF_UNSPEC, Socket::SOCK_STREAM, 0)
assert_not_equal(0, addrs.size)
# FIXME, behaves differently on Windows, both JRuby and MRI.
# JRuby returns "127.0.0.1", "127.0.0.1"
# MRI returns "<actual_hostname>", "127.0.0.1"
unless WINDOWS
#assert_equal("localhost", addrs[0][2])
assert_equal("127.0.0.1", addrs[0][3])
end
end
end
def test_basic_socket_reverse_lookup
assert_nothing_raised do
reverse = BasicSocket.do_not_reverse_lookup
BasicSocket.do_not_reverse_lookup = !reverse
assert_equal(reverse, !BasicSocket.do_not_reverse_lookup)
BasicSocket.do_not_reverse_lookup = reverse
end
end
#JRUBY-2147
def test_tcp_close_read
socket = TCPServer.new(nil, 9999)
socket.close_read
assert(!socket.closed?)
socket.close
end
#JRUBY-2146
def test_tcp_close_write
socket = TCPServer.new(nil, 8888)
socket.close_write
assert(!socket.closed?)
socket.close
end
def test_tcp_close_read_then_write_should_close_socket
socket = TCPServer.new(nil, 7777)
socket.close_write
assert(!socket.closed?)
socket.close_read
assert(socket.closed?)
end
# JRUBY-2874
def test_raises_socket_error_on_out_of_range_port
[-2**16, -2**8, -2, -1, 2**16, 2**16 + 1, 2**17, 2**30 -1].each do |port|
assert_raises(SocketError) do
TCPSocket.new('localhost', port)
end
end
end
# JRUBY-4299
def test_tcp_socket_reuse_addr
socket = Socket.new(Socket::AF_INET, Socket::SOCK_STREAM, 0)
socket.setsockopt(Socket::SOL_SOCKET, Socket::SO_REUSEADDR, true)
assert_not_equal 0, socket.getsockopt(Socket::SOL_SOCKET, Socket::SO_REUSEADDR).unpack('i')[0]
ensure
socket.close
end
# JRUBY-4299
def test_udp_socket_reuse_addr
socket = Socket.new(Socket::AF_INET, Socket::SOCK_DGRAM, 0)
socket.setsockopt(Socket::SOL_SOCKET, Socket::SO_REUSEADDR, true)
assert_not_equal 0, socket.getsockopt(Socket::SOL_SOCKET, Socket::SO_REUSEADDR).unpack('i')[0]
ensure
socket.close
end
# JRUBY-4868
def test_getservbyname
assert_equal(21, Socket.getservbyname('ftp'))
assert_equal(21, Socket.getservbyname('21'))
assert_equal(21, Socket.getservbyname(' 21'))
end
end
class UNIXSocketTests < Test::Unit::TestCase
IS19 = RUBY_VERSION =~ /1\.9/
# this is intentional, otherwise test run fails on windows
def test_dummy; end
if defined?(UNIXSocket) && !WINDOWS
def test_unix_socket_path
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
server = UNIXServer.open(path)
assert_equal path, server.path
cli = UNIXSocket.open(path)
assert_equal "", cli.path
cli.close
server.close
File.unlink(path) if File.exist?(path)
end
def test_unix_socket_addr
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
server = UNIXServer.open(path)
assert_equal ["AF_UNIX", path], server.addr
cli = UNIXSocket.open(path)
assert_equal ["AF_UNIX", ""], cli.addr
cli.close
server.close
File.unlink(path) if File.exist?(path)
end
def test_unix_socket_peeraddr_raises_enotconn
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
server = UNIXServer.open(path)
assert_raises(Errno::ENOTCONN) do
server.peeraddr
end
File.unlink(path) if File.exist?(path)
end
=begin new UNIXSocket stuff needs work
def test_unix_socket_peeraddr
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
server = UNIXServer.open(path)
cli = UNIXSocket.open(path)
ssrv = server.accept
assert_equal ["AF_UNIX", ""], ssrv.peeraddr
assert_equal ["AF_UNIX", path], cli.peeraddr
ssrv.close
cli.close
server.close
File.unlink(path) if File.exist?(path)
end
=end
def test_unix_socket_raises_exception_on_too_long_path
assert_raises(ArgumentError) do
# on some platforms, 103 is invalid length (MacOS)
# on others, 108 (Linux), we'll take the biggest one
UNIXSocket.new("a" * 108)
end
end
def test_unix_socket_raises_exception_on_path_that_cant_exist
path = "a"
File.unlink(path) if File.exist?(path)
assert_raises(Errno::ENOENT) do
UNIXSocket.new(path)
end
end
def test_can_create_socket_server
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
sock = UNIXServer.open(path)
assert File.exist?(path)
sock.close
File.unlink(path) if File.exist?(path)
end
def test_can_create_socket_server_and_accept_nonblocking
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
sock = UNIXServer.open(path)
assert File.exist?(path)
begin
sock.accept_nonblock
assert false, "failed to raise EAGAIN"
rescue Errno::EAGAIN => e
assert IO::WaitReadable === e if IS19
end
cli = UNIXSocket.open(path)
sock.accept_nonblock.close
cli.close
sock.close
File.unlink(path) if File.exist?(path)
end
def test_can_create_socket_server_and_relisten
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
sock = UNIXServer.open(path)
assert File.exist?(path)
sock.listen(1)
assert File.exist?(path)
sock.close
File.unlink(path) if File.exist?(path)
end
# JRUBY-5708
def test_can_create_socket_server_and_blocking_select_blocks_on_it
require 'timeout'
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
sock = UNIXServer.open(path)
assert File.exist?(path)
assert_raises(Timeout::Error) do
Timeout::timeout(0.1) do
IO.select [sock], nil, nil, 1
end
end
sock.close
File.unlink(path) if File.exist?(path)
end
def test_can_create_socket_server_and_client_connected_to_it
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
sock = UNIXServer.open(path)
assert File.exist?(path)
cli = UNIXSocket.open(path)
cli.close
sock.close
File.unlink(path) if File.exist?(path)
end
def test_can_create_socket_server_and_client_connected_to_it_and_send_from_client_to_server
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
sock = UNIXServer.open(path)
assert File.exist?(path)
cli = UNIXSocket.open(path)
servsock = sock.accept
cli.send("hello",0)
assert_equal "hello", servsock.recv(5)
servsock.close
cli.close
sock.close
File.unlink(path) if File.exist?(path)
end
=begin New UNIXSocket stuff needs work
def test_can_create_socket_server_and_client_connected_to_it_and_send_from_server_to_client
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
sock = UNIXServer.open(path)
assert File.exist?(path)
cli = UNIXSocket.open(path)
servsock = sock.accept
servsock.send("hello",0)
assert_equal "hello", cli.recv(5)
servsock.close
cli.close
sock.close
File.unlink(path) if File.exist?(path)
end
def test_can_create_socket_server_and_client_connected_to_it_and_send_from_client_to_server_using_recvfrom
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
sock = UNIXServer.open(path)
assert File.exist?(path)
cli = UNIXSocket.open(path)
servsock = sock.accept
cli.send("hello",0)
assert_equal ["hello", ["AF_UNIX", ""]], servsock.recvfrom(5)
servsock.close
cli.close
sock.close
File.unlink(path) if File.exist?(path)
end
def test_can_create_socket_server_and_client_connected_to_it_and_send_from_server_to_client_using_recvfrom
path = "/tmp/sample"
File.unlink(path) if File.exist?(path)
sock = UNIXServer.open(path)
assert File.exist?(path)
cli = UNIXSocket.open(path)
servsock = sock.accept
servsock.send("hello",0)
data = cli.recvfrom(5)
assert_equal "hello", data[0]
assert_equal "AF_UNIX", data[1][0]
servsock.close
cli.close
sock.close
File.unlink(path) if File.exist?(path)
end
=end
def test_can_create_socketpair_and_send_from_one_to_the_other
sock1, sock2 = UNIXSocket.socketpair
sock1.send("hello", 0)
assert_equal "hello", sock2.recv(5)
sock1.close
sock2.close
end
def test_can_create_socketpair_and_can_send_from_the_other
sock1, sock2 = UNIXSocket.socketpair
sock2.send("hello", 0)
assert_equal "hello", sock1.recv(5)
sock2.close
sock1.close
end
def test_can_create_socketpair_and_can_send_from_the_other_with_recvfrom
sock1, sock2 = UNIXSocket.socketpair
sock2.send("hello", 0)
assert_equal ["hello", ["AF_UNIX", ""]], sock1.recvfrom(5)
sock2.close
sock1.close
end
def test_can_read_and_get_minus_one
sock1, sock2 = UNIXSocket.socketpair
sock2.send("hello", 0)
assert_equal "hell", sock1.recv(4)
assert_equal "", sock1.recv(0)
assert_equal "o", sock1.recv(1)
sock2.close
sock1.close
assert_raises(IOError) do
sock1.recv(1)
end
end
end
end
class ServerTest < Test::Unit::TestCase
def test_server_close_interrupts_pending_accepts
# unfortunately this test is not going to be 100% reliable,
# since it involves thread interaction and it's impossible to
# do things like wait until the other thread blocks
port = 41258
server = TCPServer.new('localhost', port)
queue = Queue.new
thread = Thread.new do
server.accept
end
# wait until the thread is sleeping (ready to accept)
Thread.pass while thread.alive? && thread.status != "sleep"
# close the server
server.close
# propagate the thread's termination error and check it
# NOTE: 1.8 raises IOError while 1.9 raises EBADF, so the behavior isn't
# consistent across versions; the assertion below expects IOError.
assert_raise(IOError) {thread.value}
end
# JRUBY-2874
def test_raises_socket_error_on_out_of_range_port
[-2**16, -2**8, -2, -1, 2**16, 2**16 + 1, 2**17, 2**30 - 1].each do |port|
assert_raises(SocketError) do
TCPServer.new('localhost', port)
end
end
end
# JRUBY-4299
def test_server_reuse_addr
socket = TCPServer.new("127.0.0.1", 7777)
socket.setsockopt(Socket::SOL_SOCKET, Socket::SO_REUSEADDR, true)
assert_not_equal 0, socket.getsockopt(Socket::SOL_SOCKET, Socket::SO_REUSEADDR).unpack('i')[0]
ensure
socket.close
end
# JRUBY-5111
def test_server_methods_with_closed_socket
socket = TCPServer.new("127.0.0.1", 7777)
socket.close
assert_raises(IOError) { socket.addr }
assert_raises(IOError) { socket.getsockname }
end
# JRUBY-5876
def test_syswrite_raises_epipe
t = Thread.new do
server = TCPServer.new("127.0.0.1", 1234)
while sock = server.accept
sock.close
end
end
Thread.pass while t.alive? && t.status != 'sleep'
sock = TCPSocket.new("127.0.0.1", 1234)
sock.setsockopt Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1
delay = 0.1
tries = 0
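# keep writing until the peer's close propagates and the write fails
# (expected to raise Errno::EPIPE on non-Windows platforms)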
loop do
sock.syswrite("2")
end
rescue => ex
# FIXME: Throws 'unknown' error
if !WINDOWS
assert Errno::EPIPE === ex
end
end
end
| 27.001942 | 114 | 0.634906 |
e2b8330b0bcd6d0848c6d4d43aded4cf42aed503 | 1,438 | # Copyright (c) 2008-2013 Michael Dvorkin and contributors.
#
# Fat Free CRM is freely distributable under the terms of MIT license.
# See MIT-LICENSE file or http://www.opensource.org/licenses/mit-license.php
#------------------------------------------------------------------------------
require 'spec_helper'
describe "/tasks/new" do
include TasksHelper
before do
login_and_assign
assign(:task, FactoryGirl.build(:task))
assign(:users, [ current_user ])
assign(:bucket, Setting.task_bucket[1..-1] << [ "On Specific Date...", :specific_time ])
assign(:category, Setting.unroll(:task_category))
end
it "should toggle empty message div if it exists" do
render
rendered.should include("crm.flick('empty', 'toggle')")
end
describe "new task" do
before { @task_with_time = Setting.task_calendar_with_time }
after { Setting.task_calendar_with_time = @task_with_time }
it "create: should render [new] template into :create_task div" do
params[:cancel] = nil
render
rendered.should include("jQuery('#create_task').html")
rendered.should include("crm.flip_form('create_task');")
end
end
describe "cancel new task" do
it "should hide [create task] form" do
params[:cancel] = "true"
render
rendered.should_not include("jQuery('#create_task').html")
rendered.should include("crm.flip_form('create_task');")
end
end
end
| 29.346939 | 92 | 0.652295 |
392520c800c71f9c58a016f6fa73a79048a1e7e5 | 1,453 | require 'securerandom'
##
# This file mounts each app in the Padrino project to a specified sub-uri.
# You can mount additional applications using any of these commands below:
#
# Padrino.mount('blog').to('/blog')
# Padrino.mount('blog', :app_class => 'BlogApp').to('/blog')
# Padrino.mount('blog', :app_file => 'path/to/blog/app.rb').to('/blog')
#
# You can also map apps to a specified host:
#
# Padrino.mount('Admin').host('admin.example.org')
# Padrino.mount('WebSite').host(/.*\.?example.org/)
# Padrino.mount('Foo').to('/foo').host('bar.example.org')
#
# Note 1: Mounted apps (by default) should be placed into the project root at '/app_name'.
# Note 2: If you use the host matching remember to respect the order of the rules.
#
# By default, this file mounts the primary app which was generated with this project.
# However, the mounted app can be modified as needed:
#
# Padrino.mount('AppName', :app_file => 'path/to/file', :app_class => 'BlogApp').to('/')
#
##
# Setup global project settings for your apps. These settings are inherited by every subapp. You can
# override these settings in the subapps as needed.
#
Padrino.configure_apps do
# enable :sessions
set :session_secret, ENV.fetch('SESSION_SECRET')
set :protection, :except => :path_traversal
set :protect_from_csrf, true
end
# Mounts the core application for this project
Padrino.mount('Observatory::App', :app_file => Padrino.root('app/app.rb')).to('/')
| 37.25641 | 100 | 0.706813 |
1cacfcf24ddda3b8119261ed4a9350be71d419d5 | 550 | RSpec.describe 'feature' do
xit 'randomises players into groups of 4' do
names = %w[A B C D E F G H I J]
players = generate_players(names)
tournament_generator = Tournament::Generator.new('output.csv')
tournament_generator.create_tournament(players)
contents = File.read('output.csv')
expect(contents).to eq(players)
end
def generate_players(names)
full_players = []
names.each do |name|
full_players.push(
name: name,
email: "#{name}@mail.com"
)
end
full_players
end
end
| 23.913043 | 66 | 0.656364 |
03db2696441bc1c13fb8cd8be121a65ac094247b | 379 | require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
describe PasswordResetsHelper do
#Delete this example and add some real ones or delete this file
it "should be included in the object returned by #helper" do
included_modules = (class << helper; self; end).send :included_modules
included_modules.should include(PasswordResetsHelper)
end
end
| 31.583333 | 74 | 0.762533 |
61675753f30bb0d01a0def42205cef70104e49ed | 84 | # frozen_string_literal: true
class CommitteeContribution < ActiveRecord::Base
end
| 16.8 | 48 | 0.833333 |
4aed87e9346339750ba559840ae3b898f0c07f38 | 966 | require_relative "../canvas_base_input_type"
module LMSGraphQL
module Types
module Canvas
class CanvasFileInput < BaseInputObject
description "Plagiarism Detection Submissions. API Docs: https://canvas.instructure.com/doc/api/plagiarism_detection_submissions.html"
argument :size, Int, "Example: 4", required: false
argument :content_type, String, "Example: text/plain", required: false
argument :url, String, "Example: http://www.example.com/files/569/download?download_frd=1&verifier=c6HdZmxOZa0Fiin2cbvZeI8I5ry7yqD7RChQzb6P", required: false
argument :id, ID, "Example: 569", required: false
argument :display_name, String, "Example: file.txt", required: false
argument :created_at, LMSGraphQL::Types::DateTimeType, "Example: 2012-07-06T14:58:50Z", required: false
argument :updated_at, LMSGraphQL::Types::DateTimeType, "Example: 2012-07-06T14:58:50Z", required: false
end
end
end
end | 48.3 | 163 | 0.736025 |
4adae48c743326756f68a6abf5f2d4e1304af784 | 583 | set :stage, :staging
server 'oaklan-roots.sit', user: 'vagrant', roles: %w{web app db}
set :ssh_options, {
keys: %w(~/.vagrant.d/insecure_private_key),
}
# and/or per server
# server 'example.com',
# user: 'user_name',
# roles: %w{web app},
# ssh_options: {
# user: 'user_name', # overrides user setting above
# keys: %w(/home/user_name/.ssh/id_rsa),
# forward_agent: false,
# auth_methods: %w(publickey password)
# # password: 'please use keys'
# }
# setting per server overrides global ssh_options
# fetch(:default_env).merge!(rails_env: :staging)
| 25.347826 | 65 | 0.662093 |
084e0ec3cad68e61d5ae1e69c6fa331e33f8551d | 5,318 | #
# Be sure to run `pod spec lint XWebView.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# ―――  Spec Metadata  ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "XWebView-iOS"
s.version = "0.13.0"
s.summary = "An extensible WebView (based on WKWebView)"
s.description = <<-DESC
XWebView is an extensible WebView built on top of WKWebView,
the modern WebKit framework that debuted in iOS 8.0. It provides a fast Web
runtime with a carefully designed plugin API for developing sophisticated
iOS native or hybrid applications.
Plugins written in the Objective-C or Swift programming languages can be
exposed automatically in the JavaScript context. With the capabilities offered
by plugins, Web apps can look and behave exactly like native apps. They
will no longer be second-class citizens on the iOS platform.
DESC
s.homepage = "https://github.com/yegail/XWebView"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# ―――  Spec License  ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = { :type => "Apache License, Version 2.0", :file => "LICENSE" }
# ―――  Author Metadata  ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.authors = { 'Zhenyu Liang' => '[email protected]', 'Jonathan Dong' => '[email protected]', 'David Kim' => '[email protected]', 'Fernando Martínez' => '[email protected]'}
# s.social_media_url = ""
# ―――  Platform Specifics  ――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
# s.platform = :ios
# When using multiple platforms
s.ios.deployment_target = "9.0"
s.osx.deployment_target = "10.11"
# ―――  Source Location  ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
s.source = { :git => "https://github.com/yegail/XWebView.git", :tag => s.version.to_s }
# ―――  Source Code  ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
s.source_files = "XWebView/*.swift", "XWebView/XWebView.h"
# s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# ―――  Resources  ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
s.resource = "XWebView/xwebview.js"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ―――  Project Linking  ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
s.framework = "WebKit"
s.ios.framework = "MobileCoreServices"
# s.frameworks = "SomeFramework", "AnotherFramework"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ―――  Project Settings  ―――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "JSONKit", "~> 1.4"
end
| 38.817518 | 194 | 0.598721 |