hexsha (stringlengths 40–40) | size (int64 2–1.01M) | content (stringlengths 2–1.01M) | avg_line_length (float64 1.5–100) | max_line_length (int64 2–1k) | alphanum_fraction (float64 0.25–1) |
---|---|---|---|---|---|
018fa78fb44491abd353e8a65f677352f233c8e3 | 953 | FactoryBot.define do
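# Factory for a Products::Qhp (qualified health plan) record with representative attribute values used in specs.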
factory :products_qhp, :class => 'Products::Qhp' do
issuer_id { "1234" }
state_postal_code { "DC" }
standard_component_id { "12340987" }
plan_marketing_name { "gold plan" }
hios_product_id { "1234" }
network_id { "123" }
service_area_id { "12" }
formulary_id { "123" }
is_new_plan { "yes" }
plan_type { "test" }
metal_level { "bronze" }
unique_plan_design { "" }
qhp_or_non_qhp { "qhp" }
insurance_plan_pregnancy_notice_req_ind { "yes" }
is_specialist_referral_required { "yes" }
hsa_eligibility { "yes" }
emp_contribution_amount_for_hsa_or_hra { "1000" }
child_only_offering { "no" }
is_wellness_program_offered { "yes" }
plan_effective_date { "04/01/2015".to_date }
out_of_country_coverage { "yes" }
out_of_service_area_coverage { "yes" }
national_network { "yes" }
summary_benefit_and_coverage_url { "www.example.com" }
end
end
| 31.766667 | 58 | 0.665268 |
790e19223a1d1dc163e9ea279879dbb8e81f606a | 9,958 | =begin
#Products
#No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator)
The version of the OpenAPI document: v3
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1
=end
require 'date'
module Hubspot
module Crm
module Products
class StandardError
attr_accessor :status
attr_accessor :id
attr_accessor :category
attr_accessor :sub_category
attr_accessor :message
attr_accessor :errors
attr_accessor :context
attr_accessor :links
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'status' => :'status',
:'id' => :'id',
:'category' => :'category',
:'sub_category' => :'subCategory',
:'message' => :'message',
:'errors' => :'errors',
:'context' => :'context',
:'links' => :'links'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'status' => :'String',
:'id' => :'String',
:'category' => :'ErrorCategory',
:'sub_category' => :'Object',
:'message' => :'String',
:'errors' => :'Array<ErrorDetail>',
:'context' => :'Hash<String, Array<String>>',
:'links' => :'Hash<String, String>'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `Hubspot::Crm::Products::StandardError` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `Hubspot::Crm::Products::StandardError`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'status')
self.status = attributes[:'status']
end
if attributes.key?(:'id')
self.id = attributes[:'id']
end
if attributes.key?(:'category')
self.category = attributes[:'category']
end
if attributes.key?(:'sub_category')
self.sub_category = attributes[:'sub_category']
end
if attributes.key?(:'message')
self.message = attributes[:'message']
end
if attributes.key?(:'errors')
if (value = attributes[:'errors']).is_a?(Array)
self.errors = value
end
end
if attributes.key?(:'context')
if (value = attributes[:'context']).is_a?(Hash)
self.context = value
end
end
if attributes.key?(:'links')
if (value = attributes[:'links']).is_a?(Hash)
self.links = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @status.nil?
invalid_properties.push('invalid value for "status", status cannot be nil.')
end
if @category.nil?
invalid_properties.push('invalid value for "category", category cannot be nil.')
end
if @message.nil?
invalid_properties.push('invalid value for "message", message cannot be nil.')
end
if @errors.nil?
invalid_properties.push('invalid value for "errors", errors cannot be nil.')
end
if @context.nil?
invalid_properties.push('invalid value for "context", context cannot be nil.')
end
if @links.nil?
invalid_properties.push('invalid value for "links", links cannot be nil.')
end
invalid_properties
end
# Check to see if all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @status.nil?
return false if @category.nil?
return false if @message.nil?
return false if @errors.nil?
return false if @context.nil?
return false if @links.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
status == o.status &&
id == o.id &&
category == o.category &&
sub_category == o.sub_category &&
message == o.message &&
errors == o.errors &&
context == o.context &&
links == o.links
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[status, id, category, sub_category, message, errors, context, links].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
Hubspot::Crm::Products.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String representation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
end
end
| 32.122581 | 219 | 0.542981 |
0308249c50e3eafb4ce77e8a1a75004708399628 | 348 | windows = RbConfig::CONFIG['host_os'] =~ /mswin|mingw/
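# Filesystem path test exclusions pending investigation; test_path is only excluded on Windows hosts.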
exclude :test_ascii_incompatible_path, "needs investigation"
exclude :test_basename, "needs investigation"
exclude :test_dirname, "needs investigation"
exclude :test_extname, "needs investigation"
exclude :test_join, "needs investigation"
exclude :test_path, "needs investigation" if windows
| 38.666667 | 60 | 0.804598 |
11025388762aed985fe6d176e4d468716a34394b | 692 | require File.dirname(__FILE__) + '/../abstract_unit'
class AtomicWriteTest < Test::Unit::TestCase
def test_atomic_write_without_errors
contents = "Atomic Text"
File.atomic_write(file_name, Dir.pwd) do |file|
file.write(contents)
assert !File.exist?(file_name)
end
assert File.exist?(file_name)
assert_equal contents, File.read(file_name)
ensure
File.unlink(file_name) rescue nil
end
def test_atomic_write_doesnt_write_when_block_raises
File.atomic_write(file_name) do |file|
file.write("testing")
raise "something bad"
end
rescue
assert !File.exist?(file_name)
end
def file_name
"atomic.file"
end
end
| 23.066667 | 54 | 0.702312 |
213fb9b9c14082c4cf985dca566b75630657e7a7 | 974 | require 'rails_helper'
RSpec.describe PreviousVersionOfClaim do
subject(:previous_version) { described_class.new(claim) }
let(:claim) { create(:archived_pending_delete_claim, evidence_checklist_ids: [3, 4, 1]) }
describe 'call' do
subject(:call) { previous_version.call }
it { is_expected.to be_a Claim::BaseClaim }
context 'when claim was archived by legacy paper_trail' do
before do
# manually update the correctly archived record to reflect the legacy style
version = claim.versions.last
new_object = version.object_deserialized.transform_values do |value|
if value.present? && value.eql?("---\n- 3\n- 4\n- 1\n")
[3, 4, 1]
else
value
end
end
version.update_columns object: PaperTrail.serializer.dump(new_object)
end
it { expect { previous_version.version }.to raise_error(ActiveRecord::SerializationTypeMismatch) }
end
end
end
| 31.419355 | 104 | 0.668378 |
ab48af5bb7212ca1ea6222301b9d80028fd06702 | 212 | FactoryBot.define do
factory :acts_as_historiable_history, class: 'History' do
log { "MyText" }
owner { "" }
historiable { "" }
ip_address { "MyString" }
user_agent { "MyString" }
end
end
| 21.2 | 59 | 0.627358 |
e86976b4a3894aa5a34b4c30123ceedf25b7ca79 | 5,017 | begin
require 'active_record'
rescue LoadError
require 'rubygems'
require 'active_record'
end
# Authenticates against a plain SQL table.
#
# This assumes that all of your users are stored in a table that has a 'username'
# column and a 'password' column. When the user logs in, CAS connects to the
# database and looks for a matching username/password in the users table. If a
# matching username and password is found, authentication is successful.
#
# Any database backend supported by ActiveRecord can be used.
#
# Config example:
#
# authenticator:
# class: CASServer::Authenticators::SQL
# database:
# adapter: mysql
# database: some_database_with_users_table
# username: root
# password:
# server: localhost
# user_table: users
# username_column: username
# password_column: password
#
# When replying to a CAS client's validation request, the server will normally
# provide the client with the authenticated user's username. However it is now
# possible for the server to provide the client with additional attributes.
# You can configure the SQL authenticator to provide data from additional
# columns in the users table by listing the names of the columns under the
# 'extra_attributes' option. Note though that this functionality is experimental.
# It should work with RubyCAS-Client, but may or may not work with other CAS
# clients.
#
# For example, with this configuration, the 'full_name' and 'access_level'
# columns will be provided to your CAS clients along with the username:
#
# authenticator:
# class: CASServer::Authenticators::SQL
# database:
# adapter: mysql
# database: some_database_with_users_table
# user_table: users
# username_column: username
# password_column: password
# ignore_type_column: true # indicates if you want to ignore Single Table Inheritance 'type' field
# extra_attributes: full_name, access_level
#
class CASServer::Authenticators::SQL < CASServer::Authenticators::Base
def self.setup(options)
raise CASServer::AuthenticatorError, "Invalid authenticator configuration!" unless options[:database]
user_model_name = "CASUser_#{options[:auth_index]}"
$LOG.debug "CREATING USER MODEL #{user_model_name}"
class_eval %{
class #{user_model_name} < ActiveRecord::Base
end
}
@user_model = const_get(user_model_name)
@user_model.establish_connection(options[:database])
if ActiveRecord::VERSION::STRING >= '3.2'
@user_model.table_name = (options[:user_table] || 'users')
else
@user_model.set_table_name(options[:user_table] || 'users')
end
@user_model.inheritance_column = 'no_inheritance_column' if options[:ignore_type_column]
begin
@user_model.connection
rescue => e
$LOG.debug e
raise "SQL Authenticator can not connect to database"
end
end
def self.user_model
@user_model
end
def validate(credentials)
read_standard_credentials(credentials)
raise_if_not_configured
log_connection_pool_size
user_model.connection_pool.checkin(user_model.connection)
if matching_users.size > 0
$LOG.warn("#{self.class}: Multiple matches found for user #{@username.inspect}") if matching_users.size > 1
unless @options[:extra_attributes].blank?
if matching_users.size > 1
$LOG.warn("#{self.class}: Unable to extract extra_attributes because multiple matches were found for #{@username.inspect}")
else
user = matching_users.first
extract_extra(user)
log_extra
end
end
return true
else
return false
end
end
protected
def user_model
self.class.user_model
end
def username_column
@options[:username_column] || 'username'
end
def password_column
@options[:password_column] || 'password'
end
def raise_if_not_configured
raise CASServer::AuthenticatorError.new(
"Cannot validate credentials because the authenticator hasn't yet been configured"
) unless @options
end
def extract_extra user
@extra_attributes = {}
extra_attributes_to_extract.each do |col|
@extra_attributes[col] = user[col.to_sym]
end
end
def log_extra
if @extra_attributes.empty?
$LOG.warn("#{self.class}: Did not read any extra_attributes for user #{@username.inspect} even though an :extra_attributes option was provided.")
else
$LOG.debug("#{self.class}: Read the following extra_attributes for user #{@username.inspect}: #{@extra_attributes.inspect}")
end
end
def log_connection_pool_size
log_msg = "#{self.class}: [#{user_model}] "
log_msg += "Connection pool size: #{user_model.connection_pool.connections.length}"
log_msg += "/#{user_model.connection_pool.instance_variable_get(:@size)}"
$LOG.debug log_msg
end
def matching_users
user_model.find(:all, :conditions => ["#{username_column} = ? AND #{password_column} = ?", @username, @password])
end
end
| 31.553459 | 151 | 0.71477 |
392997e364e8489b7f4d39229ff7237c9ec2bb10 | 15,614 | RSpec.describe "chargebacks API" do
let(:field) { FactoryGirl.create(:chargeable_field) }
it "can fetch the list of all chargeback rates" do
chargeback_rate = FactoryGirl.create(:chargeback_rate)
api_basic_authorize collection_action_identifier(:chargebacks, :read, :get)
get api_chargebacks_url
expect_result_resources_to_include_hrefs(
"resources", [api_chargeback_url(nil, chargeback_rate)]
)
expect_result_to_match_hash(response.parsed_body, "count" => 1)
expect(response).to have_http_status(:ok)
end
it "can show an individual chargeback rate" do
chargeback_rate = FactoryGirl.create(:chargeback_rate)
api_basic_authorize action_identifier(:chargebacks, :read, :resource_actions, :get)
get api_chargeback_url(nil, chargeback_rate)
expect_result_to_match_hash(
response.parsed_body,
"description" => chargeback_rate.description,
"guid" => chargeback_rate.guid,
"id" => chargeback_rate.id.to_s,
"href" => api_chargeback_url(nil, chargeback_rate)
)
expect(response).to have_http_status(:ok)
end
it "can fetch chargeback rate details" do
chargeback_rate_detail = FactoryGirl.build(:chargeback_rate_detail, :chargeable_field => field)
chargeback_tier = FactoryGirl.create(:chargeback_tier, :chargeback_rate_detail_id => chargeback_rate_detail.id,
:start => 0, :finish => Float::INFINITY, :fixed_rate => 0.0,
:variable_rate => 0.0)
chargeback_rate_detail.chargeback_tiers = [chargeback_tier]
chargeback_rate = FactoryGirl.create(:chargeback_rate,
:chargeback_rate_details => [chargeback_rate_detail])
api_basic_authorize
get(api_chargeback_rates_url(nil, chargeback_rate))
expect_query_result(:rates, 1, 1)
expect_result_resources_to_include_hrefs(
"resources",
[api_chargeback_rate_url(nil, chargeback_rate, chargeback_rate_detail)]
)
end
it "can fetch an individual chargeback rate detail" do
chargeback_rate_detail = FactoryGirl.build(:chargeback_rate_detail, :description => "rate_1", :chargeable_field => field)
chargeback_tier = FactoryGirl.create(:chargeback_tier, :chargeback_rate_detail_id => chargeback_rate_detail.id,
:start => 0, :finish => Float::INFINITY, :fixed_rate => 0.0,
:variable_rate => 0.0)
chargeback_rate_detail.chargeback_tiers = [chargeback_tier]
chargeback_rate = FactoryGirl.create(:chargeback_rate,
:chargeback_rate_details => [chargeback_rate_detail])
api_basic_authorize
get(api_chargeback_rate_url(nil, chargeback_rate, chargeback_rate_detail))
expect_result_to_match_hash(
response.parsed_body,
"chargeback_rate_id" => chargeback_rate.id.to_s,
"href" => api_chargeback_rate_url(nil, chargeback_rate, chargeback_rate_detail),
"id" => chargeback_rate_detail.id.to_s,
"description" => "rate_1"
)
expect(response).to have_http_status(:ok)
end
it "can list of all currencies" do
currency = FactoryGirl.create(:chargeback_rate_detail_currency)
api_basic_authorize
get '/api/currencies'
expect_result_resources_to_include_hrefs(
"resources", [api_currency_url(nil, currency)]
)
expect_result_to_match_hash(response.parsed_body, "count" => 1)
expect(response).to have_http_status(:ok)
end
it "can show an individual currency" do
currency = FactoryGirl.create(:chargeback_rate_detail_currency)
api_basic_authorize
get "/api/currencies/#{currency.id}"
expect_result_to_match_hash(
response.parsed_body,
"name" => currency.name,
"id" => currency.id.to_s,
"href" => api_currency_url(nil, currency)
)
expect(response).to have_http_status(:ok)
end
it "can list of all measures" do
measure = FactoryGirl.create(:chargeback_rate_detail_measure)
api_basic_authorize
get '/api/measures'
expect_result_resources_to_include_hrefs(
"resources", [api_measure_url(nil, measure)]
)
expect_result_to_match_hash(response.parsed_body, "count" => 1)
expect(response).to have_http_status(:ok)
end
it "can show an individual measure" do
measure = FactoryGirl.create(:chargeback_rate_detail_measure)
api_basic_authorize
get "/api/measures/#{measure.id}"
expect_result_to_match_hash(
response.parsed_body,
"name" => measure.name,
"id" => measure.id.to_s,
"href" => api_measure_url(nil, measure)
)
expect(response).to have_http_status(:ok)
end
context "with an appropriate role" do
it "can create a new chargeback rate" do
api_basic_authorize action_identifier(:chargebacks, :create, :collection_actions)
expect do
post(
api_chargebacks_url,
:params => {
:description => "chargeback_0",
:rate_type => "Storage"
}
)
end.to change(ChargebackRate, :count).by(1)
expect_result_to_match_hash(response.parsed_body["results"].first, "description" => "chargeback_0",
"rate_type" => "Storage",
"default" => false)
expect(response).to have_http_status(:ok)
end
it "returns bad request for incomplete chargeback rate" do
api_basic_authorize action_identifier(:chargebacks, :create, :collection_actions)
expect do
post api_chargebacks_url, :params => { :rate_type => "Storage" }
end.not_to change(ChargebackRate, :count)
expect_bad_request(/description can't be blank/i)
end
it "can edit a chargeback rate through POST" do
chargeback_rate = FactoryGirl.create(:chargeback_rate, :description => "chargeback_0")
api_basic_authorize action_identifier(:chargebacks, :edit)
post api_chargeback_url(nil, chargeback_rate), :params => gen_request(:edit, :description => "chargeback_1")
expect(response.parsed_body["description"]).to eq("chargeback_1")
expect(response).to have_http_status(:ok)
expect(chargeback_rate.reload.description).to eq("chargeback_1")
end
it "can edit a chargeback rate through PATCH" do
chargeback_rate = FactoryGirl.create(:chargeback_rate, :description => "chargeback_0")
api_basic_authorize action_identifier(:chargebacks, :edit)
patch api_chargeback_url(nil, chargeback_rate), :params => [{:action => "edit",
:path => "description",
:value => "chargeback_1"}]
expect(response.parsed_body["description"]).to eq("chargeback_1")
expect(response).to have_http_status(:ok)
expect(chargeback_rate.reload.description).to eq("chargeback_1")
end
it "can delete a chargeback rate" do
chargeback_rate = FactoryGirl.create(:chargeback_rate)
api_basic_authorize action_identifier(:chargebacks, :delete)
expect do
delete api_chargeback_url(nil, chargeback_rate)
end.to change(ChargebackRate, :count).by(-1)
expect(response).to have_http_status(:no_content)
end
it "can delete a chargeback rate through POST" do
chargeback_rate = FactoryGirl.create(:chargeback_rate)
api_basic_authorize action_identifier(:chargebacks, :delete)
expect do
post api_chargeback_url(nil, chargeback_rate), :params => { :action => "delete" }
end.to change(ChargebackRate, :count).by(-1)
expect(response).to have_http_status(:ok)
end
it "can create a new chargeback rate detail" do
api_basic_authorize action_identifier(:rates, :create, :collection_actions)
chargeback_rate = FactoryGirl.create(:chargeback_rate)
expect do
post(
api_rates_url,
:params => {
:description => "rate_0",
:group => "fixed",
:chargeback_rate_id => chargeback_rate.id,
:chargeable_field_id => field.id,
:source => "used",
:enabled => true
}
)
end.to change(ChargebackRateDetail, :count).by(1)
expect_result_to_match_hash(response.parsed_body["results"].first, "description" => "rate_0", "enabled" => true)
expect(response).to have_http_status(:ok)
end
it "returns bad request for incomplete chargeback rate detail" do
api_basic_authorize action_identifier(:rates, :create, :collection_actions)
expect do
post api_rates_url, :params => { :description => "rate_0", :enabled => true }
end.not_to change(ChargebackRateDetail, :count)
expect_bad_request(/Chargeback rate can't be blank/i)
expect_bad_request(/Chargeable field can't be blank/i)
end
it "can edit a chargeback rate detail through POST" do
chargeback_rate_detail = FactoryGirl.build(:chargeback_rate_detail, :description => "rate_0", :chargeable_field => field)
chargeback_tier = FactoryGirl.create(:chargeback_tier, :chargeback_rate_detail_id => chargeback_rate_detail.id,
:start => 0, :finish => Float::INFINITY, :fixed_rate => 0.0,
:variable_rate => 0.0)
chargeback_rate_detail.chargeback_tiers = [chargeback_tier]
chargeback_rate_detail.save
api_basic_authorize action_identifier(:rates, :edit)
post api_rate_url(nil, chargeback_rate_detail), :params => gen_request(:edit, :description => "rate_1")
expect(response.parsed_body["description"]).to eq("rate_1")
expect(response).to have_http_status(:ok)
expect(chargeback_rate_detail.reload.description).to eq("rate_1")
end
it "can edit a chargeback rate detail through PATCH" do
chargeback_rate_detail = FactoryGirl.build(:chargeback_rate_detail, :description => "rate_0", :chargeable_field => field)
chargeback_tier = FactoryGirl.create(:chargeback_tier, :chargeback_rate_detail_id => chargeback_rate_detail.id,
:start => 0, :finish => Float::INFINITY, :fixed_rate => 0.0,
:variable_rate => 0.0)
chargeback_rate_detail.chargeback_tiers = [chargeback_tier]
chargeback_rate_detail.save
api_basic_authorize action_identifier(:rates, :edit)
patch api_rate_url(nil, chargeback_rate_detail), :params => [{:action => "edit", :path => "description", :value => "rate_1"}]
expect(response.parsed_body["description"]).to eq("rate_1")
expect(response).to have_http_status(:ok)
expect(chargeback_rate_detail.reload.description).to eq("rate_1")
end
it "can delete a chargeback rate detail" do
chargeback_rate_detail = FactoryGirl.build(:chargeback_rate_detail, :chargeable_field => field)
chargeback_tier = FactoryGirl.create(:chargeback_tier, :chargeback_rate_detail_id => chargeback_rate_detail.id,
:start => 0, :finish => Float::INFINITY, :fixed_rate => 0.0,
:variable_rate => 0.0)
chargeback_rate_detail.chargeback_tiers = [chargeback_tier]
chargeback_rate_detail.save
api_basic_authorize action_identifier(:rates, :delete)
expect do
delete api_rate_url(nil, chargeback_rate_detail)
end.to change(ChargebackRateDetail, :count).by(-1)
expect(response).to have_http_status(:no_content)
end
it "can delete a chargeback rate detail through POST" do
chargeback_rate_detail = FactoryGirl.build(:chargeback_rate_detail, :chargeable_field => field)
chargeback_tier = FactoryGirl.create(:chargeback_tier, :chargeback_rate_detail_id => chargeback_rate_detail.id,
:start => 0, :finish => Float::INFINITY, :fixed_rate => 0.0,
:variable_rate => 0.0)
chargeback_rate_detail.chargeback_tiers = [chargeback_tier]
chargeback_rate_detail.save
api_basic_authorize action_identifier(:rates, :delete)
expect do
post api_rate_url(nil, chargeback_rate_detail), :params => { :action => "delete" }
end.to change(ChargebackRateDetail, :count).by(-1)
expect(response).to have_http_status(:ok)
end
end
context "without an appropriate role" do
it "cannot create a chargeback rate" do
api_basic_authorize
expect { post api_chargebacks_url, :params => { :description => "chargeback_0" } }.not_to change(ChargebackRate, :count)
expect(response).to have_http_status(:forbidden)
end
it "cannot edit a chargeback rate" do
chargeback_rate = FactoryGirl.create(:chargeback_rate, :description => "chargeback_0")
api_basic_authorize
expect do
post api_chargeback_url(nil, chargeback_rate), :params => gen_request(:edit, :description => "chargeback_1")
end.not_to change { chargeback_rate.reload.description }
expect(response).to have_http_status(:forbidden)
end
it "cannot delete a chargeback rate" do
chargeback_rate = FactoryGirl.create(:chargeback_rate)
api_basic_authorize
expect do
delete api_chargeback_url(nil, chargeback_rate)
end.not_to change(ChargebackRate, :count)
expect(response).to have_http_status(:forbidden)
end
it "cannot create a chargeback rate detail" do
api_basic_authorize
expect { post api_rates_url, :params => { :description => "rate_0", :enabled => true } }.not_to change(ChargebackRateDetail, :count)
expect(response).to have_http_status(:forbidden)
end
it "cannot edit a chargeback rate detail" do
chargeback_rate_detail = FactoryGirl.build(:chargeback_rate_detail, :description => "rate_1", :chargeable_field => field)
chargeback_tier = FactoryGirl.create(:chargeback_tier, :chargeback_rate_detail_id => chargeback_rate_detail.id,
:start => 0, :finish => Float::INFINITY, :fixed_rate => 0.0,
:variable_rate => 0.0)
chargeback_rate_detail.chargeback_tiers = [chargeback_tier]
chargeback_rate_detail.save
api_basic_authorize
expect do
post api_rate_url(nil, chargeback_rate_detail), :params => gen_request(:edit, :description => "rate_2")
end.not_to change { chargeback_rate_detail.reload.description }
expect(response).to have_http_status(:forbidden)
end
it "cannot delete a chargeback rate detail" do
chargeback_rate_detail = FactoryGirl.build(:chargeback_rate_detail, :chargeable_field => field)
chargeback_tier = FactoryGirl.create(:chargeback_tier, :chargeback_rate_detail_id => chargeback_rate_detail.id,
:start => 0, :finish => Float::INFINITY, :fixed_rate => 0.0,
:variable_rate => 0.0)
chargeback_rate_detail.chargeback_tiers = [chargeback_tier]
chargeback_rate_detail.save
api_basic_authorize
expect do
delete api_rate_url(nil, chargeback_rate_detail)
end.not_to change(ChargebackRateDetail, :count)
expect(response).to have_http_status(:forbidden)
end
end
end
| 42.2 | 138 | 0.661714 |
ff7b6cddb74e7f636ab6ee5b33d4321f78e830c9 | 4,763 | # frozen_string_literal: true
# This file was auto-generated by lib/tasks/web.rake
require_relative 'endpoints/admin_analytics'
require_relative 'endpoints/admin_apps'
require_relative 'endpoints/admin_apps_approved'
require_relative 'endpoints/admin_apps_requests'
require_relative 'endpoints/admin_apps_restricted'
require_relative 'endpoints/admin_auth_policy'
require_relative 'endpoints/admin_barriers'
require_relative 'endpoints/admin_conversations'
require_relative 'endpoints/admin_conversations_ekm'
require_relative 'endpoints/admin_conversations_restrictAccess'
require_relative 'endpoints/admin_emoji'
require_relative 'endpoints/admin_inviteRequests'
require_relative 'endpoints/admin_inviteRequests_approved'
require_relative 'endpoints/admin_inviteRequests_denied'
require_relative 'endpoints/admin_teams'
require_relative 'endpoints/admin_teams_admins'
require_relative 'endpoints/admin_teams_owners'
require_relative 'endpoints/admin_teams_settings'
require_relative 'endpoints/admin_usergroups'
require_relative 'endpoints/admin_users'
require_relative 'endpoints/admin_users_session'
require_relative 'endpoints/api'
require_relative 'endpoints/apps'
require_relative 'endpoints/apps_connections'
require_relative 'endpoints/apps_event_authorizations'
require_relative 'endpoints/apps_manifest'
require_relative 'endpoints/auth'
require_relative 'endpoints/auth_teams'
require_relative 'endpoints/bots'
require_relative 'endpoints/calls'
require_relative 'endpoints/calls_participants'
require_relative 'endpoints/chat'
require_relative 'endpoints/chat_scheduledMessages'
require_relative 'endpoints/conversations'
require_relative 'endpoints/dialog'
require_relative 'endpoints/dnd'
require_relative 'endpoints/emoji'
require_relative 'endpoints/files'
require_relative 'endpoints/files_comments'
require_relative 'endpoints/files_remote'
require_relative 'endpoints/migration'
require_relative 'endpoints/oauth'
require_relative 'endpoints/oauth_v2'
require_relative 'endpoints/openid_connect'
require_relative 'endpoints/pins'
require_relative 'endpoints/reactions'
require_relative 'endpoints/reminders'
require_relative 'endpoints/rtm'
require_relative 'endpoints/search'
require_relative 'endpoints/stars'
require_relative 'endpoints/team'
require_relative 'endpoints/team_billing'
require_relative 'endpoints/team_preferences'
require_relative 'endpoints/team_profile'
require_relative 'endpoints/tooling_tokens'
require_relative 'endpoints/usergroups'
require_relative 'endpoints/usergroups_users'
require_relative 'endpoints/users'
require_relative 'endpoints/users_admin'
require_relative 'endpoints/users_prefs'
require_relative 'endpoints/users_profile'
require_relative 'endpoints/views'
require_relative 'endpoints/workflows'
module Slack
module Web
module Api
module Endpoints
include Slack::Web::Api::Mixins::Conversations
include Slack::Web::Api::Mixins::Users
include AdminAnalytics
include AdminApps
include AdminAppsApproved
include AdminAppsRequests
include AdminAppsRestricted
include AdminAuthPolicy
include AdminBarriers
include AdminConversations
include AdminConversationsEkm
include AdminConversationsRestrictaccess
include AdminEmoji
include AdminInviterequests
include AdminInviterequestsApproved
include AdminInviterequestsDenied
include AdminTeams
include AdminTeamsAdmins
include AdminTeamsOwners
include AdminTeamsSettings
include AdminUsergroups
include AdminUsers
include AdminUsersSession
include Api
include Apps
include AppsConnections
include AppsEventAuthorizations
include AppsManifest
include Auth
include AuthTeams
include Bots
include Calls
include CallsParticipants
include Chat
include ChatScheduledmessages
include Conversations
include Dialog
include Dnd
include Emoji
include Files
include FilesComments
include FilesRemote
include Migration
include Oauth
include OauthV2
include OpenidConnect
include Pins
include Reactions
include Reminders
include Rtm
include Search
include Stars
include Team
include TeamBilling
include TeamPreferences
include TeamProfile
include ToolingTokens
include Usergroups
include UsergroupsUsers
include Users
include UsersAdmin
include UsersPrefs
include UsersProfile
include Views
include Workflows
end
end
end
end
| 33.542254 | 63 | 0.78228 |
1cb58055431cc93f9e12bd477a5081ba16828b2c | 1,580 | require_relative '../../../search_test'
require_relative '../../../generator/group_metadata'
module USCoreTestKit
module USCoreV400
class PediatricWeightForHeightPatientCategoryDateSearchTest < Inferno::Test
include USCoreTestKit::SearchTest
title 'Server returns valid results for Observation search by patient + category + date'
description %(
A server SHALL support searching by
patient + category + date on the Observation resource. This test
will pass if resources are returned and match the search criteria. If
none are returned, the test is skipped.
[US Core Server CapabilityStatement](http://hl7.org/fhir/us/core/STU4/CapabilityStatement-us-core-server.html)
)
id :us_core_v400_pediatric_weight_for_height_patient_category_date_search_test
input :patient_ids,
title: 'Patient IDs',
description: 'Comma separated list of patient IDs that in sum contain all MUST SUPPORT elements'
def self.properties
@properties ||= SearchTestProperties.new(
resource_type: 'Observation',
search_param_names: ['patient', 'category', 'date'],
possible_status_search: true,
token_search_params: ['category'],
params_with_comparators: ['date']
)
end
def self.metadata
@metadata ||= Generator::GroupMetadata.new(YAML.load_file(File.join(__dir__, 'metadata.yml')))
end
def scratch_resources
scratch[:pediatric_weight_for_height_resources] ||= {}
end
run do
run_search_test
end
end
end
end
| 32.244898 | 110 | 0.702532 |
019b2184e00f12ba9c542eb03caea1d66959de66 | 543 | current_dir = File.dirname(__FILE__)
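# Knife configuration: the user and organization are read from the environment, with fallback defaults.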
user = ENV['OPSCODE_USER'] || ENV['USER']
orgname = ENV['ORGNAME'] || "chef"
chef_server_url "https://chef-server.local"
node_name user
client_key "#{user}.pem"
validation_client_name "#{orgname}-validator"
validation_key "#{orgname}-validator.pem"
syntax_check_cache_path "syntax_check_cache"
cookbook_path ["#{current_dir}/../cookbooks"]
cookbook_copyright "Robert J. Berger"
cookbook_license "apachev2"
cookbook_email "[email protected]"
| 31.941176 | 56 | 0.679558 |
7aa8c15b4c2572e268b7524fad796a1282877024 | 442 | require 'honeybadger/plugin'
require 'honeybadger/ruby'
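# Registers a plugin that adds the authenticated Warden user's scope and id to the Honeybadger error context.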
module Honeybadger
Plugin.register do
requirement { defined?(::Warden::Manager.after_set_user) }
execution do
::Warden::Manager.after_set_user do |user, auth, opts|
if user.respond_to?(:id)
::Honeybadger.context({
:user_scope => opts[:scope].to_s,
:user_id => user.id.to_s
})
end
end
end
end
end
| 22.1 | 62 | 0.604072 |
1ab034effa5daf0249a522489e7bafdbf928bb5c | 2,563 | require "rails_helper"
RSpec.describe 'home page', type: :feature, js: true do
it 'renders' do
visit '/'
expect(page).to have_selector('#home_latest_area', visible: true)
expect(page).not_to have_selector('.profile_link')
track_chunk = find(".asset", match: :first)
track_chunk.find(".play_link").click
expect(track_chunk).to have_selector('.private_check_box label', visible: true)
expect(track_chunk).to have_no_selector('.add_to_favorites')
track_chunk.find('.private_check_box label').click
expect(track_chunk).to have_selector('span.only_user_name', visible: true)
track_chunk.find('textarea').fill_in with: "Hey this is a comment from a guest"
# private checkbox is technically offscreen, but we still want to confirm it's checked
expect(track_chunk.find('.private_check_box .private', visible: :all)).to be_checked
Percy.snapshot(page, name: 'Home as Guest')
akismet_stub_response_ham
track_chunk.find('input[type=submit]').click
expect(track_chunk.find('.ajax_waiting')).to have_text('Submitted, thanks!')
end
it 'renders logged in' do
logged_in(:arthur) do
visit '/'
expect(page).to have_selector('.profile_link')
# let's snap the dark theme nav while we are at it
switch_themes
expect(page).to have_selector('.profile_link')
track = find(".asset", match: :first)
track.find(".play_link").click
page.scroll_to(track)
expect(track).to have_selector('.add_to_favorites')
expect(track).to have_selector('.stitches_seek')
# This currently fails, because playback actually fails
# TODO: look into the fixtures and confirm playback on this particular track is happy
# expect(track).to have_selector('.stitches_seek .loaded')
track.find(".stitches_seek").click # click in the middle of the seekbar
track.find(".play_link").click # pause the track
expect(track).to have_selector('.add_to_favorites')
Percy.snapshot(page, name: 'Home as User')
end
end
it 'properly runs javascript and reports console errors', :allow_js_errors do
visit '/'
page.execute_script("console.error('hello from capybara')")
warnings = page.driver.browser.manage.logs.get(:browser)
# Ignore font complaints
# SEVERE 2021-02-24 17:55:00 +0100: https://cdn.alonetone.com/fonts/Alright-v2-Normal-Bold-latin1-tnum.woff2 - Failed to load resource: net::ERR_FAILED
expect(warnings.select { |w| w.level == 'SEVERE' && !w.message.include?('font') }.size).to eq(1)
end
end
| 39.430769 | 155 | 0.703472 |
1d833183ec3df316b0ca9ce467f2ed078f4345d7 | 2,580 | module Gitlab
module Diff
class Highlight
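# Applies syntax highlighting from the old and new blobs to each diff line and marks word-level inline changes.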
attr_reader :diff_file, :diff_lines, :raw_lines, :repository
delegate :old_path, :new_path, :old_sha, :new_sha, to: :diff_file, prefix: :diff
def initialize(diff_lines, repository: nil)
@repository = repository
if diff_lines.is_a?(Gitlab::Diff::File)
@diff_file = diff_lines
@diff_lines = @diff_file.diff_lines
else
@diff_lines = diff_lines
end
@raw_lines = @diff_lines.map(&:text)
end
def highlight
@diff_lines.map.with_index do |diff_line, i|
diff_line = diff_line.dup
# ignore highlighting for "match" lines
next diff_line if diff_line.meta?
rich_line = highlight_line(diff_line) || ERB::Util.html_escape(diff_line.text)
if line_inline_diffs = inline_diffs[i]
begin
rich_line = InlineDiffMarker.new(diff_line.text, rich_line).mark(line_inline_diffs)
# This should only happen when the encoding of the diff doesn't
# match the blob, which is a bug. But we shouldn't fail to render
# completely in that case, even though we want to report the error.
rescue RangeError => e
Gitlab::Sentry.track_exception(e, issue_url: 'https://gitlab.com/gitlab-org/gitlab-ce/issues/45441')
end
end
diff_line.rich_text = rich_line
diff_line
end
end
private
def highlight_line(diff_line)
return unless diff_file && diff_file.diff_refs
rich_line =
if diff_line.unchanged? || diff_line.added?
new_lines[diff_line.new_pos - 1]&.html_safe
elsif diff_line.removed?
old_lines[diff_line.old_pos - 1]&.html_safe
end
# Only update text if line is found. This will prevent
# issues with submodules given the line only exists in diff content.
if rich_line
line_prefix = diff_line.text =~ /\A(.)/ ? $1 : ' '
"#{line_prefix}#{rich_line}".html_safe
end
end
def inline_diffs
@inline_diffs ||= InlineDiff.for_lines(@raw_lines)
end
def old_lines
@old_lines ||= highlighted_blob_lines(diff_file.old_blob)
end
def new_lines
@new_lines ||= highlighted_blob_lines(diff_file.new_blob)
end
def highlighted_blob_lines(blob)
return [] unless blob
blob.load_all_data!
blob.present.highlight.lines
end
end
end
end
| 29.655172 | 114 | 0.615116 |
286cf8dcfac6fbc291d623715c3f335ed676dbbf | 1,389 | require 'rails_helper'
require 'ui/component'
describe Ui::Breadcrumbs, type: :cell do
controller ApplicationController
let(:result) { html }
let(:breadcrumbs) {
[
Ui::Breadcrumbs::Crumb.new(path: '#', name: 'Breadcrumb 1', current: true),
Ui::Breadcrumbs::Crumb.new(path: '#', name: 'Breadcrumb 2', current: false)
]
}
class TestRenderer < Ui::Component
def show
content_tag(:p, 'test rendered')
end
end
context '#show' do
context 'with a custom renderer' do
let(:html) {
cell(
described_class,
breadcrumbs,
delimiter: '<span>></span>',
item_renderer: ->(item) {
TestRenderer.new(item).call(:show)
}
).call(:show)
}
it 'renders the button' do
expect(result).to have_xpath "//p"
expect(result).to have_css '.ui-breadcrumbs'
expect(result).to have_content 'test rendered'
expect(result).to have_xpath '//span'
end
end
context 'with a default renderer' do
let(:html) {
cell(
described_class,
breadcrumbs,
).call(:show)
}
it 'renders the button' do
expect(result).to have_css '.ui-breadcrumbs'
expect(result).to have_content 'Breadcrumb 1'
expect(result).to have_content 'Breadcrumb 2'
end
end
end
end
| 24.368421 | 81 | 0.581713 |
088270950159070b2f209dcac62377f27389ea27 | 939 | # frozen_string_literal: true
require 'appeals/middleware/errors'
require 'common/client/configuration/rest'
require 'common/client/middleware/response/raise_error'
module Appeals
class Configuration < Common::Client::Configuration::REST
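# Faraday client configuration for the external appeals status service; responses can be mocked via Settings.appeals.mock.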
def app_token
Settings.appeals.app_token
end
def base_path
Settings.appeals.host
end
def service_name
'AppealsStatus'
end
def connection
Faraday.new(base_path, headers: base_request_headers, request: request_options) do |faraday|
faraday.use :breakers
faraday.request :json
faraday.response :betamocks if mock_enabled?
faraday.response :raise_error, error_prefix: service_name
faraday.response :appeals_errors
faraday.response :json
faraday.adapter Faraday.default_adapter
end
end
def mock_enabled?
[true, 'true'].include?(Settings.appeals.mock)
end
end
end
| 24.076923 | 98 | 0.7082 |
e879d916d46019ac3985f3cf3f9fcde3d639a659 | 1,775 | class Thrift < Formula
desc "Framework for scalable cross-language services development"
homepage "https://thrift.apache.org/"
url "https://www.apache.org/dyn/closer.cgi?path=/thrift/0.13.0/thrift-0.13.0.tar.gz"
sha256 "7ad348b88033af46ce49148097afe354d513c1fca7c607b59c33ebb6064b5179"
bottle do
cellar :any
sha256 "3a6dccee60ca25d75f99245cc46a7d84351c87c654b77837c3370c5247c80c49" => :catalina
sha256 "385c454b28a354be187de75d67c0133bca17cd1341f1e1abd10cba368e29a80d" => :mojave
sha256 "cb82d3f651ae5cb00a37713a050127a746358320e579d2fe55e08c4b9cd139bd" => :high_sierra
sha256 "e50023ab05171856aaccb3a820a4a3482a7e31e00e988565c86a9f2f486229d3" => :x86_64_linux
end
head do
url "https://github.com/apache/thrift.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
end
depends_on "bison" => :build
depends_on "boost"
depends_on "[email protected]"
def install
system "./bootstrap.sh" unless build.stable?
args = %W[
--disable-debug
--disable-tests
--prefix=#{prefix}
--libdir=#{lib}
--with-openssl=#{Formula["[email protected]"].opt_prefix}
--without-erlang
--without-haskell
--without-java
--without-perl
--without-php
--without-php_extension
--without-python
--without-ruby
--without-swift
]
ENV.cxx11 if ENV.compiler == :clang
# Don't install extensions to /usr:
ENV["PY_PREFIX"] = prefix
ENV["PHP_PREFIX"] = prefix
ENV["JAVA_PREFIX"] = buildpath
system "./configure", *args
ENV.deparallelize
system "make"
system "make", "install"
end
test do
system "#{bin}/thrift", "--version"
end
end
| 27.307692 | 94 | 0.683944 |
18f10107f10f1ab40203b5f5a95e13c9650e9014 | 18,062 | require 'spec_helper_acceptance'
require 'securerandom'
describe "ec2_autoscalinggroup" do
before(:all) do
@default_region = 'sa-east-1'
@aws = AwsHelper.new(@default_region)
end
def find_autoscaling_group(name)
groups = @aws.get_autoscaling_groups(name)
expect(groups.count).to eq(1)
groups.first
end
def find_launch_config(name)
config = @aws.get_launch_configs(name)
expect(config.count).to eq(1)
config.first
end
def find_scaling_policy(name, group)
policy = @aws.get_scaling_policies(name, group)
expect(policy.count).to eq(1)
policy.first
end
def find_alarm(name)
alarm = @aws.get_alarms(name)
expect(alarm.count).to eq(1)
alarm.first
end
describe 'autoscaling_group and related types' do
before(:all) do
name = "#{PuppetManifest.env_id}-#{SecureRandom.uuid}"
@asg_template = 'autoscaling_configurable.pp.tmpl'
@asg_template_delete = 'autoscaling_configurable_delete.pp.tmpl'
@duplicate_asg_template = 'autoscaling_duplicate.pp.tmpl'
@sg_delete = 'sg_delete.pp.tmpl'
# launch asg and related resources
@asg_config = {
:ensure => 'present',
:region => @default_region,
:sg_name => "#{name}-sg",
:lc_name => "#{name}-lc",
:sg_setting => "#{name}-sg",
:asg_name => "#{name}-asg",
:min_size => 2,
:max_size => 6,
:lc_setting => "#{name}-lc",
:availability_zones => ["#{@default_region}a", "#{@default_region}b"],
:policy_name => "#{name}-policy",
:second_policy_name => "#{name}-second_policy",
:asg_setting => "#{name}-asg",
:scaling_adjustment => 30,
:adjustment_type => 'PercentChangeInCapacity',
:alarm_name => "#{name}-cw_alarm",
:metric => 'CPUUtilization',
:namespace => 'AWS/EC2',
:statistic => 'Average',
:period => 120,
:threshold => 70,
:comparison_operator => 'GreaterThanOrEqualToThreshold',
:asg_setting => "#{name}-asg",
:evaluation_periods => 2,
:alarm_actions => "#{name}-policy",
}
r = PuppetManifest.new(@asg_template, @asg_config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
# launch duplicate resources
@duplicate_asg_config = {
:region => @default_region,
:sg_name => "#{name}-sg2",
:lc_name => "#{name}-lc2",
}
r2 = PuppetManifest.new(@duplicate_asg_template, @duplicate_asg_config).apply
expect(r2[:output].any?{ |o| o.include?('Error:')}).to eq(false)
end
after(:all) do
#audit this entire teardown
@asg_config[:ensure] = 'absent'
@duplicate_asg_config[:ensure] = 'absent'
duplicate_delete = 'duplicate_asg_delete.pp.tmpl'
r = PuppetManifest.new(@asg_template_delete, @asg_config).apply
# assert that none of the results contain 'Error:'
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
response = @aws.autoscaling_client.describe_auto_scaling_groups(
auto_scaling_group_names: [@asg_config[:asg_name]],
)
id = Array.new
response.data[:auto_scaling_groups].first[:instances].each do |x|
id.push(x[:instance_id])
end
@aws.ec2_client.wait_until(:instance_terminated, instance_ids: id)
# delete the security group
options = {
:ensure => 'absent',
:name => @asg_config[:sg_name],
:region => @default_region
}
ENV['AWS_REGION'] = @default_region
r2 = TestExecutor.puppet_resource('ec2_securitygroup', options, '--modulepath ../')
expect(r2.stdout).not_to match(/Error:/)
# terminate duplicate resources
r3 = PuppetManifest.new(duplicate_delete, @duplicate_asg_config).apply
expect(r3[:output].any?{ |o| o.include?('Error:')}).to eq(false)
end
it 'should run idempotently' do
success = PuppetManifest.new(@asg_template, @asg_config).apply[:exit_status].success?
expect(success).to eq(true)
end
context 'should create' do
context 'an auto scaling group' do
before(:all) do
@group = find_autoscaling_group(@asg_config[:asg_name])
end
it 'with the correct properties' do
expect(@group.min_size).to eq(@asg_config[:min_size])
expect(@group.max_size).to eq(@asg_config[:max_size])
expect(@group.launch_configuration_name).to eq(@asg_config[:lc_setting])
expect(@group.availability_zones).to eq(['sa-east-1a', 'sa-east-1b'])
end
end
context 'a launch configuration' do
before(:all) do
@lc = find_launch_config(@asg_config[:lc_name])
end
it 'with the correct properties' do
expect(@lc.image_id).to eq('ami-67a60d7a')
expect(@lc.instance_type).to eq('t1.micro')
end
end
context 'a CloudWatch alarm' do
before(:all) do
@alarm = find_alarm(@asg_config[:alarm_name])
end
it 'with the correct properties' do
expect(@alarm.namespace).to eq(@asg_config[:namespace])
expect(@alarm.statistic).to eq(@asg_config[:statistic])
expect(@alarm.period).to eq(@asg_config[:period])
expect(@alarm.threshold).to eq(@asg_config[:threshold])
expect(@alarm.comparison_operator).to eq(@asg_config[:comparison_operator])
expect(@alarm.evaluation_periods).to eq(@asg_config[:evaluation_periods])
end
end
context 'should create scaling policies' do
before(:all) do
@policy = find_scaling_policy(@asg_config[:policy_name], @asg_config[:asg_name])
end
it 'with the correct properties' do
expect(@policy.adjustment_type).to eq(@asg_config[:adjustment_type])
expect(@policy.scaling_adjustment).to eq(@asg_config[:scaling_adjustment])
expect(@policy.auto_scaling_group_name).to eq(@asg_config[:asg_name])
end
end
end
context 'using puppet resource to describe' do
context 'CloudWatch alarm' do
before(:all) do
ENV['AWS_REGION'] = @default_region
name = @asg_config[:alarm_name]
options = {:name => name}
@result = TestExecutor.puppet_resource('cloudwatch_alarm', options, '--modulepath ../')
@cw = find_alarm(name)
end
it 'metric is correct' do
regex = /metric\s*=>\s*'#{@cw.metric_name}'/
expect(@result.stdout).to match(regex)
end
it 'namespace is correct' do
regex = /namespace\s*=>\s*'#{@cw.namespace}'/
expect(@result.stdout).to match(regex)
end
it 'statistic is correct' do
regex = /statistic\s*=>\s*'#{@cw.statistic}'/
expect(@result.stdout).to match(regex)
end
it 'period is correct' do
regex = /period\s*=>\s*'#{@cw.period}'/
expect(@result.stdout).to match(regex)
end
it 'threshold is correct' do
regex = /threshold\s*=>\s*'#{@cw.threshold}'/
expect(@result.stdout).to match(regex)
end
it 'comparison_operator' do
regex = /comparison_operator\s*=>\s*'#{@cw.comparison_operator}'/
expect(@result.stdout).to match(regex)
end
it 'dimensions' do
expect(@cw.dimensions.all?{ |d| /#{d.value}/.match(@result.stdout) }).to eq(true)
end
it 'evaluation_periods' do
regex = /evaluation_periods\s*=>\s*'#{@cw.evaluation_periods}'/
expect(@result.stdout).to match(regex)
end
it 'alarm_actions' do
regex = /alarm_actions\s*=>\s*\['#{@asg_config[:alarm_actions]}'\]/
expect(@result.stdout).to match(regex)
end
end
context 'autoscaling group' do
before(:all) do
ENV['AWS_REGION'] = @default_region
name = @asg_config[:asg_name]
options = {:name => name}
@result = TestExecutor.puppet_resource('ec2_autoscalinggroup', options, '--modulepath ../')
@asg = find_autoscaling_group(name)
end
it 'min_size' do
regex = /min_size\s*=>\s*'#{@asg.min_size}'/
expect(@result.stdout).to match(regex)
end
it 'max_size' do
regex = /max_size\s*=>\s*'#{@asg.max_size}'/
expect(@result.stdout).to match(regex)
end
it 'launch_configuration' do
regex = /launch_configuration\s*=>\s*'#{@asg.launch_configuration_name }'/
expect(@result.stdout).to match(regex)
end
it 'instance_count' do
regex = /instance_count\s*=>\s*'#{@asg.instances.count}'/
expect(@result.stdout).to match(regex)
end
it 'availability_zones' do
["#{@default_region}a", "#{@default_region}b"].each do |az|
regex = /'#{az}'/
expect(@result.stdout).to match(regex)
end
end
end
context 'launch_configuration' do
before(:all) do
ENV['AWS_REGION'] = @default_region
name = @asg_config[:lc_name]
options = {:name => name}
@result = TestExecutor.puppet_resource('ec2_launchconfiguration', options, '--modulepath ../')
@lc = find_launch_config(name)
end
it 'security_groups' do
response = @aws.ec2_client.describe_security_groups(group_ids: @lc.security_groups)
names = response.data.security_groups.collect(&:group_name)
names.each do |name|
expect(@result.stdout).to match(/#{name}/)
end
end
it 'key_name' do
if ENV['AWS_KEY_PAIR']
# key was supplied at creation of asg
# key should be reported
# we will need a key to run this with in CI
regex = /(key_name)(\s*)(=>)(\s*)('#{@lc.key_name}')/
expect(@result.stdout).to match(regex)
else
# no key was supplied on creation of asg
# should not report key
regex = /key_name/
expect(@result.stdout).not_to match(regex)
end
end
it 'instance_type' do
regex = /instance_type\s*=>\s*'#{@lc.instance_type}'/
expect(@result.stdout).to match(regex)
end
it 'image_id' do
regex = /image_id\s*=>\s*'#{@lc.image_id}'/
expect(@result.stdout).to match(regex)
end
end
context 'scaling policy' do
before(:all) do
ENV['AWS_REGION'] = @default_region
name = @asg_config[:policy_name]
asg_name = @asg_config[:asg_name]
options = {:name => name}
@result = TestExecutor.puppet_resource('ec2_scalingpolicy', options, '--modulepath ../')
@sp = find_scaling_policy(name, asg_name)
end
it 'scaling_adjustment' do
regex = /scaling_adjustment\s*=>\s*'#{@sp.scaling_adjustment}'/
expect(@result.stdout).to match(regex)
end
it 'adjustment_type' do
regex = /adjustment_type\s*=>\s*'#{@sp.adjustment_type}'/
expect(@result.stdout).to match(regex)
end
it 'ec2_autoscaling_group' do
regex = /auto_scaling_group\s*=>\s*'#{@sp.auto_scaling_group_name}'/
expect(@result.stdout).to match(regex)
end
end
end
context 'modify cloudwatch property' do
it 'metric' do
config = @asg_config.clone
config[:metric] = 'NetworkIn'
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
cloudwatch = find_alarm(@asg_config[:alarm_name])
expect(cloudwatch.metric_name).to eq('NetworkIn')
end
it 'namespace and metric' do
config = @asg_config.clone
config[:metric] = 'HealthyHostCount'
config[:namespace] = 'AWS/ELB'
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
cloudwatch = find_alarm(@asg_config[:alarm_name])
expect(cloudwatch.metric_name).to eq('HealthyHostCount')
expect(cloudwatch.namespace).to eq('AWS/ELB')
end
it 'statistic' do
config = @asg_config.clone
config[:statistic] = 'Sum'
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
cloudwatch = find_alarm(@asg_config[:alarm_name])
expect(cloudwatch.statistic).to eq('Sum')
end
it 'period' do
config = @asg_config.clone
config[:period] = 180
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
cloudwatch = find_alarm(@asg_config[:alarm_name])
expect(cloudwatch.period).to eq(180)
end
it 'evaluation_periods' do
config = @asg_config.clone
config[:evaluation_periods] = 4
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
cloudwatch = find_alarm(@asg_config[:alarm_name])
expect(cloudwatch.evaluation_periods).to eq(4)
end
it 'threshold' do
config = @asg_config.clone
config[:threshold] = 50
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
cloudwatch = find_alarm(@asg_config[:alarm_name])
expect(cloudwatch.threshold).to eq(50.0)
end
it 'comparison_operator' do
config = @asg_config.clone
config[:comparison_operator] = 'GreaterThanThreshold'
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
cloudwatch = find_alarm(@asg_config[:alarm_name])
expect(cloudwatch.comparison_operator).to eq('GreaterThanThreshold')
end
end
context 'modify ec2_scalingpolicy' do
it 'scaling_adjustment' do
config = @asg_config.clone
config[:scaling_adjustment] = 40
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
policy = find_scaling_policy(@asg_config[:policy_name], @asg_config[:asg_name])
expect(policy.scaling_adjustment).to eq(40)
end
it 'adjustment_type' do
config = @asg_config.clone
config[:adjustment_type] = 'ExactCapacity'
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
policy = find_scaling_policy(@asg_config[:policy_name], @asg_config[:asg_name])
expect(policy.adjustment_type).to eq('ExactCapacity')
end
end
context 'modify ec2_autoscalinggroup' do
it 'min_size' do
config = @asg_config.clone
config[:min_size] = 3
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
group = find_autoscaling_group(@asg_config[:asg_name])
expect(group.min_size).to eq(3)
end
it 'max_size' do
config = @asg_config.clone
config[:max_size] = 5
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
group = find_autoscaling_group(@asg_config[:asg_name])
expect(group.max_size).to eq(5)
end
it 'launch_configuration' do
config = @asg_config.clone
config[:lc_setting] = @duplicate_asg_config[:lc_name]
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
group = find_autoscaling_group(@asg_config[:asg_name])
expect(group.launch_configuration_name).to eq(@duplicate_asg_config[:lc_name])
end
it 'availability_zones' do
config = @asg_config.clone
config[:availability_zones] = ["#{@default_region}b"]
r = PuppetManifest.new(@asg_template, config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
group = find_autoscaling_group(@asg_config[:asg_name])
expect(group.availability_zones.sort).to eq(config[:availability_zones].sort)
end
end
end
describe 'an autoscaling group in a VPC' do
before(:all) do
@template = 'autoscaling_vpc.pp.tmpl'
@template_delete = 'autoscaling_vpc_delete.pp.tmpl'
@config = {
:ensure => 'present',
:region => @default_region,
:name => "#{PuppetManifest.env_id}-#{SecureRandom.uuid}",
:min_size => 0,
:max_size => 6,
:vpc_cidr => '10.0.0.0/16',
:subnet_cidr => '10.0.0.0/24',
}
r = PuppetManifest.new(@template, @config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
end
after(:all) do
@config[:ensure] = 'absent'
r = PuppetManifest.new(@template_delete, @config).apply
expect(r[:output].any?{ |o| o.include?('Error:')}).to eq(false)
end
it 'should run idempotently' do
success = PuppetManifest.new(@template, @config).apply[:exit_status].success?
expect(success).to eq(true)
end
context 'should create' do
context 'an auto scaling group' do
it 'associated with a VPC' do
group = find_autoscaling_group("#{@config[:name]}-asg")
expect(group.vpc_zone_identifier).not_to be_nil
end
end
end
end
end
| 34.273245 | 104 | 0.592681 |
9194a69f27c7880ce638018cd8bdc4e7914234a9 | 203 | # frozen_string_literal: true
class CreateBlogs < ActiveRecord::Migration[6.0]
def change
create_table :blogs do |t|
t.string :title
t.text :body
t.timestamps
end
end
end
| 15.615385 | 48 | 0.660099 |
013a6d8847d0d9468451adcd5e86a93034a49c36 | 881 | cask "deezer" do
version "5.30.30"
sha256 "1fef7d10834d5206d8137beb0ccf440596d672578fc494d8e27495e3b612d13d"
url "https://www.deezer.com/desktop/download/artifact/darwin/x64/#{version}"
name "Deezer"
desc "Music player"
homepage "https://www.deezer.com/download"
livecheck do
url "https://www.deezer.com/desktop/download?platform=darwin&architecture=x64"
strategy :header_match
end
auto_updates true
depends_on macos: ">= :yosemite"
app "Deezer.app"
zap trash: [
"~/Library/Application Support/Caches/deezer-desktop-updater",
"~/Library/Application Support/deezer-desktop",
"~/Library/Logs/Deezer",
"~/Library/Logs/deezer-desktop",
"~/Library/Preferences/ByHost/com.deezer.*",
"~/Library/Preferences/com.deezer.deezer-desktop.plist",
"~/Library/Saved Application State/com.deezer.deezer-desktop.savedState",
]
end
| 29.366667 | 82 | 0.725312 |
1813baf06b4bbd4720c0deb83f1b9071db547d52 | 112 | require "rdcl/link/dock_cmds/dock_cmd.rb"
module RDCL
class DockCmdGetPassword < DockCmd
end
end
| 12.444444 | 42 | 0.714286 |
8763c5f4371a1c9bf1d847da14aa9db719f7e107 | 78 | class Iris::GovernanceParamsDecorator < Common::GovernanceParamsDecorator
end
| 26 | 73 | 0.871795 |
38d629fd21c002f0c3385d90d15cd750796e1495 | 385 | # frozen_string_literal: true
Sequel.migration do
up do
create_table :days do
date :date
json1 :rates
index :date, unique: true
end
drop_table :currencies
end
down do
create_table :currencies do
date :date
string :iso_code
float :rate
index %i[date iso_code], unique: true
end
drop_table :days
end
end
| 16.73913 | 43 | 0.620779 |
334ba1f7795dc29e23eaebf096c291c8b51f6c52 | 1,016 | require 'rails_helper'
RSpec.describe '/deals', type: :request do
login_user
describe 'GET /new' do
it 'renders a successful response' do
get new_deal_url
expect(response).to be_successful
end
end
describe 'POST /create' do
context 'with valid parameters' do
it 'creates a new Deal' do
expect do
post deals_url,
params: { deal: FactoryBot.attributes_for(:deal, category_ids: [FactoryBot.create(:category).id]) }
end.to change(Deal, :count).by(1)
end
end
context 'with invalid parameters' do
it 'does not create a new deal' do
expect do
post deals_url, params: { deal: FactoryBot.attributes_for(:deal, name: nil) }
end.to change(Deal, :count).by(0)
end
it "renders new page (i.e. to display the 'new' template)" do
post deals_url, params: { deal: FactoryBot.attributes_for(:deal, name: nil) }
expect(response).not_to be_successful
end
end
end
end
| 27.459459 | 114 | 0.63189 |
e2abc5c0902355964eaf27a6a77cab32bc631f91 | 3,966 | module Money
  # Get the current base currency; returns an instance of the Currency class
def self.get_base
valute = Currency.find_by_is_base(true)
valute
end
  # Set a new base currency
def self.set_base(sym)
currency = Currency.find_by_code(sym)
    raise "Currency with the given code was not found" if currency.nil?
    # If the currency with code sym is already the base currency,
    # nothing needs to be done
unless currency.is_base
currency.is_base = true
currency.save
calculate
end
end
  # Set a currency's exchange rate relative to the current base currency
def self.set_currency(fromcode, rate)
from = Currency.find_by_code(fromcode)
to = get_base
    raise "The database has no information about currency #{fromcode}" unless from
    raise "No base currency has been set" unless to
CalculatedExchangeRate.create(from_currency_id: from.id, to_currency_id: to.id, rate: rate) unless from.id == to.id
end
  # Recalculate exchange rates relative to the base currency and store records in CalculatedExchangeRates
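  # A worked example with assumed figures: if rates are stored as rubles per
  # unit, the base currency is EUR at 100 RUB and USD is at 90 RUB, the stored
  # rate for USD becomes 90 / 100 = 0.9 (one USD equals 0.9 of the base currency).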
def self.calculate
    # Fetch the current base currency
    base = get_base
    raise "Exchange rates cannot be calculated: no base currency has been set" unless base
    # cur_base_rate - the current rate of the base currency relative to the ruble
if base.code == 'RUB'
cur_base_rate = 1
else
      # If ExchangeRates has no data on the base currency's rate relative to the ruble
      raise "Exchange rates cannot be calculated:
      no data on the base currency's rate relative to the ruble" unless base.exchange_rates.last
cur_base_rate = base.exchange_rates.last.rate
end
@curriencies = Currency.all.includes(:exchange_rates)
    # Iterate over the collection of all currencies
@curriencies.each do |cur|
      # cur_from_rate - the rate relative to the ruble for the currency
      # currently being processed
if cur.code == 'RUB'
cur_from_rate = 1
else
        raise "Exchange rates cannot be calculated:
        no data on the rate of currency #{cur.code}: #{cur.name} relative to the ruble" unless cur.exchange_rates.last
cur_from_rate = cur.exchange_rates.last.rate
end
set_currency(cur.code, (cur_from_rate/cur_base_rate).to_f)
end
end
  # Get a currency's rate relative to any other currency (the base currency by default)
  # at any date and time (the current moment by default)
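  # Hypothetical usage (values illustrative; ActiveSupport assumed for 1.day.ago):
  #   Money.get_currency('USD')                      # against the base currency, now
  #   Money.get_currency('USD', to: 'EUR')            # against EUR
  #   Money.get_currency('USD', datetime: 1.day.ago)  # as of a past moment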
def self.get_currency(fromcode, options = {})
from = Currency.find_by_code(fromcode)
to = options[:to] ? Currency.find_by_code(options[:to]) : Money.get_base
datetime = options[:datetime] || DateTime.now
    raise "The database does not contain the currency to convert from" unless from
    raise "The database does not contain the currency to convert to" unless to
    # If we are converting a currency into itself
if from.id == to.id
1.0
else
      # Quote data for the currency we are converting from
ratefrom = CalculatedExchangeRate.where("from_currency_id = ? and created_at < ?", from.id, datetime).last
      # Quote data for the currency we are converting to
rateto = CalculatedExchangeRate.where("from_currency_id = ? and created_at < ?", to.id, datetime).last
      # Converting into the base currency
if (!ratefrom.nil? && ratefrom.to_currency_id == to.id)
ratefrom.rate.to_f.round(4)
      # Converting from the base currency
elsif (!rateto.nil? && rateto.to_currency_id == from.id)
(1/rateto.rate).to_f.round(4)
      # Converting between two non-base currencies
elsif (!ratefrom.nil? && !rateto.nil? && ratefrom.to_currency_id == rateto.to_currency_id)
(ratefrom.rate/rateto.rate).to_f.round(4)
      # The required records were not found in CalculatedExchangeRates
      else
        raise "There is not enough data in the database"
end
end
end
end
| 37.415094 | 120 | 0.675996 |
b9b008eec1d6c615c398e04307ff930dce8dafd8 | 1,074 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe MidataService do
let(:service) { described_class.new }
let(:pbs_camp_unit_id) { '1328' }
# describe '#pull_camp_unit', vcr: true do
# subject(:camp_unit) { service.pull_camp_unit(pbs_camp_unit_id) }
# it { is_expected.to have_attributes(pbs_id: pbs_camp_unit_id) }
# it { is_expected.to be_persisted }
# end
describe '#fetch_camp_unit_data', vcr: true do
subject(:camp_unit_data) { service.fetch_camp_unit_data(pbs_camp_unit_id) }
it { expect(camp_unit_data.dig('events', 0, 'id')).to eq(pbs_camp_unit_id) }
it { expect(camp_unit_data.dig('events', 0, 'links', 'sub_camps')).to eq([1322, 1329]) }
end
describe '#fetch_participations', vcr: true do
subject(:event_participations_data) { service.fetch_participations(group_id, event_id) }
let(:group_id) { 749 }
let(:event_id) { 1328 }
it { expect(event_participations_data['current_page']).to eq 1 }
it { expect(event_participations_data['event_participations'].length).to eq 2 }
end
end
| 32.545455 | 92 | 0.713222 |
ff03fb146f02b6a3d52720dd485709a2232a658c | 271 | class DeleteColumnSearches < ActiveRecord::Migration
def change
remove_column :searches, :point, :string
remove_column :searches, :large_area, :string
remove_column :searches, :middle_area, :string
remove_column :searches, :food_code, :string
end
end
| 30.111111 | 52 | 0.752768 |
1a68d7d461a5cf518a125a259dfc0f2685507916 | 1,538 | module Fog
module Compute
class Telefonica
class Real
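        # Lists the tenants visible to the authenticated token by issuing a GET
        # against the v2.0 identity endpoint and decoding the JSON response body.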
def list_tenants
response = @identity_connection.request(:expects => [200, 204],
:headers => {'Content-Type' => 'application/json',
'Accept' => 'application/json',
'X-Auth-Token' => @auth_token},
:method => 'GET',
:path => '/v2.0/tenants')
response.body = Fog::JSON.decode(response.body)
response
end
end
class Mock
def list_tenants
response = Excon::Response.new
response.status = [200, 204][rand(2)]
response.body = {
'tenants_links' => [],
'tenants' => [
{'id' => '1',
'description' => 'Has access to everything',
'enabled' => true,
'name' => 'admin'},
{'id' => '2',
'description' => 'Normal tenant',
'enabled' => true,
'name' => 'default'},
{'id' => '3',
'description' => 'Disabled tenant',
'enabled' => false,
'name' => 'disabled'}
]
}
response
end
end
end
end
end
| 34.954545 | 100 | 0.350455 |
f76ecc8c2299138aa78ea25d049272d7e05893f4 | 2,484 | module Provider::Admin::DashboardsHelper
include ApplicationHelper
# @param name [Symbol]
# @param params [Hash]
def dashboard_widget(name, params = {})
widget = DashboardWidgetPresenter.new(name, params)
widget.render
end
def friendly_service_name(service)
if service.name =~ /api$/i
service.name
else
service.name + ' API'
end
end
def sign_class(widget)
widget.percentual_change > 0 ? 'u-plus' : 'u-minus'
end
def dashboard_navigation_link(link_text, path, options = {})
link_to path, class: css_class({
'DashboardNavigation-link': true,
'u-notice': options.fetch(:notice, false)
}) do
icon_name = options.fetch(:icon_name, nil)
link_text = link_text.prepend "#{icon(icon_name)} " if icon_name
link_text.html_safe
end
end
def dashboard_secondary_link(link_text, path, options = {})
safe_wrap_with_parenthesis(dashboard_navigation_link(link_text, path, options))
end
def dashboard_collection_link(singular_name, collection, path, options = {})
link_text = pluralize(number_to_human(collection.size), singular_name, options.fetch(:plural, nil))
dashboard_navigation_link(link_text, path, options)
end
def dashboard_secondary_collection_link(singular_name, collection, path, options = {})
safe_wrap_with_parenthesis(dashboard_collection_link(singular_name, collection, path, options))
end
def safe_wrap_with_parenthesis(html)
" (#{h html})".html_safe
end
def show_pending_accounts_on_dashboard?
current_account.buyers.pending.exists?
end
def show_account_plans_on_dashboard?
current_account.settings.account_plans.allowed? && current_account.settings.account_plans_ui_visible? && current_account.account_plans.not_custom.size > 1
end
def show_forum_on_dashboard?
current_account.forum_enabled? && current_account.forum.recent_topics.any?
end
def show_subscriptions_on_dashboard?(service)
can?(:manage, :service_contracts) && current_account.settings.service_plans.allowed? && current_account.settings.service_plans_ui_visible? && current_account.service_plans.not_custom.size > 1
end
def show_service_plans_on_dashboard?(service)
can?(:manage, :service_plans) && service.service_plans.not_custom.size > 1
end
def show_end_users_on_dashboard?(service)
can?(:manage, :end_users) && service.end_users_allowed? && current_account.settings.end_user_plans_ui_visible?
end
end
| 32.684211 | 195 | 0.744767 |
6aa76a5236ca63920abdceadefaf6f90d61dfcfd | 25,219 | #
# Author:: John Keiser <[email protected]>
# Copyright:: Copyright 2015-2016, John Keiser.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "exceptions"
require_relative "delayed_evaluator"
require_relative "chef_class"
require_relative "log"
class Chef
#
# Type and validation information for a property on a resource.
#
# A property named "x" manipulates the "@x" instance variable on a
# resource. The *presence* of the variable (`instance_variable_defined?(@x)`)
# tells whether the variable is defined; it may have any actual value,
# constrained only by validation.
#
# Properties may have validation, defaults, and coercion, and have full
# support for lazy values.
#
# @see Chef::Resource.property
# @see Chef::DelayedEvaluator
#
class Property
#
# Create a reusable property type that can be used in multiple properties
# in different resources.
#
# @param options [Hash<Symbol,Object>] Validation options. See Chef::Resource.property for
# the list of options.
#
# @example
# Property.derive(default: 'hi')
#
def self.derive(**options)
new(**options)
end
# This is to support #deprecated_property_alias, by emitting an alias and a
    # deprecation warning when called.
#
# @param from [String] Name of the deprecated property
# @param to [String] Name of the correct property
# @param message [String] Deprecation message to show to the cookbook author
# @param declared_in [Class] Class this property comes from
#
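    # @example A hypothetical alias (resource and property names are illustrative only)
    #   Property.emit_deprecated_alias(:data, :content,
    #     "data is deprecated; use content instead", MyResource)
    #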
def self.emit_deprecated_alias(from, to, message, declared_in)
declared_in.class_eval <<-EOM, __FILE__, __LINE__ + 1
def #{from}(value=NOT_PASSED)
Chef.deprecated(:property, "#{message}")
#{to}(value)
end
def #{from}=(value)
Chef.deprecated(:property, "#{message}")
#{to} = value
end
EOM
end
#
# Create a new property.
#
# @param options [Hash<Symbol,Object>] Property options, including
# control options here, as well as validation options (see
# Chef::Mixin::ParamsValidate#validate for a description of validation
# options).
# @option options [Symbol] :name The name of this property.
# @option options [Class] :declared_in The class this property comes from.
# @option options [String] :description A description of the property.
# @option options [Symbol] :instance_variable_name The instance variable
# tied to this property. Must include a leading `@`. Defaults to `@<name>`.
# `nil` means the property is opaque and not tied to a specific instance
# variable.
# @option options [String] :introduced The release that introduced this property
# @option options [Boolean] :desired_state `true` if this property is part of desired
# state. Defaults to `true`.
# @option options [Boolean] :identity `true` if this property is part of object
# identity. Defaults to `false`.
# @option options [Boolean] :name_property `true` if this
# property defaults to the same value as `name`. Equivalent to
# `default: lazy { name }`, except that #property_is_set? will
# return `true` if the property is set *or* if `name` is set.
# @option options [Boolean] :nillable `true` opt-in to Chef-13 style behavior where
# attempting to set a nil value will really set a nil value instead of issuing
# a warning and operating like a getter [DEPRECATED]
# @option options [Object] :default The value this property
# will return if the user does not set one. If this is `lazy`, it will
# be run in the context of the instance (and able to access other
# properties) and cached. If not, the value will be frozen with Object#freeze
# to prevent users from modifying it in an instance.
# @option options [String] :default_description The description of the default value
# used in docs. Particularly useful when a default is computed or lazily eval'd.
# @option options [Boolean] :skip_docs This property should not be included in any
# documentation output
# @option options [Proc] :coerce A proc which will be called to
# transform the user input to canonical form. The value is passed in,
# and the transformed value returned as output. Lazy values will *not*
# be passed to this method until after they are evaluated. Called in the
# context of the resource (meaning you can access other properties).
# @option options [Boolean] :required `true` if this property
# must be present; `false` otherwise. This is checked after the resource
# is fully initialized.
# @option options [String] :deprecated If set, this property is deprecated and
# will create a deprecation warning.
#
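    # @example Constructing a property directly (values are illustrative only)
    #   Property.new(name: :path, declared_in: MyResource, is: String,
    #                default: '/var/tmp/cache', desired_state: true)
    #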
def initialize(**options)
options = options.inject({}) { |memo, (key, value)| memo[key.to_sym] = value; memo }
@options = options
options[:name] = options[:name].to_sym if options[:name]
options[:instance_variable_name] = options[:instance_variable_name].to_sym if options[:instance_variable_name]
# Replace name_attribute with name_property
if options.key?(:name_attribute)
# If we have both name_attribute and name_property and they differ, raise an error
if options.key?(:name_property)
raise ArgumentError, "name_attribute and name_property are functionally identical and both cannot be specified on a property at once. Use just one on property #{self}"
end
# replace name_property with name_attribute in place
options = Hash[options.map { |k, v| k == :name_attribute ? [ :name_property, v ] : [ k, v ] }]
@options = options
end
if options.key?(:default) && options.key?(:name_property)
raise ArgumentError, "A property cannot be both a name_property/name_attribute and have a default value. Use one or the other on property #{self}"
end
# Recursively freeze the default if it isn't a lazy value.
unless default.is_a?(DelayedEvaluator)
visitor = lambda do |obj|
case obj
when Hash
obj.each_value { |value| visitor.call(value) }
when Array
obj.each { |value| visitor.call(value) }
end
obj.freeze
end
visitor.call(default)
end
# Validate the default early, so the user gets a good error message, and
# cache it so we don't do it again if so
begin
# If we can validate it all the way to output, do it.
@stored_default = input_to_stored_value(nil, default, is_default: true)
rescue Chef::Exceptions::CannotValidateStaticallyError
# If the validation is not static (i.e. has procs), we will have to
# coerce and validate the default each time we run
end
end
def to_s
"#{name || "<property type>"}#{declared_in ? " of resource #{declared_in.resource_name}" : ""}"
end
#
# The name of this property.
#
# @return [String]
#
def name
options[:name]
end
#
# The class this property was defined in.
#
# @return [Class]
#
def declared_in
options[:declared_in]
end
#
# A description of this property.
#
# @return [String]
#
def description
options[:description]
end
#
# When this property was introduced
#
# @return [String]
#
def introduced
options[:introduced]
end
#
# The instance variable associated with this property.
#
# Defaults to `@<name>`
#
# @return [Symbol]
#
def instance_variable_name
if options.key?(:instance_variable_name)
options[:instance_variable_name]
elsif name
:"@#{name}"
end
end
#
# The raw default value for this resource.
#
# Does not coerce or validate the default. Does not evaluate lazy values.
#
# Defaults to `lazy { name }` if name_property is true; otherwise defaults to
# `nil`
#
def default
return options[:default] if options.key?(:default)
return Chef::DelayedEvaluator.new { name } if name_property?
nil
end
#
    # A description of the default value of this property.
#
# @return [String]
#
def default_description
options[:default_description]
end
#
# Whether this is part of the resource's natural identity or not.
#
# @return [Boolean]
#
def identity?
options[:identity]
end
#
# Whether this is part of desired state or not.
#
# Defaults to true.
#
# @return [Boolean]
#
def desired_state?
return true unless options.key?(:desired_state)
options[:desired_state]
end
#
# Whether this is name_property or not.
#
# @return [Boolean]
#
def name_property?
options[:name_property]
end
#
# Whether this property has a default value.
#
# @return [Boolean]
#
def has_default?
options.key?(:default) || name_property?
end
#
# Whether this property is required or not.
#
# @return [Boolean]
#
def required?
options[:required]
end
#
# Whether this property should be skipped for documentation purposes.
#
# Defaults to false.
#
# @return [Boolean]
#
def skip_docs?
options.fetch(:skip_docs, false)
end
#
# Whether this property is sensitive or not.
#
# Defaults to false.
#
# @return [Boolean]
#
def sensitive?
options.fetch(:sensitive, false)
end
#
# Validation options. (See Chef::Mixin::ParamsValidate#validate.)
#
# @return [Hash<Symbol,Object>]
#
def validation_options
@validation_options ||= options.reject do |k, v|
%i{declared_in name instance_variable_name desired_state identity default name_property coerce required nillable sensitive description introduced deprecated default_description skip_docs}.include?(k)
end
end
#
# Handle the property being called.
#
# The base implementation does the property get-or-set:
#
# ```ruby
# resource.myprop # get
# resource.myprop value # set
# ```
#
# Subclasses may implement this with any arguments they want, as long as
# the corresponding DSL calls it correctly.
#
# @param resource [Chef::Resource] The resource to get the property from.
# @param value The value to set (or NOT_PASSED if it is a get).
#
# @return The current value of the property. If it is a `set`, lazy values
# will be returned without running, validating or coercing. If it is a
# `get`, the non-lazy, coerced, validated value will always be returned.
#
def call(resource, value = NOT_PASSED)
if value == NOT_PASSED
get(resource)
else
set(resource, value)
end
end
#
# Get the property value from the resource, handling lazy values,
# defaults, and validation.
#
# - If the property's value is lazy, it is evaluated, coerced and validated.
# - If the property has no value, and is required, raises ValidationFailed.
    # - If the property has no value, but has a lazy default, it is evaluated,
    #   coerced and validated. If the evaluated value is frozen, the resulting
    #   value is returned but not stored back on the resource.
# - If the property has no value, but has a default, the default value
# will be returned and frozen. If the default value is lazy, it will be
# evaluated, coerced and validated, and the result stored in the property.
# - If the property has no value, but is name_property, `resource.name`
# is retrieved, coerced, validated and stored in the property.
# - Otherwise, `nil` is returned.
#
# @param resource [Chef::Resource] The resource to get the property from.
#
# @return The value of the property.
#
# @raise Chef::Exceptions::ValidationFailed If the value is invalid for
# this property, or if the value is required and not set.
#
def get(resource, nil_set: false)
# If it's set, return it (and evaluate any lazy values)
value = nil
if is_set?(resource)
value = get_value(resource)
value = stored_value_to_output(resource, value)
else
# We are getting the default value.
if has_default?
# If we were able to cache the stored_default, grab it.
if defined?(@stored_default)
value = @stored_default
else
# Otherwise, we have to validate it now.
value = input_to_stored_value(resource, default, is_default: true)
end
value = stored_value_to_output(resource, value)
# If the value is mutable (non-frozen), we set it on the instance
# so that people can mutate it. (All constant default values are
# frozen.)
if !value.frozen? && !value.nil?
set_value(resource, value)
end
end
end
if value.nil? && required?
raise Chef::Exceptions::ValidationFailed, "#{name} is a required property"
else
value
end
end
#
# Set the value of this property in the given resource.
#
# Non-lazy values are coerced and validated before being set. Coercion
# and validation of lazy values is delayed until they are first retrieved.
#
# @param resource [Chef::Resource] The resource to set this property in.
# @param value The value to set.
#
# @return The value that was set, after coercion (if lazy, still returns
# the lazy value)
#
# @raise Chef::Exceptions::ValidationFailed If the value is invalid for
# this property.
#
def set(resource, value)
value = set_value(resource, input_to_stored_value(resource, value))
if options.key?(:deprecated)
Chef.deprecated(:property, options[:deprecated])
end
if value.nil? && required?
raise Chef::Exceptions::ValidationFailed, "#{name} is a required property"
else
value
end
end
#
# Find out whether this property has been set.
#
# This will be true if:
# - The user explicitly set the value
# - The property has a default, and the value was retrieved.
#
# From this point of view, it is worth looking at this as "what does the
    # user think this value should be." In other words, if the user grabbed
# the value, even if it was a default, they probably based calculations on
# it. If they based calculations on it and the value changes, the rest of
# the world gets inconsistent.
#
# @param resource [Chef::Resource] The resource to get the property from.
#
# @return [Boolean]
#
def is_set?(resource)
value_is_set?(resource)
end
#
# Reset the value of this property so that is_set? will return false and the
# default will be returned in the future.
#
# @param resource [Chef::Resource] The resource to get the property from.
#
def reset(resource)
reset_value(resource)
end
#
# Coerce an input value into canonical form for the property.
#
# After coercion, the value is suitable for storage in the resource.
# You must validate values after coercion, however.
#
# Does no special handling for lazy values.
#
# @param resource [Chef::Resource] The resource we're coercing against
# (to provide context for the coerce).
# @param value The value to coerce.
#
# @return The coerced value.
#
# @raise Chef::Exceptions::ValidationFailed If the value is invalid for
# this property.
#
def coerce(resource, value)
if options.key?(:coerce)
# nil is never coerced
unless value.nil?
value = exec_in_resource(resource, options[:coerce], value)
end
end
value
end
#
# Validate a value.
#
# Calls Chef::Mixin::ParamsValidate#validate with #validation_options as
# options.
#
# @param resource [Chef::Resource] The resource we're validating against
# (to provide context for the validation).
# @param value The value to validate.
#
# @raise Chef::Exceptions::ValidationFailed If the value is invalid for
# this property.
#
def validate(resource, value)
# nils are not validated unless we have an explicit default value
if !value.nil? || has_default?
if resource
resource.validate({ name => value }, { name => validation_options })
else
name = self.name || :property_type
Chef::Mixin::ParamsValidate.validate({ name => value }, { name => validation_options })
end
end
end
#
# Derive a new Property that is just like this one, except with some added or
# changed options.
#
# @param options [Hash<Symbol,Object>] List of options that would be passed
# to #initialize.
#
# @return [Property] The new property type.
#
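    # @example Deriving a copy with a changed option (names and values illustrative only)
    #   optional_variant = some_property.derive(required: false)
    #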
def derive(**modified_options)
# Since name_property, name_attribute and default override each other,
# if you specify one of them in modified_options it overrides anything in
# the original options.
options = self.options
if modified_options.key?(:name_property) ||
modified_options.key?(:name_attribute) ||
modified_options.key?(:default)
options = options.reject { |k, v| k == :name_attribute || k == :name_property || k == :default }
end
self.class.new(options.merge(modified_options))
end
#
# Emit the DSL for this property into the resource class (`declared_in`).
#
# Creates a getter and setter for the property.
#
def emit_dsl
# We don't create the getter/setter if it's a custom property; we will
# be using the existing getter/setter to manipulate it instead.
return unless instance_variable_name
      # Properties may override existing properties up the inheritance hierarchy, but
# properties must not override inherited methods like Object#hash. When the Resource is
# placed into the resource collection the ruby Hash object will call the
# Object#hash method on the resource, and overriding that with a property will cause
# very confusing results.
if property_redefines_method?
resource_name = declared_in.respond_to?(:resource_name) ? declared_in.resource_name : declared_in
raise ArgumentError, "Property `#{name}` of resource `#{resource_name}` overwrites an existing method. A different name should be used for this property."
end
# We prefer this form because the property name won't show up in the
# stack trace if you use `define_method`.
declared_in.class_eval <<-EOM, __FILE__, __LINE__ + 1
def #{name}(value=NOT_PASSED)
raise "Property `#{name}` of `\#{self}` was incorrectly passed a block. Possible property-resource collision. To call a resource named `#{name}` either rename the property or else use `declare_resource(:#{name}, ...)`" if block_given?
self.class.properties[#{name.inspect}].call(self, value)
end
def #{name}=(value)
raise "Property `#{name}` of `\#{self}` was incorrectly passed a block. Possible property-resource collision. To call a resource named `#{name}` either rename the property or else use `declare_resource(:#{name}, ...)`" if block_given?
self.class.properties[#{name.inspect}].set(self, value)
end
EOM
end
#
# The options this Property will use for get/set behavior and validation.
#
# @see #initialize for a list of valid options.
#
attr_reader :options
#
# Find out whether this type accepts nil explicitly.
#
# A type accepts nil explicitly if "is" allows nil, it validates as nil, *and* is not simply
# an empty type.
#
# A type is presumed to accept nil if it does coercion (which must handle nil).
#
# These examples accept nil explicitly:
# ```ruby
# property :a, [ String, nil ]
# property :a, [ String, NilClass ]
# property :a, [ String, proc { |v| v.nil? } ]
# ```
#
# This does not (because the "is" doesn't exist or doesn't have nil):
#
# ```ruby
# property :x, String
# ```
#
# These do not, even though nil would validate fine (because they do not
# have "is"):
#
# ```ruby
# property :a
# property :a, equal_to: [ 1, 2, 3, nil ]
# property :a, kind_of: [ String, NilClass ]
# property :a, respond_to: [ ]
# property :a, callbacks: { "a" => proc { |v| v.nil? } }
# ```
#
# @param resource [Chef::Resource] The resource we're coercing against
# (to provide context for the coerce).
#
# @return [Boolean] Whether this value explicitly accepts nil.
#
# @api private
def explicitly_accepts_nil?(resource)
options.key?(:coerce) ||
(options.key?(:is) && Chef::Mixin::ParamsValidate.send(:_pv_is, { name => nil }, name, options[:is]))
rescue Chef::Exceptions::ValidationFailed, Chef::Exceptions::CannotValidateStaticallyError
false
end
# @api private
def get_value(resource)
if instance_variable_name
resource.instance_variable_get(instance_variable_name)
else
resource.send(name)
end
end
# @api private
def set_value(resource, value)
if instance_variable_name
resource.instance_variable_set(instance_variable_name, value)
else
resource.send(name, value)
end
end
# @api private
def value_is_set?(resource)
if instance_variable_name
resource.instance_variable_defined?(instance_variable_name)
else
true
end
end
# @api private
def reset_value(resource)
if instance_variable_name
if value_is_set?(resource)
resource.remove_instance_variable(instance_variable_name)
end
else
raise ArgumentError, "Property #{name} has no instance variable defined and cannot be reset"
end
end
private
def property_redefines_method?
# We only emit deprecations if this property already exists as an instance method.
      # Weeding out class methods avoids unnecessary deprecations such as Chef::Resource
# defining a `name` property when there's an already-existing `name` method
# for a Module.
return false unless declared_in.instance_methods.include?(name)
# Only emit deprecations for some well-known classes. This will still
# allow more advanced users to subclass their own custom resources and
# override their own properties.
return false unless [ Object, BasicObject, Kernel, Chef::Resource ].include?(declared_in.instance_method(name).owner)
      # Allow top-level Chef::Resource properties, such as `name`, to be overridden.
# As of this writing, `name` is the only Chef::Resource property created with the
# `property` definition, but this will allow for future properties to be extended
# as needed.
!Chef::Resource.properties.keys.include?(name)
end
def exec_in_resource(resource, proc, *args)
if resource
if proc.arity > args.size
value = proc.call(resource, *args)
else
value = resource.instance_exec(*args, &proc)
end
else
# If we don't have a resource yet, we can't exec in resource!
raise Chef::Exceptions::CannotValidateStaticallyError, "Cannot validate or coerce without a resource"
end
end
def input_to_stored_value(resource, value, is_default: false)
if value.nil? && !is_default && !explicitly_accepts_nil?(resource)
value = default
end
unless value.is_a?(DelayedEvaluator)
value = coerce_and_validate(resource, value)
end
value
end
def stored_value_to_output(resource, value)
# Crack open lazy values before giving the result to the user
if value.is_a?(DelayedEvaluator)
value = exec_in_resource(resource, value)
value = coerce_and_validate(resource, value)
end
value
end
# Coerces and validates the value.
def coerce_and_validate(resource, value)
result = coerce(resource, value)
validate(resource, result)
result
end
end
end
| 34.311565 | 244 | 0.648598 |
4ac7c87a021b44b2a0818547cdb2f8b09e6c17cf | 3,438 | require "spec_helper"
describe Services::WaitlistMoviesCheck do
subject{described_class.new(movie)}
describe "#perform" do
When{subject.perform}
context "with an acceptable release after fetching" do
Given(:movie){create :movie, waitlist: true}
Given(:reloaded_movie){movie.reload}
Given{expect(NotifyHuginnJob).to receive(:perform_later).with("An acceptable release for #{movie.title} has been found. Will download in #{ENV['PTP_WAITLIST_DELAY_HOURS']} hours")}
Then{expect(movie.reload.releases.size).to eq 7}
And{expect(reloaded_movie.download_at).to be > DateTime.now - 1.hour + ENV["PTP_WAITLIST_DELAY_HOURS"].to_i.hours}
end
context "with a killer release after fetching" do
Given(:movie){create :movie, waitlist: true, imdb_id: "tt1189340"}
Given(:reloaded_movie){movie.reload}
Given{expect(NotifyHuginnJob).to receive(:perform_later).with("A killer release for #{movie.title} has been found. Will download immediately")}
Then{expect(movie.reload.releases.size).to eq 9}
And{expect(reloaded_movie.download_at).to be <= DateTime.now}
end
context "with no acceptable release after fetching" do
Given(:movie){create :movie, imdb_id: "tt1355683", waitlist: true}
Given(:reloaded_movie){movie.reload}
Then{expect(movie.reload.releases.size).to eq 1}
And{expect(reloaded_movie.download_at).to be_nil}
end
context "with no releases after fetching" do
Given!(:movie){create :movie, imdb_id: "2323232323", waitlist: true}
Given(:reloaded_movie){movie.reload}
Then{expect(reloaded_movie.releases.size).to eq 0}
And{expect(reloaded_movie.download_at).to be_nil}
end
context "when the movie already has a download_at value" do
Given!(:movie){create :movie, waitlist: true, download_at: Date.tomorrow}
Given(:reloaded_movie){movie.reload}
Given{expect(NotifyHuginnJob).not_to receive(:perform_later)}
Then{expect(reloaded_movie.releases.size).to eq 7}
And{expect(reloaded_movie.download_at).to eq movie.download_at}
end
context "when the movie already has a download_at value, but we have a killer release" do
Given!(:movie){create :movie, waitlist: true, download_at: Date.tomorrow, imdb_id: "tt1189340"}
Given(:reloaded_movie){movie.reload}
Given{expect(NotifyHuginnJob).to receive(:perform_later).with("A killer release for #{movie.title} has been found. Will download immediately")}
Then{expect(reloaded_movie.releases.size).to eq 9}
And{expect(reloaded_movie.download_at).to be <= DateTime.now}
end
context "when the movie already has a download_at value, but we have a killer release, buuuut download_at is earlier than now" do
Given!(:movie){create :movie, waitlist: true, download_at: Date.yesterday, imdb_id: "tt1189340"}
Given(:reloaded_movie){movie.reload}
Then{expect(reloaded_movie.releases.size).to eq 9}
And{expect(reloaded_movie.download_at).to eq Date.yesterday}
end
context "with a movie with no title" do
Given(:movie){create :movie, waitlist: true, imdb_id: "tt1189340", title: nil}
Given(:reloaded_movie){movie.reload}
Given{expect(NotifyHuginnJob).not_to receive(:perform_later)}
Then{expect(movie.reload.releases.size).to eq 9}
And{expect(reloaded_movie.download_at).to be <= DateTime.now}
end
end
end
| 40.928571 | 186 | 0.716696 |
bbe7138c1c602bd22bbfe94cd7af22bcdec6c3d1 | 262 | class CreateUsers < ActiveRecord::Migration
def self.up
create_table :users do |t|
t.string :name
t.string :email
t.string :login
t.string :password
t.timestamps
end
end
def self.down
drop_table :users
end
end
| 15.411765 | 43 | 0.629771 |
d577d8b51149956d0476bc657638b74f1fea911b | 576 | cask 'freesmug-chromium' do
version '47.0.2526.111'
sha256 '71d47d02f868684774b70f4243b1030f7a3efb5557e4684acab246b541bf68ab'
# sourceforge.net is the official download host per the vendor homepage
url "http://downloads.sourceforge.net/sourceforge/osxportableapps/Chromium_OSX_#{version}.dmg"
appcast 'http://sourceforge.net/projects/osxportableapps/rss?path=/Chromium',
checkpoint: '366664e93ad47982687fdff3c41e93017f85bf7e18bd88ad2d1d7bba9621f6be'
name 'Chromium'
homepage 'http://www.freesmug.org/chromium'
license :gpl
app 'Chromium.app'
end
| 38.4 | 96 | 0.793403 |
e8d792122d5c874b11d908717eefdae12903c62e | 30 | module AggregationsHelper
end
| 10 | 25 | 0.9 |
e2089b6486747b97c4eb0e4c15752a5367fabf59 | 4,946 | #!/usr/bin/ruby
#
# Author:: [email protected] (Sérgio Gomes)
#
# Copyright:: Copyright 2010, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example demonstrates how to handle policy violation errors. To create
# an ad group, run add_ad_group.rb.
#
# Tags: AdGroupAdService.mutate
require 'rubygems'
gem 'soap4r', '= 1.5.8'
require 'adwords4r'
require 'base64'
API_VERSION = 201008
def handle_policy_violation_error()
# AdWords::AdWordsCredentials.new will read a credentials file from
# ENV['HOME']/adwords.properties when called without parameters.
adwords = AdWords::API.new
ad_group_ad_srv = adwords.get_service('AdGroupAd', API_VERSION)
ad_group_id = 'INSERT_AD_GROUP_ID_HERE'.to_i
# Create text ad.
# The 'module' method being called here provides a shortcut to the
# module containing the classes for this service. This helps us avoid
# typing the full class name every time we need to create an object.
text_ad = ad_group_ad_srv.module::TextAd.new
text_ad.headline = 'Mars Cruise!!!'
text_ad.description1 = 'Visit the Red Planet in style.'
text_ad.description2 = 'Low-gravity fun for everyone!'
text_ad.url = 'http://www.example.com'
text_ad.displayUrl = 'www.example.com'
text_ad_operation = ad_group_ad_srv.module::AdGroupAdOperation.new
operand = ad_group_ad_srv.module::AdGroupAd.new
operand.adGroupId = ad_group_id
operand.ad = text_ad
text_ad_operation.operand = operand
text_ad_operation.operator = 'ADD'
operations = [text_ad_operation]
# Validate ad.
begin
# Enable "validate only" for the length of this block
adwords.validate_only do
ad_group_ad_srv.mutate(operations)
end
rescue AdWords::Error::ApiError => e
errors = []
if e.inner_fault and e.inner_fault.detail and
e.inner_fault.detail.apiExceptionFault
fault = e.inner_fault.detail.apiExceptionFault
if fault and fault.errors
errors = fault.errors
end
end
errors.each do |error|
if error.class.name.include? 'PolicyViolationError'
operation_index = AdWords::Utils.operation_index_for_error(error)
operation = operations[operation_index]
puts "Ad with headline '%s' violated %s policy '%s'." %
[operation.operand.ad.headline,
error.isExemptable ? 'exemptable' : 'non-exemptable',
error.externalPolicyName]
if error.isExemptable
# Add exemption request to the operation.
puts "Adding exemption request for policy name '%s' on text '%s'." %
[error.key.policyName, error.key.violatingText]
exemption_request = ad_group_ad_srv.module::ExemptionRequest.new
exemption_request.key = error.key
operation.exemptionRequests = [] if operation.exemptionRequests.nil?
operation.exemptionRequests << exemption_request
else
# Remove non-exemptable operation
puts "Removing the operation from the request."
operations.delete(operation)
end
else
# Non-policy error returned, re-throw exception.
raise e
end
end
end
# Add ads.
if operations.size > 0
response = ad_group_ad_srv.mutate(operations)
if response and response.rval and response.rval.value
ads = response.rval.value
puts "Added #{ads.length} ad(s) to ad group #{ad_group_id}."
ads.each do |ad|
puts " Ad id is #{ad.ad.id}, type is #{ad.ad.ad_Type} and status is " +
"\"#{ad.status}\"."
end
else
puts "No ads were added."
end
end
end
if __FILE__ == $0
# To enable logging of SOAP requests, set the ADWORDS4R_DEBUG environment
# variable to 'true'. This can be done either from your operating system
# environment or via code, as done below.
ENV['ADWORDS4R_DEBUG'] = 'false'
begin
handle_policy_violation_error()
# Connection error. Likely transitory.
rescue Errno::ECONNRESET, SOAP::HTTPStreamError, SocketError => e
puts 'Connection Error: %s' % e
puts 'Source: %s' % e.backtrace.first
# API Error.
rescue AdWords::Error::ApiError => e
puts 'SOAP Error: %s (code: %d)' % [e.soap_faultstring_ex, e.code_ex]
puts 'Trigger: %s' % e.trigger_ex unless e.trigger_ex.nil?
puts 'Source: %s' % e.backtrace.first
end
end
| 35.582734 | 80 | 0.683785 |
ac4dc654dde6868804a40d3ad12a548d3d6ad192 | 7,811 | module Spree
class Variant < Spree::Base
acts_as_paranoid
acts_as_list
include Spree::DefaultPrice
belongs_to :product, touch: true, class_name: 'Spree::Product', inverse_of: :variants
belongs_to :tax_category, class_name: 'Spree::TaxCategory'
delegate_belongs_to :product, :name, :description, :slug, :available_on,
:shipping_category_id, :meta_description, :meta_keywords,
:shipping_category
has_many :inventory_units, inverse_of: :variant
has_many :line_items, inverse_of: :variant
has_many :orders, through: :line_items
has_many :stock_items, dependent: :destroy, inverse_of: :variant
has_many :stock_locations, through: :stock_items
has_many :stock_movements, through: :stock_items
has_and_belongs_to_many :option_values, join_table: :spree_option_values_variants
has_many :images, -> { order(:position) }, as: :viewable, dependent: :destroy, class_name: "Spree::Image"
has_many :prices,
class_name: 'Spree::Price',
dependent: :destroy,
inverse_of: :variant
before_validation :set_cost_currency
validate :check_price
validates :cost_price, numericality: { greater_than_or_equal_to: 0, allow_nil: true }
validates :price, numericality: { greater_than_or_equal_to: 0, allow_nil: true }
validates_uniqueness_of :sku, allow_blank: true, conditions: -> { where(deleted_at: nil) }
after_create :create_stock_items
after_create :set_position
after_create :set_master_out_of_stock, unless: :is_master?
after_touch :clear_in_stock_cache
scope :in_stock, -> { joins(:stock_items).where('count_on_hand > ? OR track_inventory = ?', 0, false) }
def self.active(currency = nil)
joins(:prices).where(deleted_at: nil).where('spree_prices.currency' => currency || Spree::Config[:currency]).where('spree_prices.amount IS NOT NULL')
end
def self.having_orders
joins(:line_items).distinct
end
def tax_category
if self[:tax_category_id].nil?
product.tax_category
else
TaxCategory.find(self[:tax_category_id])
end
end
def cost_price=(price)
self[:cost_price] = Spree::LocalizedNumber.parse(price) if price.present?
end
def weight=(weight)
self[:weight] = Spree::LocalizedNumber.parse(weight) if weight.present?
end
# returns number of units currently on backorder for this variant.
def on_backorder
inventory_units.with_state('backordered').size
end
def is_backorderable?
Spree::Stock::Quantifier.new(self).backorderable?
end
def options_text
values = self.option_values.sort do |a, b|
a.option_type.position <=> b.option_type.position
end
values.to_a.map! do |ov|
"#{ov.option_type.presentation}: #{ov.presentation}"
end
values.to_sentence({ words_connector: ", ", two_words_connector: ", " })
end
# Default to master name
def exchange_name
is_master? ? name : options_text
end
def descriptive_name
is_master? ? name + ' - Master' : name + ' - ' + options_text
end
# use deleted? rather than checking the attribute directly. this
# allows extensions to override deleted? if they want to provide
# their own definition.
def deleted?
!!deleted_at
end
# Product may be created with deleted_at already set,
# which would make AR's default finder return nil.
# This is a stopgap for that little problem.
def product
Spree::Product.unscoped { super }
end
def options=(options = {})
options.each do |option|
set_option_value(option[:name], option[:value])
end
end
def set_option_value(opt_name, opt_value)
# no option values on master
return if self.is_master
option_type = Spree::OptionType.where(name: opt_name).first_or_initialize do |o|
o.presentation = opt_name
o.save!
end
current_value = self.option_values.detect { |o| o.option_type.name == opt_name }
unless current_value.nil?
return if current_value.name == opt_value
self.option_values.delete(current_value)
else
# then we have to check to make sure that the product has the option type
unless self.product.option_types.include? option_type
self.product.option_types << option_type
end
end
option_value = Spree::OptionValue.where(option_type_id: option_type.id, name: opt_value).first_or_initialize do |o|
o.presentation = opt_value
o.save!
end
self.option_values << option_value
self.save
end
def option_value(opt_name)
self.option_values.detect { |o| o.option_type.name == opt_name }.try(:presentation)
end
def price_in(currency)
prices.select{ |price| price.currency == currency }.first || Spree::Price.new(variant_id: self.id, currency: currency)
end
def amount_in(currency)
price_in(currency).try(:amount)
end
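    # Sums price adjustments for the given currency by calling any
    # "<key>_price_modifier_amount_in" methods (typically added by extensions)
    # that match the keys of the passed options hash.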
def price_modifier_amount_in(currency, options = {})
return 0 unless options.present?
options.keys.map { |key|
m = "#{key}_price_modifier_amount_in".to_sym
if self.respond_to? m
self.send(m, currency, options[key])
else
0
end
}.sum
end
def price_modifier_amount(options = {})
return 0 unless options.present?
options.keys.map { |key|
m = "#{options[key]}_price_modifier_amount".to_sym
if self.respond_to? m
self.send(m, options[key])
else
0
end
}.sum
end
def name_and_sku
"#{name} - #{sku}"
end
def sku_and_options_text
"#{sku} #{options_text}".strip
end
def in_stock?
Rails.cache.fetch(in_stock_cache_key) do
total_on_hand > 0
end
end
def can_supply?(quantity=1)
Spree::Stock::Quantifier.new(self).can_supply?(quantity)
end
def total_on_hand
Spree::Stock::Quantifier.new(self).total_on_hand
end
# Shortcut method to determine if inventory tracking is enabled for this variant
# This considers both variant tracking flag and site-wide inventory tracking settings
def should_track_inventory?
self.track_inventory? && Spree::Config.track_inventory_levels
end
private
def set_master_out_of_stock
if product.master && product.master.in_stock?
product.master.stock_items.update_all(:backorderable => false)
product.master.stock_items.each { |item| item.reduce_count_on_hand_to_zero }
end
end
# Ensures a new variant takes the product master price when price is not supplied
def check_price
if price.nil? && Spree::Config[:require_master_price]
raise 'No master variant found to infer price' unless (product && product.master)
raise 'Must supply price for variant or master.price for product.' if self == product.master
self.price = product.master.price
end
if currency.nil?
self.currency = Spree::Config[:currency]
end
end
def set_cost_currency
self.cost_currency = Spree::Config[:currency] if cost_currency.nil? || cost_currency.empty?
end
def create_stock_items
StockLocation.where(propagate_all_variants: true).each do |stock_location|
stock_location.propagate_variant(self)
end
end
def set_position
self.update_column(:position, product.variants.maximum(:position).to_i + 1)
end
def in_stock_cache_key
"variant-#{id}-in_stock"
end
def clear_in_stock_cache
Rails.cache.delete(in_stock_cache_key)
end
end
end
| 29.927203 | 155 | 0.665344 |
213a7dc12b6cc65c72a3e7257fe59afbd048dce2 | 1,463 | # -*- encoding: utf-8 -*-
# stub: rails-deprecated_sanitizer 1.0.3 ruby lib
Gem::Specification.new do |s|
s.name = "rails-deprecated_sanitizer".freeze
s.version = "1.0.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Kasper Timm Hansen".freeze]
s.date = "2014-09-25"
s.email = ["[email protected]".freeze]
s.homepage = "https://github.com/rails/rails-deprecated_sanitizer".freeze
s.licenses = ["MIT".freeze]
s.rubygems_version = "3.0.4".freeze
s.summary = "Deprecated sanitizer API extracted from Action View.".freeze
s.installed_by_version = "3.0.4" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<activesupport>.freeze, [">= 4.2.0.alpha"])
s.add_development_dependency(%q<bundler>.freeze, ["~> 1.6"])
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
else
s.add_dependency(%q<activesupport>.freeze, [">= 4.2.0.alpha"])
s.add_dependency(%q<bundler>.freeze, ["~> 1.6"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
end
else
s.add_dependency(%q<activesupport>.freeze, [">= 4.2.0.alpha"])
s.add_dependency(%q<bundler>.freeze, ["~> 1.6"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
end
end
| 38.5 | 112 | 0.667806 |
61ebe45f48754e92b3c340257ca40bafaf1c812d | 1,365 | # frozen_string_literal: true
require_relative 'lib/space_school/version'
Gem::Specification.new do |spec|
spec.name = 'space_school'
spec.version = SpaceSchool::VERSION
spec.authors = ['Carlos Torrealba']
spec.email = ['[email protected]']
spec.summary = 'Write a short summary, because RubyGems requires one.'
spec.description = 'Write a longer description or delete this line.'
spec.homepage = 'https://github.com/hexagonadev/space_school'
spec.license = 'MIT'
spec.required_ruby_version = Gem::Requirement.new('>= 2.7.0')
spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
spec.metadata['homepage_uri'] = spec.homepage
spec.metadata['source_code_uri'] = 'https://github.com/hexagonadev/space_school'
spec.metadata['changelog_uri'] = 'https://github.com/hexagonadev/space_school/blob/main/README.md'
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
  spec.files = Dir.chdir(File.expand_path(__dir__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = 'exe'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
end
| 42.65625 | 100 | 0.686447 |
1de1448ffcc7df8081cd7ebd28c10226754dc54f | 4,159 | module Fog
module AWS
class AutoScaling
class Real
require 'fog/aws/parsers/auto_scaling/describe_auto_scaling_instances'
# Returns a description of each Auto Scaling instance in the
# instance_ids list. If a list is not provided, the service returns the
# full details of all instances.
#
# This action supports pagination by returning a token if there are
# more pages to retrieve. To get the next page, call this action again
# with the returned token as the NextToken parameter.
#
# ==== Parameters
# * options<~Hash>:
# * 'InstanceIds'<~Array> - The list of Auto Scaling instances to
# describe. If this list is omitted, all auto scaling instances are
# described. The list of requested instances cannot contain more
# than 50 items. If unknown instances are requested, they are
# ignored with no error.
        #   * 'MaxRecords'<~Integer> - The maximum number of Auto Scaling
# instances to be described with each call.
# * 'NextToken'<~String> - The token returned by a previous call to
# indicate that there is more data available.
#
# ==== Returns
# * response<~Excon::Response>:
# * body<~Hash>:
# * 'ResponseMetadata'<~Hash>:
# * 'RequestId'<~String> - Id of request
# * 'DescribeAutoScalingInstancesResponse'<~Hash>:
# * 'AutoScalingInstances'<~Array>:
# * autoscalinginstancedetails<~Hash>:
# * 'AutoScalingGroupName'<~String> - The name of the Auto
# Scaling Group associated with this instance.
# * 'AvailabilityZone'<~String> - The availability zone in
# which this instance resides.
# * 'HealthStatus'<~String> - The health status of this
# instance. "Healthy" means that the instance is healthy
# and should remain in service. "Unhealthy" means that the
# instance is unhealthy. Auto Scaling should terminate and
# replace it.
# * 'InstanceId'<~String> - The instance's EC2 instance ID.
# * 'LaunchConfigurationName'<~String> - The launch
# configuration associated with this instance.
# * 'LifecycleState'<~String> - The life cycle state of this
# instance.
# * 'NextToken'<~String> - Acts as a paging mechanism for large
# result sets. Set to a non-empty string if there are
# additional results waiting to be returned. Pass this in to
# subsequent calls to return additional results.
#
# ==== See Also
# http://docs.amazonwebservices.com/AutoScaling/latest/APIReference/API_DescribeAutoScalingInstances.html
#
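        # ==== Example
        # A hypothetical call (the instance id is illustrative only):
        #
        #   describe_auto_scaling_instances(
        #     'InstanceIds' => ['i-0123456789abcdef0'],
        #     'MaxRecords'  => 20
        #   )
        #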
def describe_auto_scaling_instances(options = {})
if instance_ids = options.delete('InstanceIds')
options.merge!(AWS.indexed_param('InstanceIds.member.%d', [*instance_ids]))
end
request({
'Action' => 'DescribeAutoScalingInstances',
:parser => Fog::Parsers::AWS::AutoScaling::DescribeAutoScalingInstances.new
}.merge!(options))
end
end
class Mock
def describe_auto_scaling_instances(options = {})
results = { 'AutoScalingInstances' => [] }
self.data[:auto_scaling_groups].each do |asg_name, asg_data|
asg_data['Instances'].each do |instance|
results['AutoScalingInstances'] << {
'AutoScalingGroupName' => asg_name
}.merge!(instance)
end
end
response = Excon::Response.new
response.status = 200
response.body = {
'DescribeAutoScalingInstancesResult' => results,
'ResponseMetadata' => { 'RequestId' => Fog::AWS::Mock.request_id }
}
response
end
end
end
end
end
| 43.322917 | 113 | 0.57586 |
e8572c4003a7ac9e808919d6698d27dac63dd0b0 | 1,934 | class Calendar
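  # Returns holidays in the requested range, reusing the result of the previous
  # query when the same period is asked for twice in a row.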
def self.holidays_between(from, to)
if same_period_as_last_query?(from, to)
@holidays
else
get_holidays(from, to)
end
end
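  # Returns a hash mapping each covered date to the event summary for the user's
  # full-day events in the range that match the ENV['PERSONAL_CALENDAR_MARK'] query.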
def self.personal_events(user_access_token, email, from, to)
params = { :parameters => {
"calendarId" => email,
"timeMin" => from.to_datetime,
"timeMax" => to.to_datetime,
"q" => ENV['PERSONAL_CALENDAR_MARK']
}
}
get_calendar_events(user_access_token, params, "creator")
end
private
def self.get_holidays(from, to)
params = { :parameters => {
"calendarId" => holidays_calendar_id,
"timeMin" => from.to_datetime,
"timeMax" => to.to_datetime
}
}
@holidays = get_calendar_events(admin_access_token, params, "organizer")
end
def self.get_calendar_events(token, params, calendar_role)
client = Google::APIClient.new
client.authorization.access_token = token
service = client.discovered_api('calendar', 'v3')
response = client.execute({ :api_method => service.events.list }.merge(params))
parse(response.data, params[:parameters]["calendarId"], calendar_role)
end
def self.parse(google_data, filter, role_to_filter_by)
formatted_events = {}
full_day_events_filtered = google_data.items.select { |event|
event.send(role_to_filter_by).email == filter && event.start.date.present?
}
full_day_events_filtered.each do |event|
(Date.parse(event.start.date)..Date.parse(event.end.date) - 1).each do |date|
formatted_events[date] = event.summary
end
end
formatted_events
end
def self.admin_access_token
User.admins.last.access_token_for_api
end
def self.holidays_calendar_id
ENV['HOLIDAYS_CALENDAR_ID']
end
def self.same_period_as_last_query?(from, to)
if @from == from && @to == to
true
else
@from = from
@to = to
false
end
end
end
| 24.794872 | 83 | 0.661324 |
1c1867d5ff6c4175c138852133f98c2c75bb50d1 | 10,246 | require 'spec_helper'
require 'ddtrace/contrib/analytics_examples'
require 'securerandom'
require 'rake'
require 'rake/tasklib'
require 'ddtrace'
require 'ddtrace/contrib/rake/patcher'
RSpec.describe Datadog::Contrib::Rake::Instrumentation do
let(:tracer) { get_test_tracer }
let(:configuration_options) { { tracer: tracer, enabled: true } }
let(:spans) { tracer.writer.spans }
let(:span) { spans.first }
before(:each) do
skip('Rake integration incompatible.') unless Datadog::Contrib::Rake::Integration.compatible?
# Reset options (that might linger from other tests)
Datadog.configuration[:rake].reset_options!
# Patch Rake
Datadog.configure do |c|
c.use :rake, configuration_options
end
end
around do |example|
# Reset before and after each example; don't allow global state to linger.
Datadog.registry[:rake].reset_configuration!
example.run
Datadog.registry[:rake].reset_configuration!
# We don't want instrumentation enabled during the rest of the test suite...
Datadog.configure { |c| c.use :rake, enabled: false }
end
def reset_task!(task_name)
if Rake::Task.task_defined?(task_name)
Rake::Task[task_name].reenable
Rake::Task[task_name].clear
# Rake prior to version 12.0 doesn't clear args when #clear is invoked.
      # Perform a more invasive reset, to make sure it's reusable.
if Gem::Version.new(Rake::VERSION) < Gem::Version.new('12.0')
Rake::Task[task_name].instance_variable_set(:@arg_names, nil)
end
end
end
let(:task_name) { :test_rake_instrumentation }
let(:task_body) { proc { |task, args| spy.call(task, args) } }
let(:task_arg_names) { [] }
let(:task_class) do
stub_const('RakeInstrumentationTestTask', Class.new(Rake::TaskLib)).tap do |task_class|
tb = task_body
task_class.send(:define_method, :initialize) do |name = task_name, *args|
task(name, *args, &tb)
end
end
end
let(:task) { Rake::Task[task_name] }
let(:spy) { double('spy') }
describe '#invoke' do
before(:each) do
::Rake.application.instance_variable_set(:@top_level_tasks, [task_name.to_s])
expect(tracer).to receive(:shutdown!).with(no_args).once.and_call_original
end
shared_examples_for 'a single task execution' do
before(:each) do
expect(spy).to receive(:call) do |invocation_task, invocation_args|
expect(invocation_task).to eq(task)
expect(invocation_args.to_hash).to eq(args_hash)
end
expect(task).to receive(:shutdown_tracer!).with(no_args).twice.and_call_original
task.invoke(*args)
end
let(:invoke_span) { spans.find { |s| s.name == Datadog::Contrib::Rake::Ext::SPAN_INVOKE } }
let(:execute_span) { spans.find { |s| s.name == Datadog::Contrib::Rake::Ext::SPAN_EXECUTE } }
it do
expect(spans).to have(2).items
end
describe '\'rake.invoke\' span' do
it do
expect(invoke_span.name).to eq(Datadog::Contrib::Rake::Ext::SPAN_INVOKE)
expect(invoke_span.resource).to eq(task_name.to_s)
expect(invoke_span.parent_id).to eq(0)
end
it_behaves_like 'analytics for integration' do
let(:span) { invoke_span }
let(:analytics_enabled_var) { Datadog::Contrib::Rake::Ext::ENV_ANALYTICS_ENABLED }
let(:analytics_sample_rate_var) { Datadog::Contrib::Rake::Ext::ENV_ANALYTICS_SAMPLE_RATE }
end
end
describe '\'rake.execute\' span' do
it do
expect(execute_span.name).to eq(Datadog::Contrib::Rake::Ext::SPAN_EXECUTE)
expect(execute_span.resource).to eq(task_name.to_s)
expect(execute_span.parent_id).to eq(invoke_span.span_id)
expect(execute_span.get_tag(Datadog::Ext::Analytics::TAG_SAMPLE_RATE)).to be nil
end
end
end
shared_examples 'an error occurrence' do
before(:each) do
expect(spy).to receive(:call) do
raise 'oops'
end
expect(task).to receive(:shutdown_tracer!).with(no_args).twice.and_call_original
end
it { expect { task.invoke(*args) }.to raise_error('oops') }
end
context 'for a task' do
let(:args_hash) { {} }
let(:task_arg_names) { args_hash.keys }
let(:args) { args_hash.values }
let(:define_task!) do
reset_task!(task_name)
Rake::Task.define_task(task_name, *task_arg_names, &task_body)
end
before(:each) { define_task! }
it 'returns task return value' do
allow(spy).to receive(:call)
expect(task.invoke(*args)).to contain_exactly(task_body)
end
context 'without args' do
it_behaves_like 'a single task execution' do
describe '\'rake.invoke\' span tags' do
it do
expect(invoke_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_TASK_ARG_NAMES)).to eq([].to_s)
expect(invoke_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_INVOKE_ARGS)).to eq(['?'].to_s)
end
end
describe '\'rake.execute\' span tags' do
it do
expect(execute_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_TASK_ARG_NAMES)).to be nil
expect(execute_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_EXECUTE_ARGS)).to eq({}.to_s)
end
end
end
it_behaves_like 'an error occurrence'
end
context 'with args' do
let(:args_hash) { { one: 1, two: 2, three: 3 } }
it_behaves_like 'a single task execution' do
describe '\'rake.invoke\' span tags' do
it do
expect(invoke_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_TASK_ARG_NAMES)).to eq([:one, :two, :three].to_s)
expect(invoke_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_INVOKE_ARGS)).to eq(['?'].to_s)
end
end
describe '\'rake.execute\' span tags' do
it do
expect(execute_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_TASK_ARG_NAMES)).to be nil
expect(execute_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_EXECUTE_ARGS)).to eq(
{ one: '?', two: '?', three: '?' }.to_s
)
end
end
end
it_behaves_like 'an error occurrence'
end
context 'with a prerequisite task' do
let(:prerequisite_task_name) { :test_rake_instrumentation_prerequisite }
let(:prerequisite_task_body) { proc { |task, args| prerequisite_spy.call(task, args) } }
let(:prerequisite_spy) { double('prerequisite spy') }
let(:prerequisite_task) { Rake::Task[prerequisite_task_name] }
let(:define_task!) do
reset_task!(task_name)
reset_task!(prerequisite_task_name)
Rake::Task.define_task(prerequisite_task_name, &prerequisite_task_body)
Rake::Task.define_task(task_name => prerequisite_task_name, &task_body)
end
before(:each) do
expect(prerequisite_spy).to receive(:call) do |invocation_task, invocation_args|
expect(invocation_task).to eq(prerequisite_task)
expect(invocation_args.to_hash).to eq({})
end.ordered
expect(spy).to receive(:call) do |invocation_task, invocation_args|
expect(invocation_task).to eq(task)
expect(invocation_args.to_hash).to eq(args_hash)
end.ordered
expect(task).to receive(:shutdown_tracer!).with(no_args).twice.and_call_original
expect(prerequisite_task).to receive(:shutdown_tracer!).with(no_args).once.and_call_original
task.invoke(*args)
end
let(:invoke_span) { spans.find { |s| s.name == Datadog::Contrib::Rake::Ext::SPAN_INVOKE } }
let(:prerequisite_task_execute_span) do
spans.find do |s|
s.name == Datadog::Contrib::Rake::Ext::SPAN_EXECUTE \
&& s.resource == prerequisite_task_name.to_s
end
end
let(:task_execute_span) do
spans.find do |s|
s.name == Datadog::Contrib::Rake::Ext::SPAN_EXECUTE \
&& s.resource == task_name.to_s
end
end
it do
expect(spans).to have(3).items
end
describe '\'rake.invoke\' span' do
it do
expect(invoke_span.name).to eq(Datadog::Contrib::Rake::Ext::SPAN_INVOKE)
expect(invoke_span.resource).to eq(task_name.to_s)
expect(invoke_span.parent_id).to eq(0)
expect(invoke_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_TASK_ARG_NAMES)).to eq([].to_s)
expect(invoke_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_INVOKE_ARGS)).to eq(['?'].to_s)
end
end
describe 'prerequisite \'rake.execute\' span' do
it do
expect(prerequisite_task_execute_span.name).to eq(Datadog::Contrib::Rake::Ext::SPAN_EXECUTE)
expect(prerequisite_task_execute_span.resource).to eq(prerequisite_task_name.to_s)
expect(prerequisite_task_execute_span.parent_id).to eq(invoke_span.span_id)
expect(prerequisite_task_execute_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_TASK_ARG_NAMES)).to be nil
expect(prerequisite_task_execute_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_EXECUTE_ARGS)).to eq({}.to_s)
end
end
describe 'task \'rake.execute\' span' do
it do
expect(task_execute_span.name).to eq(Datadog::Contrib::Rake::Ext::SPAN_EXECUTE)
expect(task_execute_span.resource).to eq(task_name.to_s)
expect(task_execute_span.parent_id).to eq(invoke_span.span_id)
expect(task_execute_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_TASK_ARG_NAMES)).to be nil
expect(task_execute_span.get_tag(Datadog::Contrib::Rake::Ext::TAG_EXECUTE_ARGS)).to eq({}.to_s)
end
end
end
context 'defined by a class' do
let(:define_task!) do
reset_task!(task_name)
task_class.new(task_name, *task_arg_names)
end
it_behaves_like 'a single task execution'
it_behaves_like 'an error occurrence'
end
end
end
end
| 37.808118 | 123 | 0.637322 |
382486e01a25cc8ccbdb4981ffd4f5c959aa1ddb | 174 | # frozen_string_literal: true
require 'blake/version'
require 'blake/main'
module Blake
def self.digest(input, *args)
Blake::Main.new(*args).digest(input)
end
end
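# Usage sketch (illustrative only): the extra arguments accepted by Blake::Main
# (digest length, key, etc.) are assumptions; only the forwarding of *args shown
# above is taken from this file.
#
#   Blake.digest("hello world")       # digest with Blake::Main's defaults
#   Blake.digest("hello world", 32)   # additional args go to Blake::Main.new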
| 14.5 | 40 | 0.724138 |
2138dc7db2a9319ee9613bd464ee56b11d1d5729 | 2,354 | # frozen_string_literal: true
# Copyright The OpenTelemetry Authors
#
# SPDX-License-Identifier: Apache-2.0
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'opentelemetry/instrumentation/sinatra/version'
Gem::Specification.new do |spec|
spec.name = 'opentelemetry-instrumentation-sinatra'
spec.version = OpenTelemetry::Instrumentation::Sinatra::VERSION
spec.authors = ['OpenTelemetry Authors']
spec.email = ['[email protected]']
spec.summary = 'Sinatra instrumentation for the OpenTelemetry framework'
spec.description = 'Sinatra instrumentation for the OpenTelemetry framework'
spec.homepage = 'https://github.com/open-telemetry/opentelemetry-ruby'
spec.license = 'Apache-2.0'
spec.files = ::Dir.glob('lib/**/*.rb') +
::Dir.glob('*.md') +
['LICENSE', '.yardopts']
spec.require_paths = ['lib']
spec.required_ruby_version = '>= 2.4.3'
spec.add_dependency 'opentelemetry-api', '~> 0.15.0'
spec.add_development_dependency 'appraisal', '~> 2.2.0'
spec.add_development_dependency 'bundler', '>= 1.17'
spec.add_development_dependency 'minitest', '~> 5.0'
spec.add_development_dependency 'opentelemetry-sdk', '~> 0.0'
spec.add_development_dependency 'rack-test', '~> 1.1.0'
spec.add_development_dependency 'rubocop', '~> 0.73.0'
spec.add_development_dependency 'simplecov', '~> 0.17.1'
spec.add_development_dependency 'sinatra', '~> 2.0.7'
spec.add_development_dependency 'webmock', '~> 3.7.6'
spec.add_development_dependency 'yard', '~> 0.9'
spec.add_development_dependency 'yard-doctest', '~> 0.1.6'
if spec.respond_to?(:metadata)
spec.metadata['changelog_uri'] = "https://open-telemetry.github.io/opentelemetry-ruby/opentelemetry-instrumentation-sinatra/v#{OpenTelemetry::Instrumentation::Sinatra::VERSION}/file.CHANGELOG.html"
spec.metadata['source_code_uri'] = 'https://github.com/open-telemetry/opentelemetry-ruby/tree/main/instrumentation/sinatra'
spec.metadata['bug_tracker_uri'] = 'https://github.com/open-telemetry/opentelemetry-ruby/issues'
spec.metadata['documentation_uri'] = "https://open-telemetry.github.io/opentelemetry-ruby/opentelemetry-instrumentation-sinatra/v#{OpenTelemetry::Instrumentation::Sinatra::VERSION}"
end
end
| 48.040816 | 201 | 0.728972 |
03ac1f27fe70e8f73b70ba26f8d766e8dcd2d3ee | 124 | require "capybara/spec/spec_helper"
require "pry"
RSpec.configure do |config|
Capybara::SpecHelper.configure(config)
end
| 17.714286 | 40 | 0.790323 |
8791f8e38ed00363a48b2df0326ef3d4802b49f0 | 1,905 | describe Travis::Yml::Schema::Def::Notification::Hipchat, 'structure' do
subject { Travis::Yml.schema[:definitions][:notification][:hipchat] }
# it { puts JSON.pretty_generate(subject) }
it do
should include(
'$id': :hipchat,
title: 'Hipchat',
normal: true,
see: kind_of(Hash),
anyOf: [
{
type: :object,
properties: {
rooms: {
'$ref': '#/definitions/type/secures',
summary: kind_of(String)
},
format: {
type: :string,
enum: [
'html',
'text'
],
summary: kind_of(String)
},
notify: {
type: :boolean,
summary: kind_of(String)
},
if: {
'$ref': '#/definitions/type/condition'
},
on_pull_requests: {
type: :boolean,
summary: kind_of(String)
},
template: {
'$ref': '#/definitions/notification/templates'
},
on_success: {
'$ref': '#/definitions/notification/frequency'
},
on_failure: {
'$ref': '#/definitions/notification/frequency'
},
enabled: {
type: :boolean,
summary: kind_of(String)
},
disabled: {
type: :boolean,
summary: kind_of(String)
}
},
additionalProperties: false,
normal: true,
prefix: {
key: :rooms
},
changes: [
{
change: :enable,
}
]
},
{
'$ref': '#/definitions/type/secures'
},
{
type: :boolean
}
]
)
end
end
| 24.423077 | 72 | 0.3979 |
620b2a34133571a869d83b5cf021d0bef9e0cf78 | 5,274 | require 'rails_helper'
describe 'Board with milestone', :feature, :js do
let(:user) { create(:user) }
let(:project) { create(:empty_project, :public) }
let!(:milestone) { create(:milestone, project: project) }
let!(:issue) { create(:closed_issue, project: project) }
let!(:issue_milestone) { create(:closed_issue, project: project, milestone: milestone) }
before do
project.team << [user, :master]
gitlab_sign_in(user)
end
context 'new board' do
before do
visit namespace_project_boards_path(project.namespace, project)
end
it 'creates board with milestone' do
create_board_with_milestone
expect(find('.tokens-container')).to have_content(milestone.title)
wait_for_requests
find('.card', match: :first)
expect(all('.board').last).to have_selector('.card', count: 1)
end
end
context 'update board' do
let!(:milestone_two) { create(:milestone, project: project) }
let!(:board) { create(:board, project: project, milestone: milestone) }
before do
visit namespace_project_boards_path(project.namespace, project)
end
it 'defaults milestone filter' do
page.within '#js-multiple-boards-switcher' do
find('.dropdown-menu-toggle').click
wait_for_requests
click_link board.name
end
expect(find('.tokens-container')).to have_content(milestone.title)
find('.card', match: :first)
expect(all('.board').last).to have_selector('.card', count: 1)
end
it 'sets board to any milestone' do
update_board_milestone('Any Milestone')
expect(page).not_to have_css('.js-visual-token')
expect(find('.tokens-container')).not_to have_content(milestone.title)
find('.card', match: :first)
expect(page).to have_selector('.board', count: 3)
expect(all('.board').last).to have_selector('.card', count: 2)
end
it 'sets board to upcoming milestone' do
update_board_milestone('Upcoming')
expect(find('.tokens-container')).not_to have_content(milestone.title)
find('.board', match: :first)
expect(all('.board')[1]).to have_selector('.card', count: 0)
end
    it 'does not allow milestone in filter to be edited' do
find('.filtered-search').native.send_keys(:backspace)
page.within('.tokens-container') do
expect(page).to have_selector('.value')
end
end
it 'does not render milestone in hint dropdown' do
find('.filtered-search').click
page.within('#js-dropdown-hint') do
expect(page).not_to have_button('Milestone')
end
end
end
context 'removing issue from board' do
let(:label) { create(:label, project: project) }
let!(:issue) { create(:labeled_issue, project: project, labels: [label], milestone: milestone) }
let!(:board) { create(:board, project: project, milestone: milestone) }
let!(:list) { create(:list, board: board, label: label, position: 0) }
before do
visit namespace_project_boards_path(project.namespace, project)
end
it 'removes issues milestone when removing from the board' do
wait_for_requests
first('.card .card-number').click
click_button('Remove from board')
visit namespace_project_issue_path(project.namespace, project, issue)
expect(page).to have_content('removed milestone')
page.within('.milestone.block') do
expect(page).to have_content('None')
end
end
end
context 'new issues' do
let(:label) { create(:label, project: project) }
let!(:list1) { create(:list, board: board, label: label, position: 0) }
let!(:board) { create(:board, project: project, milestone: milestone) }
let!(:issue) { create(:issue, project: project) }
before do
visit namespace_project_boards_path(project.namespace, project)
end
it 'creates new issue with boards milestone' do
wait_for_requests
page.within(first('.board')) do
find('.btn-default').click
find('.form-control').set('testing new issue with milestone')
click_button('Submit issue')
wait_for_requests
click_link('testing new issue with milestone')
end
expect(page).to have_content(milestone.title)
end
it 'updates issue with milestone from add issues modal' do
wait_for_requests
click_button 'Add issues'
page.within('.add-issues-modal') do
card = find('.card', :first)
expect(page).to have_selector('.card', count: 1)
card.click
click_button 'Add 1 issue'
end
click_link(issue.title)
expect(page).to have_content(milestone.title)
end
end
def create_board_with_milestone
page.within '#js-multiple-boards-switcher' do
find('.dropdown-menu-toggle').click
click_link 'Create new board'
find('#board-new-name').set 'test'
click_button 'Milestone'
click_link milestone.title
click_button 'Create'
end
end
def update_board_milestone(milestone_title)
page.within '#js-multiple-boards-switcher' do
find('.dropdown-menu-toggle').click
click_link 'Edit board milestone'
click_link milestone_title
click_button 'Save'
end
end
end
| 26.502513 | 100 | 0.659651 |
d55e5b07f4aa36de47f86bc9578768f2d808ad8a | 11,569 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2022_01_29_182526) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "active_admin_comments", id: :serial, force: :cascade do |t|
t.string "namespace"
t.text "body"
t.string "resource_type"
t.integer "resource_id"
t.string "author_type"
t.integer "author_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["author_type", "author_id"], name: "index_active_admin_comments_on_author_type_and_author_id"
t.index ["namespace"], name: "index_active_admin_comments_on_namespace"
t.index ["resource_type", "resource_id"], name: "index_active_admin_comments_on_resource_type_and_resource_id"
end
create_table "admin_flashes", id: :serial, force: :cascade do |t|
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "message", limit: 255
t.datetime "expires"
end
create_table "courses", id: :serial, force: :cascade do |t|
t.string "title", limit: 255
t.text "description"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.integer "position", null: false
t.string "slug"
t.string "identifier_uuid", default: "", null: false
t.integer "path_id"
t.index ["identifier_uuid"], name: "index_courses_on_identifier_uuid", unique: true
t.index ["path_id"], name: "index_courses_on_path_id"
t.index ["slug"], name: "index_courses_on_slug"
end
create_table "flags", force: :cascade do |t|
t.integer "flagger_id", null: false
t.bigint "project_submission_id", null: false
t.text "reason", default: "", null: false
t.integer "status", default: 0, null: false
t.integer "taken_action", default: 0, null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.integer "resolved_by_id"
t.index ["flagger_id"], name: "index_flags_on_flagger_id"
t.index ["project_submission_id"], name: "index_flags_on_project_submission_id"
end
create_table "friendly_id_slugs", id: :serial, force: :cascade do |t|
t.string "slug", null: false
t.integer "sluggable_id", null: false
t.string "sluggable_type", limit: 50
t.string "scope"
t.datetime "created_at"
t.index ["slug", "sluggable_type", "scope"], name: "index_friendly_id_slugs_on_slug_and_sluggable_type_and_scope", unique: true
t.index ["slug", "sluggable_type"], name: "index_friendly_id_slugs_on_slug_and_sluggable_type"
t.index ["sluggable_id"], name: "index_friendly_id_slugs_on_sluggable_id"
t.index ["sluggable_type"], name: "index_friendly_id_slugs_on_sluggable_type"
end
create_table "lesson_completions", id: :serial, force: :cascade do |t|
t.integer "lesson_id"
t.integer "user_id"
t.datetime "created_at"
t.datetime "updated_at"
t.string "lesson_identifier_uuid", default: "", null: false
t.integer "course_id"
t.integer "path_id"
t.index ["course_id"], name: "index_lesson_completions_on_course_id"
t.index ["lesson_id", "user_id"], name: "index_lesson_completions_on_lesson_id_and_user_id", unique: true
t.index ["lesson_identifier_uuid"], name: "index_lesson_completions_on_lesson_identifier_uuid"
t.index ["path_id"], name: "index_lesson_completions_on_path_id"
t.index ["user_id"], name: "index_lesson_completions_on_user_id"
end
create_table "lessons", id: :serial, force: :cascade do |t|
t.string "title", limit: 255
t.string "url", limit: 255
t.integer "position", null: false
t.text "description"
t.boolean "is_project", default: false
t.integer "section_id", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.text "content"
t.string "slug"
t.boolean "accepts_submission", default: false, null: false
t.boolean "has_live_preview", default: false, null: false
t.boolean "choose_path_lesson", default: false, null: false
t.string "identifier_uuid", default: "", null: false
t.bigint "course_id"
t.boolean "installation_lesson", default: false
t.index ["course_id"], name: "index_lessons_on_course_id"
t.index ["identifier_uuid", "course_id"], name: "index_lessons_on_identifier_uuid_and_course_id", unique: true
t.index ["installation_lesson"], name: "index_lessons_on_installation_lesson"
t.index ["position"], name: "index_lessons_on_position"
t.index ["slug", "section_id"], name: "index_lessons_on_slug_and_section_id", unique: true
t.index ["url"], name: "index_lessons_on_url"
end
create_table "notifications", force: :cascade do |t|
t.string "recipient_type", null: false
t.bigint "recipient_id", null: false
t.string "type", null: false
t.jsonb "params"
t.datetime "read_at"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "url", null: false
t.text "message", null: false
t.string "title", null: false
t.index ["read_at"], name: "index_notifications_on_read_at"
t.index ["recipient_type", "recipient_id"], name: "index_notifications_on_recipient"
end
create_table "path_prerequisites", force: :cascade do |t|
t.bigint "path_id", null: false
t.bigint "prerequisite_id", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["path_id", "prerequisite_id"], name: "index_path_prerequisites_on_path_id_and_prerequisite_id", unique: true
t.index ["path_id"], name: "index_path_prerequisites_on_path_id"
t.index ["prerequisite_id"], name: "index_path_prerequisites_on_prerequisite_id"
end
create_table "paths", id: :serial, force: :cascade do |t|
t.string "title"
t.string "description"
t.integer "position"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "slug"
t.boolean "default_path", default: false, null: false
t.string "identifier_uuid", default: "", null: false
t.index ["identifier_uuid"], name: "index_paths_on_identifier_uuid", unique: true
t.index ["slug"], name: "index_paths_on_slug", unique: true
end
create_table "points", force: :cascade do |t|
t.string "discord_id", null: false
t.integer "points", default: 0, null: false
t.index ["discord_id"], name: "index_points_on_discord_id", unique: true
end
create_table "project_submissions", id: :serial, force: :cascade do |t|
t.string "repo_url"
t.string "live_preview_url", default: "", null: false
t.integer "user_id"
t.integer "lesson_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.boolean "is_public", default: true, null: false
t.integer "cached_votes_total", default: 0
t.datetime "discarded_at"
t.datetime "discard_at"
t.index ["discarded_at"], name: "index_project_submissions_on_discarded_at"
t.index ["is_public"], name: "index_project_submissions_on_is_public"
t.index ["lesson_id"], name: "index_project_submissions_on_lesson_id"
t.index ["user_id", "lesson_id"], name: "index_project_submissions_on_user_id_and_lesson_id", unique: true, where: "(discarded_at IS NULL)"
t.index ["user_id"], name: "index_project_submissions_on_user_id"
end
create_table "sections", id: :serial, force: :cascade do |t|
t.string "title", limit: 255
t.integer "position", null: false
t.integer "course_id", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.text "description"
t.string "identifier_uuid", default: "", null: false
t.index ["course_id"], name: "index_sections_on_course_id"
t.index ["identifier_uuid"], name: "index_sections_on_identifier_uuid", unique: true
t.index ["position"], name: "index_sections_on_position"
end
create_table "success_stories", id: :serial, force: :cascade do |t|
t.string "student_name"
t.string "avatar_path_name"
t.text "story_content"
t.string "job_title"
t.string "social_media_link"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "user_providers", id: :serial, force: :cascade do |t|
t.integer "user_id"
t.string "provider"
t.string "uid"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.index ["user_id"], name: "index_user_providers_on_user_id"
end
create_table "users", id: :serial, force: :cascade do |t|
t.string "email", limit: 255, default: "", null: false
t.string "encrypted_password", limit: 255, default: "", null: false
t.string "reset_password_token", limit: 255
t.datetime "reset_password_sent_at"
t.datetime "remember_created_at"
t.integer "sign_in_count", default: 0
t.datetime "current_sign_in_at"
t.datetime "last_sign_in_at"
t.string "current_sign_in_ip", limit: 255
t.string "last_sign_in_ip", limit: 255
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "username", limit: 255
t.text "learning_goal"
t.string "confirmation_token", limit: 255
t.datetime "confirmed_at"
t.datetime "confirmation_sent_at"
t.string "unconfirmed_email", limit: 255
t.boolean "admin", default: false, null: false
t.string "avatar"
t.integer "path_id", default: 1
t.boolean "banned", default: false, null: false
t.index ["confirmation_token"], name: "index_users_on_confirmation_token", unique: true
t.index ["email"], name: "index_users_on_email", unique: true
t.index ["reset_password_token"], name: "index_users_on_reset_password_token", unique: true
t.index ["username"], name: "index_users_on_username"
end
create_table "votes", id: :serial, force: :cascade do |t|
t.string "votable_type"
t.integer "votable_id"
t.string "voter_type"
t.integer "voter_id"
t.boolean "vote_flag"
t.string "vote_scope"
t.integer "vote_weight"
t.datetime "created_at"
t.datetime "updated_at"
t.index ["votable_id", "votable_type", "vote_scope"], name: "index_votes_on_votable_id_and_votable_type_and_vote_scope"
t.index ["voter_id", "voter_type", "vote_scope"], name: "index_votes_on_voter_id_and_voter_type_and_vote_scope"
end
add_foreign_key "flags", "project_submissions"
add_foreign_key "flags", "users", column: "flagger_id"
add_foreign_key "lesson_completions", "lessons", on_delete: :cascade
add_foreign_key "lessons", "courses"
add_foreign_key "path_prerequisites", "paths"
add_foreign_key "path_prerequisites", "paths", column: "prerequisite_id"
add_foreign_key "project_submissions", "lessons"
add_foreign_key "project_submissions", "users"
end
| 43.82197 | 143 | 0.716311 |
39249053ee0329eb547e4cb8bc08d9e24c1b4f22 | 406 | # frozen_string_literal: true
class AddCertAndKeyToServerlessDomainCluster < ActiveRecord::Migration[5.2]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
def change
add_column :serverless_domain_cluster, :encrypted_key, :text
add_column :serverless_domain_cluster, :encrypted_key_iv, :string, limit: 255
add_column :serverless_domain_cluster, :certificate, :text
end
end
| 29 | 81 | 0.795567 |
7a2ba5036e07bd82326e92a2c9c04d25e38e3cc7 | 3,781 | # frozen_string_literal: true
require "active_support"
require "active_support/callbacks"
require "active_support/inflector"
require "active_support/core_ext/hash/keys"
require "active_support/core_ext/module/delegation"
require "active_support/core_ext/string/inflections"
require "short_circu_it"
require "json_log_converter"
require "technologic/version"
require "technologic/event"
require "technologic/subscriber/base"
require "technologic/fatal_subscriber"
require "technologic/error_subscriber"
require "technologic/warn_subscriber"
require "technologic/info_subscriber"
require "technologic/debug_subscriber"
require "technologic/logger"
require "technologic/config_options"
require "technologic/setup"
module Technologic
extend ActiveSupport::Concern
SEVERITIES = %i[debug info warn error fatal].freeze
EXCEPTION_SEVERITIES = %i[error fatal].freeze
ACTIVEJOB_WORKAROUND_FIRST_VERSION = Gem::Version.new("6.1.0")
included do
delegate :_tl_instrument, :surveil, to: :class
protected :_tl_instrument, :surveil
SEVERITIES.each do |severity|
delegate severity, to: :class
protected severity # rubocop:disable Style/AccessModifierDeclarations
end
EXCEPTION_SEVERITIES.each do |severity|
method_name = "#{severity}!"
delegate method_name, to: :class
protected method_name # rubocop:disable Style/AccessModifierDeclarations
end
end
protected
# DEP-2021-01-14
# Remove this method
def instrument(*args, **opts, &block)
# Targeted workaround for ActiveJob#instrument in Rails 6.1+
return super if defined?(ActiveJob) && self.class <= ActiveJob::Base && ActiveJob.version >= ACTIVEJOB_WORKAROUND_FIRST_VERSION
self.class.instrument(*args, **opts, &block)
end
module ClassMethods
# DEP-2021-01-14
# Remove this method
def instrument(*args, **opts, &block)
ActiveSupport::Deprecation.warn("Technologic.instrument is deprecated. Instead, use the corresponding severity-level convenience method (#info, #error etc)")
_tl_instrument(*args, **opts, &block)
end
def surveil(event, severity: :info, **data, &block)
raise LocalJumpError unless block_given?
raise ArgumentError, "Invalid severity: #{severity}" unless severity.to_sym.in?(SEVERITIES)
_tl_instrument(severity, "#{event}_started", **data)
_tl_instrument(severity, "#{event}_finished", &block)
end
SEVERITIES.each do |severity|
define_method(severity) { |event, **data, &block| _tl_instrument(severity, event, **data, &block) }
end
EXCEPTION_SEVERITIES.each do |severity|
define_method("#{severity}!") do |exception = StandardError, message = nil, **data, &block|
if exception.is_a?(Exception)
_tl_instrument(
severity,
exception.class.name.demodulize,
**{
message: exception.message,
additional_message: message,
}.compact,
**data,
&block
)
raise exception
else
_tl_instrument severity, exception.name.demodulize, message: message, **data, &block
raise exception, message
end
end
end
protected
def _tl_instrument(severity, event, **data, &block)
ActiveSupport::Notifications.instrument("#{event}.#{name}.#{severity}", data, &block).tap do
# If a block was defined, :instrument will return the value of the block.
# Otherwise, :instrument will return nil, since it didn't do anything.
# Returning true here allows us to do fun things like `info :subscription_created and return subscription`
return true unless block_given?
end
end
end
end
require "technologic/railtie" if defined?(Rails)
| 31.508333 | 163 | 0.703518 |
bb65fe7db85b1e6a2a7fe143e6b6e5d5beba583d | 249 | module Moneybird::Resource
class Webhook
include Moneybird::Resource
extend Moneybird::Resource::ClassMethods
has_attributes %i(
id
administration_id
url
last_http_status
last_http_body
)
end
end
| 14.647059 | 44 | 0.670683 |
1c540779e480727cd315265277662c4a2084facd | 1,644 | require 'spec_helper'
RSpec.describe Elasticband::Filter::Near do
describe '#to_h' do
subject { described_class.new(options).to_h }
context 'with `:latitude` and `:longitude` option' do
let(:options) { { latitude: 12.5, longitude: -34.6 } }
it 'contains the given latitude and longitude options' do
is_expected.to eq(
geo_distance: {
location: { lat: 12.5, lon: -34.6 },
distance: '100km',
distance_type: :arc
}
)
end
end
context 'with `:on` option' do
let(:options) { { on: :loc, latitude: 12.5, longitude: -34.6 } }
it 'contains the given on option' do
is_expected.to eq(
geo_distance: {
loc: { lat: 12.5, lon: -34.6 },
distance: '100km',
distance_type: :arc
}
)
end
end
context 'with `:distance` option' do
let(:options) { { distance: '5km', latitude: 12.5, longitude: -34.6 } }
it 'contains the given distance option' do
is_expected.to eq(
geo_distance: {
location: { lat: 12.5, lon: -34.6 },
distance: '5km',
distance_type: :arc
}
)
end
end
context 'with `:type` option' do
let(:options) { { distance: '5km', latitude: 12.5, longitude: -34.6, type: :plane } }
it 'contains the given type option' do
is_expected.to eq(
geo_distance: {
location: { lat: 12.5, lon: -34.6 },
distance: '5km',
distance_type: :plane
}
)
end
end
end
end
| 25.6875 | 91 | 0.515815 |
626b16b0303e466fb0674331c9994a2dcbe32e7d | 592 | require "grpc"
module Lnrpc
class MacaroonInterceptor < GRPC::ClientInterceptor
def initialize(macaroon_hex)
@macaroon = macaroon_hex
end
def inject_macaroon_metadata(request:, call:, method:, metadata:)
if !metadata.has_key?('macaroon') && !metadata.has_key?(:macaroon)
metadata[:macaroon] = @macaroon
end
yield
end
alias :request_response :inject_macaroon_metadata
alias :client_streamer :inject_macaroon_metadata
alias :server_streamer :inject_macaroon_metadata
alias :bidi_streamer :inject_macaroon_metadata
end
end
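# Usage sketch (illustrative only): wiring the interceptor into a generated gRPC
# stub. The stub class, host, credential files, and macaroon path are assumptions,
# not part of this file.
#
#   macaroon_hex = File.binread("admin.macaroon").unpack1("H*")
#   interceptor  = Lnrpc::MacaroonInterceptor.new(macaroon_hex)
#
#   stub = Lnrpc::Lightning::Stub.new(
#     "localhost:10009",
#     GRPC::Core::ChannelCredentials.new(File.read("tls.cert")),
#     interceptors: [interceptor]
#   )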
| 25.73913 | 72 | 0.728041 |
3944ef1d5e9111fd8a747de7793c857394b803a7 | 915 |
def build
# Fetch the code:
system "go get github.com/spf13/cobra"
system "go get github.com/summerwind/h2spec"
# This builds `h2spec` into the current directory
system "go build ~/go/src/github.com/summerwind/h2spec/cmd/h2spec/h2spec.go"
end
def test
server do
system("./h2spec", "-p", "7272")
end
end
private
def server
require 'async'
require 'async/container'
require 'async/http/server'
require 'async/io/host_endpoint'
endpoint = Async::IO::Endpoint.tcp('127.0.0.1', 7272)
container = Async::Container.new
Console.logger.info(self){"Starting server..."}
container.run(count: 1) do
server = Async::HTTP::Server.for(endpoint, protocol: Async::HTTP::Protocol::HTTP2, scheme: "https") do |request|
Protocol::HTTP::Response[200, {'content-type' => 'text/plain'}, ["Hello World"]]
end
Async do
server.run
end
end
yield if block_given?
ensure
container&.stop
end
| 20.333333 | 114 | 0.700546 |
21cd61dd6a9366f600cebddc809ce1dd73919dfa | 346 | class Answer < ApplicationRecord
belongs_to :question
validates :body, presence: true
validate :validate_answers_number, on: :create
scope :correct, -> { where(correct: true) }
private
def validate_answers_number
    errors.add(:base, 'you can\'t have fewer than 1 or more than 4 answers') if question.answers.count > 4
end
end
| 23.066667 | 106 | 0.728324 |
d5fcb63b13ba13b11b6f7e0321338f9dcef498e2 | 1,977 | require 'spec_helper'
describe 'API Description' do
context 'with no additional options' do
subject do
Class.new(Grape::API) do
add_swagger_documentation
end
end
it 'describes the API with defaults' do
routes = subject.endpoints.first.routes
expect(routes.count).to eq 2
expect(routes.first.route_description).to eq 'Swagger compatible API description'
expect(routes.first.route_params).to eq('locale' => { desc: 'Locale of API documentation', type: 'Symbol', required: false })
expect(routes.last.route_description).to eq 'Swagger compatible API description for specific API'
expect(routes.last.route_params).to eq('name' => { desc: 'Resource name of mounted API', type: 'String', required: true },
'locale' => { desc: 'Locale of API documentation', type: 'Symbol', required: false })
end
end
context 'with additional options' do
subject do
Class.new(Grape::API) do
add_swagger_documentation \
api_documentation: { desc: 'First', params: { x: 1 }, xx: 11 },
specific_api_documentation: { desc: 'Second', params: { y: 42 }, yy: 4242 }
end
end
it 'describes the API with defaults' do
routes = subject.endpoints.first.routes
expect(routes.count).to eq 2
expect(routes.first.route_description).to eq 'First'
expect(routes.first.route_params).to eq(x: 1, 'locale' => { desc: 'Locale of API documentation', type: 'Symbol', required: false })
expect(routes.first.route_xx).to eq(11)
expect(routes.last.route_description).to eq 'Second'
expect(routes.last.route_params).to eq('name' => { desc: 'Resource name of mounted API', type: 'String', required: true }, y: 42,
'locale' => { desc: 'Locale of API documentation', type: 'Symbol', required: false })
expect(routes.last.route_yy).to eq(4242)
end
end
end
| 44.931818 | 137 | 0.64087 |
4a20caf7f25bd414ad924ec6673ef48a507705f7 | 73 | require 'rubygems'
require 'spec'
Spec::Runner.configure do |config|
end | 14.6 | 34 | 0.767123 |
bf0d93eb219a44cd6d959784c3219a881c352b30 | 57 | module BadgesEngine
module ApplicationHelper
end
end
| 11.4 | 26 | 0.824561 |
ab12b35db1974fedcae3bcb1591ff22e1bb7bc87 | 833 | Pod::Spec.new do |s|
s.name = "ApplicationEventObserver"
s.version = "2.0"
s.summary = "Application event notification (e.g. UIApplicationDidBecomeActiveNotification) handling in Swift."
s.description = <<-DESC
Application event notification (e.g. UIApplicationDidBecomeActiveNotification) handling in Swift.
You don't have to use `NSNotificationCenter`.
DESC
s.homepage = "https://github.com/sgr-ksmt/ApplicationEventObserver"
s.license = "MIT"
s.author = "Suguru Kishimoto"
s.platform = :ios, "10.0"
s.ios.deployment_target = "10.0"
s.swift_version = "5.0"
s.source = { :git => "https://github.com/sgr-ksmt/ApplicationEventObserver.git", :tag => s.version.to_s }
s.source_files = "ApplicationEventObserver/**/*.swift"
end
| 34.708333 | 118 | 0.655462 |
084b9392c8f252bb946c3c29be1cd52cee34dff8 | 1,464 | # frozen_string_literal: true
require File.expand_path("../lib/stimulus_reflex/version", __FILE__)
Gem::Specification.new do |gem|
gem.name = "stimulus_reflex"
gem.license = "MIT"
gem.version = StimulusReflex::VERSION
gem.authors = ["Nathan Hopkins"]
gem.email = ["[email protected]"]
gem.homepage = "https://github.com/stimulusreflex/stimulus_reflex"
gem.summary = "Build reactive applications with the Rails tooling you already know and love."
gem.post_install_message = <<~MESSAGE
Get support for StimulusReflex and CableReady on Discord:
https://discord.gg/stimulus-reflex
MESSAGE
gem.metadata = {
"bug_tracker_uri" => "https://github.com/stimulusreflex/stimulus_reflex/issues",
"changelog_uri" => "https://github.com/stimulusreflex/stimulus_reflex/CHANGELOG.md",
"documentation_uri" => "https://docs.stimulusreflex.com",
"homepage_uri" => gem.homepage,
"source_code_uri" => gem.homepage
}
gem.files = Dir["app/**/*", "lib/**/*", "bin/*", "[A-Z]*"]
gem.test_files = Dir["test/**/*.rb"]
gem.add_dependency "rack"
gem.add_dependency "nokogiri"
gem.add_dependency "rails", ">= 5.2"
gem.add_dependency "redis"
gem.add_dependency "cable_ready", "5.0.0.pre3"
gem.add_development_dependency "bundler", "~> 2.0"
gem.add_development_dependency "pry-nav"
gem.add_development_dependency "pry"
gem.add_development_dependency "rake"
gem.add_development_dependency "standardrb", "~> 1.0"
end
| 34.046512 | 95 | 0.714481 |
876fbbc9681490dd03091068cdd152244569a484 | 3,986 | module Cryptopunks
class Design ## todo/fix - move to its own file!!!
end # class Design
##############
## todo/check:
## find a better way to (auto?) include more designs?
class DesignSeries ## find a better name for class (just use Series?) - why? why not?
def self.build( dir )
data = {}
paths = Dir.glob( "#{dir}/**.txt" )
paths.each do |path|
basename = File.basename( path, File.extname( path ) )
text = File.open( path, 'r:utf-8' ) { |f| f.read }
## todo/check: auto-parse "ahead of time" here
## or keep "raw" text - why? why not?
data[ basename ] = text
end
data
end
def initialize( dir )
@dir = dir # e.g. "#{Cryptopunks.root}/config/more"
end
def data
## note: lazy load / build on first demand only
@data ||= self.class.build( @dir )
end
def [](key) data[ key ]; end
def size() data.size; end
def keys() data.keys; end
def to_h() data; end ## todo/check: use to_hash() - why? why not?
end # class DesignSeries
class Image
def self.read( path ) ## convenience helper
img = ChunkyPNG::Image.from_file( path )
new( img )
end
def initialize( initial=nil, design: nil,
colors: nil )
if initial
## pass image through as-is
img = initial
else
## todo/fix:
## move design code into design class!!!
## for now assume design is a string
## split into parts
## original/alien-male or original@alien-male
## more/alien-female or more@alien-female
## original/human-male+darker or original@human-male!darker ????
## human-male!darker ?????
## keep @ as separator too - why? why not?
parts = design.split( %r{[@/]} )
parts.unshift( '*' ) if parts.size == 1 ## assume "all-in-one" series (use * as name/id/placeholder)
series_key = parts[0]
design_composite = parts[1]
        ## todo/check - find a way for an unambiguous (color) variant key
## use unique char e.g. +*!# or such
more_parts = design_composite.split( %r{[!+]} )
design_key = more_parts[0]
variant_key = more_parts[1] ## color variant for now (for humans) e.g. lighter/light/dark/darker
series = if ['*','**','_','__'].include?( series_key )
DESIGNS ## use all-series-in-one collection
else
case series_key
when 'original' then DESIGNS_ORIGINAL
when 'more' then DESIGNS_MORE
else raise ArgumentError, "unknown design series >#{series_key}<; sorry"
end
end
design = series[ design_key ]
        raise ArgumentError, "unknown design >#{design_key}< in series >#{series_key}<; sorry" if design.nil?
if colors.nil? ## try to auto-fill in colors
## note: (auto-)remove _male,_female qualifier if exist
colors_key = design_key.sub( '-male', '' ).sub( '-female', '' )
colors = COLORS[ colors_key ]
## allow / support color scheme variants (e.g. lighter/light/dark/darker) etc.
if colors.is_a?(Hash)
if variant_key
colors = colors[ variant_key ]
raise ArgumentError, "no colors defined for variant >#{variant_key}< for design >#{design_key}< in series >#{series_key}<; sorry" if colors.nil?
else ## note: use (fallback to) first color scheme if no variant key present
colors = colors[ colors.keys[0] ]
end
end
raise ArgumentError, "no (default) colors defined for design >#{design_key}< in series >#{series_key}<; sorry" if colors.nil?
end
## note: unwrap inner image before passing on to super c'tor
img = Pixelart::Image.parse( design, colors: colors ).image
end
super( img.width, img.height, img )
end
end # class Image
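  # Usage sketch (illustrative only), following the "series/design+variant"
  # strings sketched in the comments above; colors are auto-filled from COLORS
  # when none are passed in.
  #
  #   Cryptopunks::Image.new( design: 'original/alien-male' )
  #   Cryptopunks::Image.new( design: 'more/alien-female' )
  #   Cryptopunks::Image.new( design: 'original/human-male+darker' )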
end # module Cryptopunks | 32.942149 | 159 | 0.576518 |
265f696507aaab87acaa26b09e66209de509b8bb | 5,863 | module SamlIdpAuthConcern
extend ActiveSupport::Concern
extend Forwardable
included do
# rubocop:disable Rails/LexicallyScopedActionFilter
before_action :validate_saml_request, only: :auth
before_action :validate_service_provider_and_authn_context, only: :auth
before_action :store_saml_request, only: :auth
before_action :check_sp_active, only: :auth
# rubocop:enable Rails/LexicallyScopedActionFilter
end
private
def check_sp_active
return if current_service_provider&.active?
redirect_to sp_inactive_error_url
end
def validate_service_provider_and_authn_context
@saml_request_validator = SamlRequestValidator.new
@result = @saml_request_validator.call(
service_provider: current_service_provider,
authn_context: requested_authn_contexts,
authn_context_comparison: saml_request.requested_authn_context_comparison,
nameid_format: name_id_format,
)
return if @result.success?
analytics.track_event(Analytics::SAML_AUTH, @result.to_h)
render 'saml_idp/auth/error', status: :bad_request
end
def name_id_format
@name_id_format ||= specified_name_id_format || default_name_id_format
end
def specified_name_id_format
if recognized_name_id_format? || current_service_provider&.use_legacy_name_id_behavior
saml_request.name_id_format
end
end
def recognized_name_id_format?
Saml::Idp::Constants::VALID_NAME_ID_FORMATS.include?(saml_request.name_id_format)
end
def default_name_id_format
if current_service_provider&.email_nameid_format_allowed
return Saml::Idp::Constants::NAME_ID_FORMAT_EMAIL
end
Saml::Idp::Constants::NAME_ID_FORMAT_PERSISTENT
end
def store_saml_request
ServiceProviderRequestHandler.new(
url: request_url,
session: session,
protocol_request: saml_request,
protocol: FederatedProtocols::Saml,
).call
end
def requested_authn_contexts
@requested_authn_contexts ||= saml_request.requested_authn_contexts.presence ||
[default_aal_context]
end
def default_aal_context
if current_service_provider&.default_aal
Saml::Idp::Constants::AUTHN_CONTEXT_AAL_TO_CLASSREF[current_service_provider.default_aal]
else
Saml::Idp::Constants::DEFAULT_AAL_AUTHN_CONTEXT_CLASSREF
end
end
def default_ial_context
if current_service_provider&.ial
Saml::Idp::Constants::AUTHN_CONTEXT_IAL_TO_CLASSREF[current_service_provider.ial]
else
Saml::Idp::Constants::IAL1_AUTHN_CONTEXT_CLASSREF
end
end
def requested_aal_authn_context
saml_request.requested_aal_authn_context || default_aal_context
end
def requested_ial_authn_context
saml_request.requested_ial_authn_context || default_ial_context
end
def link_identity_from_session_data
IdentityLinker.new(current_user, current_issuer).
link_identity(ial: ial_context.ial_for_identity_record)
end
def identity_needs_verification?
ial2_requested? && current_user.decorate.identity_not_verified?
end
def_delegators :ial_context, :ial2_requested?
def ial_context
@ial_context ||= IalContext.new(
ial: requested_ial_authn_context,
service_provider: current_service_provider,
)
end
def active_identity
current_user.last_identity
end
def encode_authn_response(principal, opts)
build_asserted_attributes(principal)
super(principal, opts)
end
def attribute_asserter(principal)
AttributeAsserter.new(
user: principal,
service_provider: current_service_provider,
name_id_format: name_id_format,
authn_request: saml_request,
decrypted_pii: decrypted_pii,
user_session: user_session,
)
end
def decrypted_pii
cacher = Pii::Cacher.new(current_user, user_session)
cacher.fetch
end
def build_asserted_attributes(principal)
asserter = attribute_asserter(principal)
asserter.build
end
def saml_response
encode_response(
current_user,
name_id_format: name_id_format,
authn_context_classref: requested_aal_authn_context,
reference_id: active_identity.session_uuid,
encryption: encryption_opts,
signature: saml_response_signature_options,
signed_response_message: current_service_provider&.signed_response_message_requested,
)
end
def encryption_opts
query_params = UriService.params(request.original_url)
if query_params[:skip_encryption].present? && current_service_provider&.skip_encryption_allowed
nil
elsif current_service_provider&.encrypt_responses?
cert = saml_request.service_provider.matching_cert ||
current_service_provider&.ssl_certs&.first
{
cert: cert,
block_encryption: current_service_provider&.block_encryption,
key_transport: 'rsa-oaep-mgf1p',
}
end
end
def saml_response_signature_options
endpoint = SamlEndpoint.new(request)
{
x509_certificate: endpoint.x509_certificate,
secret_key: endpoint.secret_key,
}
end
def current_service_provider
return @_sp if defined?(@_sp)
@_sp = ServiceProvider.find_by(issuer: current_issuer)
end
def current_issuer
@_issuer ||= saml_request.service_provider&.identifier
end
def request_url
url = URI.parse request.original_url
query_params = Rack::Utils.parse_nested_query url.query
unless query_params['SAMLRequest']
orig_saml_request = saml_request.options[:get_params][:SAMLRequest]
query_params['SAMLRequest'] = orig_saml_request
end
unless query_params['RelayState']
orig_relay_state = saml_request.options[:get_params][:RelayState]
query_params['RelayState'] = orig_relay_state if orig_relay_state
end
url.query = Rack::Utils.build_query(query_params).presence
url.to_s
end
end
| 28.740196 | 99 | 0.757291 |
280cc28ce21a03e69b50ee7eedb87c6f36a80df7 | 128 | # frozen_string_literal: true
class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
end
| 21.333333 | 52 | 0.835938 |
e28a95099f228c679ba9f09332f762a54c27cfec | 642 | # frozen_string_literal: true
module Users
class OmniauthCallbacksController < Devise::OmniauthCallbacksController
# You should configure your model like this:
# devise :omniauthable, omniauth_providers: [:twitter]
# You should also create an action method in this controller like this:
# def twitter
# end
# More info at:
# https://github.com/plataformatec/devise#omniauth
# def passthru
# super
# end
# def failure
# super
# end
# protected
# The path used when OmniAuth fails
# def after_omniauth_failure_path_for(scope)
# super(scope)
# end
end
end
| 20.709677 | 75 | 0.672897 |
ac93b5aecddb5ed26a2542115cca58b6626a3790 | 352 | require 'test_helper'
class EventsControllerTest < ActionDispatch::IntegrationTest
test "should get new" do
get events_new_url
assert_response :success
end
test "should get show" do
get events_show_url
assert_response :success
end
test "should get index" do
get events_index_url
assert_response :success
end
end
| 17.6 | 60 | 0.741477 |
7992869aa7779b3ad4bef51c2e9f29532d49bfe4 | 1,949 | class Bison < Formula
desc "Parser generator"
homepage "https://www.gnu.org/software/bison/"
# X.Y.9Z are beta releases that sometimes get accidentally uploaded to the release FTP
url "https://ftp.gnu.org/gnu/bison/bison-3.7.3.tar.xz"
mirror "https://ftpmirror.gnu.org/bison/bison-3.7.3.tar.xz"
sha256 "88d9e36856b004c0887a12ba00ea3c47db388519629483dd8c3fce9694d4da6f"
license "GPL-3.0-or-later"
version_scheme 1
livecheck do
url :stable
end
bottle do
sha256 "f5bfd5e40ae8b0501e398e2ea29f95e8b80bdbabab2daedcb40c19cf5c4e3491" => :catalina
sha256 "8a99fb499af4af8e49d93e8eae8b48e26a4c30400a9d7cc66e70ea0621b47e7e" => :mojave
sha256 "20c775058c2bc15b2f7ccf27cf3b2817a98ea13cf656fa69f1877af2ea62cf04" => :high_sierra
sha256 "0876f2a7b92a29c4412aac61623681785bfc742bc79f94083326ccdbe4f2d341" => :x86_64_linux
end
keg_only :provided_by_macos
uses_from_macos "m4"
def install
# https://www.mail-archive.com/[email protected]/msg13512.html
ENV.deparallelize unless OS.mac?
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
test do
(testpath/"test.y").write <<~EOS
%{ #include <iostream>
using namespace std;
extern void yyerror (char *s);
extern int yylex ();
%}
%start prog
%%
prog: // empty
| prog expr '\\n' { cout << "pass"; exit(0); }
;
expr: '(' ')'
| '(' expr ')'
| expr expr
;
%%
char c;
void yyerror (char *s) { cout << "fail"; exit(0); }
int yylex () { cin.get(c); return c; }
int main() { yyparse(); }
EOS
system "#{bin}/bison", "test.y"
system ENV.cxx, "test.tab.c", "-o", "test"
assert_equal "pass", shell_output("echo \"((()(())))()\" | ./test")
assert_equal "fail", shell_output("echo \"())\" | ./test")
end
end
| 31.435484 | 94 | 0.626988 |
ffc8a8d49ce9ab83254208c2f77ef4b58aea1e3d | 857 | require 'singleton'
module MinimalMatch
# Array::Anything. it will always be equal to whatever you compare it to
class Anything < MinimalMatchObject
include MatchMultiplying
# it matches anything OTHER than another minimal matchobject
def === who_cares
# i think there is a prettier way to do this with coerce
# basically Anything is equal to anything OTHER than a different
      # minimal match object
if who_cares.kind_of? MinimalMatchObject and not who_cares.eql? self
false
else
true
end
end
alias :== :===
def to_s
"<ANYTHING>"
end
alias :inspect :to_s
def coerce other
return self, other
end
end
Anything.__send__ :include, Singleton
def anything
Anything.instance()
end
module_function :anything
end
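# Behaviour sketch (illustrative only), following the comparison logic above:
#
#   MinimalMatch.anything == 5                       # => true
#   MinimalMatch.anything == [:a, "b"]               # => true
#   MinimalMatch.anything == MinimalMatch.anything   # => true (same singleton)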
# vim: set ts=2 sw=2 tw=0 :
| 23.162162 | 74 | 0.670945 |
ab7d7b2c64b4f4cc4589dba4d70494326792a726 | 23,537 | require_relative '../utilities/stream_comms'
require_relative '../utilities/mob'
require_relative 'all'
module Paidgeeks
module RubyFC
module Engine
# The game state changer processes all messages that affect the game state. It
# is what implements the memoization that allows game playback. As such, the
# methods in this class form the official internal game API and the game-to-fleet
# API. See the notes below.
#
# Notes:
#
# Messages that are sent to fleets after being processed are
# renamed to end with "_notify", and this is what forms 99% of the
# game-to-fleet API.
#
# Game coordinator will also send messages to the fleet that are not well-
# defined in this file: game_config, begin_tick, and end_tick.
#
# Please note that the GameStateChanger class is, itself STATELESS. Keep it
# that way. Also, note that GameStateChanger trusts its input. Callers are
# responsible for ensuring that all messages are actionable as-is.
class GameStateChanger
# Send a message to a fleet. This is here to ensure that all messages
# are journaled (see game_state_changer_logging.rb)
# Returns:
# - The message sent to the fleet, unencoded
def self.msg_to_fleet(gs, fm, msg)
fm.queue_output(Paidgeeks.encode(msg))
end
# Send a warning message to a fleet. This is not really a game state change,
# but it will generate a fleet message. It's basically syntactic sugar for
# msg_to_fleet. Note the name does not end in _msg, so this method will
# NOT be journaled (but msg_to_fleet will, due to the logging aspect).
# Parameters:
# - msg => A Hash: {
# "type" => "warn_fleet",
# "original_message" => the original message hash that caused this warning,
# "warning" => text string describing the warning (e.g. "source_ship invalid")
# "fleet_source" => false | true,
# "fid" => fleet id,
# }
def self.warn_fleet(gs, msg)
fleet = gs.fleets[msg["fid"]]
msg_to_fleet(gs, fleet[:manager], msg)
end
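        # Call sketch (illustrative only); the ids and warning text are
        # placeholders, the hash mirrors the documented format above:
        #
        #   GameStateChanger.warn_fleet(gs, {
        #     "type" => "warn_fleet",
        #     "original_message" => original_msg,
        #     "warning" => "source_ship invalid",
        #     "fleet_source" => false,
        #     "fid" => 1,
        #   })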
# Tick and update the time. This also, somewhat unexpectedly, resets the "...in the last tick" state members.
# Parameters:
# - msg => A Hash: {
# "type" => "tick"
# "fleet_source" => false | true,
# }
def self.tick_msg(gs, msg)
gs.tick += 1
gs.time = gs.tick * gs.config[:seconds_per_tick]
gs.tick_scan_reports = []
gs.munition_intercepts = []
end
# Update tick acknowledged for a fleet
# Parameters:
# - msg => A Hash: {
# "type" => "tick_acknowledged",
# "tick" => tick,
# "fid" => fid,
# "fleet_source" => false | true,
# }
def self.tick_acknowledged_msg(gs, msg)
gs.fleets[msg["fid"]][:last_ack_tick] = msg["tick"]
end
        # Set fleet metadata (from the fleet itself)
# Parameters:
# - msg => A Hash: {
# "type" => "set_fleet_metadata",
# "author" => "Author's name",
# "fleet_name" => "Name of the fleet",
# "fid" => fid,
# "fleet_source" => false | true,
# }
def self.set_fleet_metadata_msg(gs, msg)
fleet = gs.fleets[msg["fid"]]
fm = fleet[:manager]
fm.fleet_metadata["author"] = msg["author"]
fm.fleet_metadata["fleet_name"] = msg["fleet_name"]
end
# Add a fleet. This also creates the fleet manager to manage the fleet.
# Parameters:
# - msg => A Hash: {
# "type" => "add_fleet"
# "fid" => fleet id
# "ff" => fleet file to load
# "last_ack_tick" => initial value for the last acknowledged tick from this fleet
# "log_stream" => IO instance for fleet logging
# "fleet_source" => false | true,
# }
def self.add_fleet_msg(gs, msg)
mgr = Paidgeeks::RubyFC::Engine::FleetManager.new(msg["ff"], msg["fid"], msg["log_stream"])
gs.add_fleet(msg["fid"], mgr, msg["last_ack_tick"], msg["log_stream"])
end
# Disqualify a fleet, also destroys all fleet's mobs
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "disqualify_fleet",
# "error" => descriptive string of the error
# "backtrace" => String containing the call stack at the time of the error
# "inspected_args" => Array of any relevant arguments pertaining to the error
        #   "fleet_source" => false | true,
        #   "fid" => fleet id,
# }
def self.disqualify_fleet_msg(gs, msg)
fleet = gs.fleets[msg["fid"]]
fleet[:manager].fleet_state = :error
fleet[:manager].fleet_metadata[:error] = msg["error"]
fleet[:manager].fleet_metadata[:backtrace] = msg["backtrace"]
fleet[:manager].fleet_metadata[:inspected_args] = msg["inspected_args"]
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "disqualify_fleet_notify"}))
mids = fleet[:mobs].to_a
mids.each do |mid|
delete_mob_msg(gs, {
"type" => "delete_mob",
"mid" => mid,
"reason" => "fleet disqualified"
})
end
end
# Update the fleet state
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "fleet_state",
# "fid" => fleet id,
# "state" => Fleet state, see Fleet class docs for possible values.
# "fleet_source" => false | true,
# }
def self.fleet_state_msg(gs, msg)
fleet = gs.fleets[msg["fid"]]
fleet[:manager].fleet_state = msg["state"].to_sym
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "fleet_state_notify"}))
end
# Integrate a mob (kinematic repositioning). This also does game rule enforcement for position:
# all mobs must remain on the playing field. To prevent mobs from "hiding" just on the other side
# (and complicating scanning logic), motion is not wrapped around a-la pacman. Instead, mobs will
# just stop at the barrier created by the boundary.
#
# Also, note that the msg parameter is modified directly by this function. This is not
        # typical behavior for the gsc, since it normally will trust any input. In this case, it
# does not trust the positional data (x and y position). This is due to the Mob class
# being playing-field agnostic but the game needs to enforce the rules somewhere. The other
# sensible option would be to create a rules enforcement class that proxies access to the
# game state changer and therefore ensures that all the rules are being followed. That
# feels like overkill at this point.
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "integrate_mob",
# "x_pos" => x position (units)
# "y_pos" => y position (units),
# "heading" => heading (radians),
# "velocity" => velocity (units per second),
# "turn_rate" => turn rate (radians per second),
# "valid_time" => the time this data is valid (seconds),
# "turn_start_time" => If turn_rate != 0, this is the time to start turning in order to reach a specific heading,
# "turn_stop_time" => If turn_rate != 0, this is the time to stop turning in order to reach a specific heading,
# "turn_stop" => The time to stop turning in order to reach a specific heading,
# "fid" => fleet id,
# "mid" => mob id,
# "fleet_source" => false | true,
# }
# Returns:
# - mob => The mob that was just integrated
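        # Example (hypothetical values; a sketch assuming gs.mobs contains mob 42 owned by fleet 1):
        #
        #   integrate_mob_msg(gs, {
        #     "type" => "integrate_mob",
        #     "x_pos" => 100.0, "y_pos" => 250.0,
        #     "heading" => 1.57, "velocity" => 50.0, "turn_rate" => 0.0,
        #     "valid_time" => gs.time,
        #     "turn_start_time" => 0.0, "turn_stop_time" => 0.0, "turn_stop" => 0.0,
        #     "fid" => 1, "mid" => 42,
        #     "fleet_source" => false,
        #   })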
def self.integrate_mob_msg(gs, msg)
# clamp x and y to be inside the playfield (this is a case of legal motion creating illegal game state)
if msg["x_pos"] < 0.0
msg = msg.merge({"x_pos" => 0.0})
end
if msg["x_pos"] >= gs.config[:field_width]
msg = msg.merge({"x_pos" => gs.config[:field_width] - 1.0})
end
if msg["y_pos"] < 0
msg = msg.merge({"y_pos" => 0.0})
end
if msg["y_pos"] >= gs.config[:field_height]
msg = msg.merge({"y_pos" => gs.config[:field_height] - 1.0})
end
# store updated values
mob = gs.mobs[msg["mid"]]
mob.x_pos = msg["x_pos"]
          mob.y_pos = msg["y_pos"]
mob.heading = msg["heading"]
mob.velocity = msg["velocity"]
mob.turn_rate = msg["turn_rate"]
mob.valid_time = msg["valid_time"]
mob.turn_start_time = msg["turn_start_time"]
mob.turn_stop_time = msg["turn_stop_time"]
mob.turn_stop = msg["turn_stop"]
# notify fleet
fleet = gs.fleets[msg["fid"]]
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "integrate_mob_notify"}))
# return the mob
mob
end
        # Report results of mission. This is not really a game state change, but
# it does need to be recorded and run through the same general processing
# as every other message.
def self.mission_report_msg(gs, msg)
# nothing to do
end
# Reduce fleet credits
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "reduce_credits",
        #     "amount" => Amount of credits to subtract (negative values will increase credits!)
# "fid" => fleet id losing the credits
# "fleet_source" => false | true,
# }
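        # Example (hypothetical values; deducts 150 credits from fleet 1 and notifies it):
        #
        #   reduce_credits_msg(gs, {
        #     "type" => "reduce_credits",
        #     "amount" => 150,
        #     "fid" => 1,
        #     "fleet_source" => false,
        #   })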
def self.reduce_credits_msg(gs, msg)
fleet = gs.fleets[msg["fid"]]
fleet[:credits] -= msg["amount"]
msg = msg.merge({
"new_balance" => fleet[:credits],
"type" => "reduce_credits_notify",
})
msg_to_fleet(gs, fleet[:manager], msg)
end
# Set fleet credits
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "set_credits",
# "amount" => Amount of credits
        #     "fid" => fleet id whose credits are being set
# "fleet_source" => false | true,
# }
def self.set_credits_msg(gs, msg)
fleet = gs.fleets[msg["fid"]]
fleet[:credits] = msg["amount"]
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "set_credits_notify"}))
end
        # Reduce a mob's energy
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "reduce_energy",
# "amount" => Amount of energy to subtract (negative values will increase energy!)
# "mid" => mob id
# "fleet_source" => false | true,
# }
def self.reduce_energy_msg(gs, msg)
mob = gs.mobs[msg["mid"]]
mob.energy -= msg["amount"]
fleet = gs.fleets[mob.fid]
msg = msg.merge({
"new_energy" => mob.energy,
"type" => "reduce_energy_notify"
})
msg_to_fleet(gs, fleet[:manager], msg)
end
        # Set a mob's energy
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "set_energy",
# "new_energy" => Amount of energy
# "mid" => mob id
# "fleet_source" => false | true,
# }
def self.set_energy_msg(gs, msg)
mob = gs.mobs[msg["mid"]]
mob.energy = msg["new_energy"]
fleet = gs.fleets[mob.fid]
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "set_energy_notify"}))
end
# Create a new mob
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash {
# "type" => "create_mob",
# "template" => A Class used as a template for the mob,
# "create_time" => Time of creation for the mob
# "x_pos" => position of new mob,
# "y_pos" => position of new mob,
# "heading" => heading of new mob,
# "velocity" => velocity of new mob,
# "turn_rate" => turn rate of new mob,
# "valid_time" => time of validity for the new mob,
# "turn_start_time" => turn start time of new mob,
# "turn_stop_time" => turn stop time of new mob,
# "turn_stop" => turn stop of new mob,
# "fid" => fleet id of new mob
# "mid" => mob id of new mob (must be unique for entire game!),
        #     "energy" => starting energy for the mob
# "hitpoints" => starting hit points for mob
        #     "last_scan_tick" => tick of the last scan this mob performed,
# "target_mid" => mid of target. This is not required to be a valid mid,
# "launch_param" => launch parameter for the mob, can be anything.
# "fleet_source" => false | true,
# }
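        # Example (hypothetical values; the template class name below is an assumption, not part of this file):
        #
        #   create_mob_msg(gs, {
        #     "type" => "create_mob",
        #     "template" => Paidgeeks::RubyFC::Templates::Fighter, # assumed template class
        #     "create_time" => gs.time,
        #     "x_pos" => 100.0, "y_pos" => 100.0,
        #     "heading" => 0.0, "velocity" => 0.0, "turn_rate" => 0.0,
        #     "valid_time" => gs.time,
        #     "turn_start_time" => 0.0, "turn_stop_time" => 0.0, "turn_stop" => 0.0,
        #     "fid" => 1, "mid" => 1001,
        #     "energy" => 10, "hitpoints" => 5,
        #     "last_scan_tick" => 0, "target_mid" => nil, "launch_param" => nil,
        #     "fleet_source" => true,
        #   })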
def self.create_mob_msg(gs, msg)
mob = Paidgeeks::RubyFC::Mob.from_msg(msg)
gs.mobs[mob.mid] = mob
gs.fleets[mob.fid][:mobs].add(mob.mid)
fleet = gs.fleets[mob.fid]
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "create_mob_notify"}))
end
# Delete a mob
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "delete_mob",
# "mid" => Mob's mid,
# "reason" => A string reason for deleting the mob
# "fleet_source" => false | true,
# }
def self.delete_mob_msg(gs, msg)
mob = gs.mobs[msg["mid"]]
fleet = gs.fleets[mob.fid]
fleet[:mobs].delete(mob.mid)
gs.mobs.delete(mob.mid)
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "delete_mob_notify"}))
end
        # Notify a fleet that one of its munitions intercepted something. This doesn't really change the gamestate,
# but it does follow the *_notify pattern for fleet notification for interesting events. It also
# notifies the mission that someone scored a hit.
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "munition_intercept",
# "munition_mid" => mid of the interceptor,
# "target_mid" => mid of the target,
# "remaining_target_hitpoints" => hitpoints remaining on target after intercept, will be <=0 if target destroyed
# "fleet_source" => false | true,
# }
def self.munition_intercept_msg(gs, msg)
mob = gs.mobs[msg["munition_mid"]]
fleet = gs.fleets[mob.fid]
gs.mission.event_munition_intercept(gs, msg["munition_mid"], msg["target_mid"]) if !gs.mission.nil?
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "munition_intercept_notify"}))
gs.munition_intercepts << msg
end
# Notify fleet of munition target updates. This doesn't really change the gamestate,
# but it does follow the *_notify pattern for fleet notification for interesting events.
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "missile_target_update",
# "munition_mid" => mun_mob.mid,
# "target_mid" => target_mob.mid,
# "x_pos" => target_mob.x_pos,
# "y_pos" => target_mob.y_pos,
# "heading" => target_mob.heading,
# "velocity" => target_mob.velocity,
# "valid_time" => target_mob.valid_time,
# "template" => target_mob.template.class.name,
# "fleet_source" => false | true,
# }
def self.missile_target_update_msg(gs, msg)
mob = gs.mobs[msg["munition_mid"]]
fleet = gs.fleets[mob.fid]
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "missile_target_update_notify"}))
end
        # Reduce a mob's hitpoints
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "reduce_hitpoints",
# "mid" => mid of the mob to modify
# "amount" => number of hitpoints to subtract, set to negative to increase hitpoints
# "fleet_source" => false | true,
# }
def self.reduce_hitpoints_msg(gs, msg)
mob = gs.mobs[msg["mid"]]
mob.hitpoints -= msg["amount"]
fleet = gs.fleets[mob.fid]
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "reduce_hitpoints_notify"}))
end
# Scan
# Parameters:
# - msg => A Hash: {
# "type" => "scan",
# "source_ship" => mid of the scanning ship,
# "azimuth" => absolute azimuth, in degrees, with 0 => North and 90 => East, must be a Float (0.0, not 0)
        #     "range" => The max range of the scan; must be > 0 and must be a Float (0.0, not 0). See config field_width and field_height for the default playing field dimensions.
# "fleet_source" => false | true,
# }
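        # Example (hypothetical values; mob 7 scans due east out to 500 units):
        #
        #   scan_msg(gs, {
        #     "type" => "scan",
        #     "source_ship" => 7,
        #     "azimuth" => 90.0,
        #     "range" => 500.0,
        #     "fleet_source" => true,
        #   })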
def self.scan_msg(gs, msg)
source_ship = gs.mobs[msg["source_ship"]]
source_ship.last_scan_tick = gs.tick
x = source_ship.x_pos
y = source_ship.y_pos
range = msg["range"]
range_squared = range * range
# Get the ship template class; for the playback system, the template
# is a String, not a class. In that case, transform it into the
# ship class.
template = source_ship.template.is_a?(String) ? Paidgeeks.class_from_string(source_ship.template) : source_ship.template
half_theta = 0.5 * (template.scanned_area / range) # scan twice this wide
center = msg["azimuth"]
if center > 180.0 # negative angles to left, makes math below work
center -= 360.0
end
center = Paidgeeks.deg_to_rad(center)
start = Paidgeeks::normalize_to_circle(center - half_theta)
stop = Paidgeeks::normalize_to_circle(center + half_theta)
reports = []
scan_slices = Proc.new do |slice_pairs|
gs.mobs.each do |mid, mob|
next if mob.fid == source_ship.fid
rel_ang = Paidgeeks::normalize_to_circle(Paidgeeks::relative_angle(x, y, mob.x_pos, mob.y_pos))
#puts("#{source_ship.mid} to #{mid} => #{Paidgeeks.rad_to_deg(rel_ang)} #{Paidgeeks.rad_to_deg start} => #{Paidgeeks.rad_to_deg center} => #{Paidgeeks.rad_to_deg stop}")
range2 = Paidgeeks.range2(x,y,mob.x_pos,mob.y_pos)
slice_pairs.each do |pair|
if rel_ang >= pair[0] and rel_ang < pair[1] and range2 <= range_squared
reports << {
"mid" => mid,
"x_pos" => mob.x_pos,
"y_pos" => mob.y_pos,
"heading" => mob.heading,
"velocity" => mob.velocity,
"turn_rate" => 0.0,
"valid_time" => mob.valid_time,
"turn_start_time" => 0.0,
"turn_stop_time" => 0.0,
"turn_stop" => 0.0,
"template" => mob.template.kind_of?(String) ? mob.template : mob.template.name,
"fid" => mob.fid,
}
end # inside slice pair
end # slice pairs
end # each mob
end # Proc
if stop < start # crosses north
scan_slices.call([[start, Paidgeeks::TWOPI],[0.0, stop]])
else # does not cross north
scan_slices.call([[start, stop]])
end
scan_report = {
"type" => "scan_report",
"scan_msg" => msg,
"last_scan_tick" => source_ship.last_scan_tick,
"scan_width" => Paidgeeks.rad_to_deg(2.0*half_theta),
"reports" => reports,
}
fleet = gs.fleets[source_ship.fid]
msg_to_fleet(gs, fleet[:manager], scan_report)
gs.tick_scan_reports << scan_report
end
# Set mob speed.
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "set_speed",
# "mid" => Mob id
# "speed" => The new speed
# "fid" => fleet id
# "fleet_source" => false | true,
# }
def self.set_speed_msg(gs, msg)
mob = gs.mobs[msg["mid"]]
mob.velocity = msg["speed"]
fleet = gs.fleets[mob.fid]
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "set_speed_notify"}))
end
# Turn mob to a new heading
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
# "type" => "turn_to",
# "mid" => mob id of the ship to turn,
# "heading" => final heading, in degrees, must be a Float (0.0, not 0) and [0.0, 360.0)
# "direction" => "clockwise" or "counterclockwise"
# "fid" => fleet id
# "fleet_source" => false | true,
# }
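        # Example (hypothetical values; turns mob 7 clockwise until it faces due east):
        #
        #   turn_to_msg(gs, {
        #     "type" => "turn_to",
        #     "mid" => 7,
        #     "heading" => 90.0,
        #     "direction" => "clockwise",
        #     "fid" => 1,
        #     "fleet_source" => true,
        #   })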
def self.turn_to_msg(gs, msg)
mob = gs.mobs[msg["mid"]]
mob = mob.turn_to(Paidgeeks::deg_to_rad(msg["heading"]), msg["direction"].to_sym)
gs.mobs[mob.mid] = mob
fleet = gs.fleets[mob.fid]
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "turn_to_notify"}))
end
# Turn mob forever so it flies in a circle
#
# This will generate a _notify message to the fleet.
#
# Parameters:
# - msg => A Hash: {
        #     "type" => "turn_forever",
# "mid" => mob id of the ship to turn,
# "rate" => Turn rate, in degrees/second, must be a Float (0.0, not 0)
# "direction" => "clockwise" or "counterclockwise"
# "fid" => fleet id
# "fleet_source" => false | true,
# }
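        # Example (hypothetical values; mob 7 circles counterclockwise at 5 degrees per second):
        #
        #   turn_forever_msg(gs, {
        #     "type" => "turn_forever",
        #     "mid" => 7,
        #     "rate" => 5.0,
        #     "direction" => "counterclockwise",
        #     "fid" => 1,
        #     "fleet_source" => true,
        #   })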
def self.turn_forever_msg(gs, msg)
mob = gs.mobs[msg["mid"]]
mob = mob.turn_forever(Paidgeeks::deg_to_rad(msg["rate"]), msg["direction"].to_sym)
fleet = gs.fleets[mob.fid]
gs.mobs[mob.mid] = mob
msg_to_fleet(gs, fleet[:manager], msg.merge({"type" => "turn_forever_notify"}))
end
end
end
end
end
require_relative '../logging/game_engine/game_state_changer_logging.rb'
| 41.220665 | 183 | 0.541955 |
0379bfeec333723f74a30939b86ff0b323e3d4f0 | 137 | # Be sure to restart your server when you modify this file.
Rails.application.config.session_store :cookie_store, key: '_libra_session'
| 34.25 | 75 | 0.80292 |
334254d0997d616e591140c9a0e62a1476391ed6 | 22,859 | # frozen_string_literal: true
require "test_helper"
require "database/setup"
class ActiveStorage::ManyAttachedTest < ActiveSupport::TestCase
include ActiveJob::TestHelper
setup do
@user = User.create!(name: "Josh")
end
teardown { ActiveStorage::Blob.all.each(&:delete) }
test "attaching existing blobs to an existing record" do
@user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg")
assert_equal "funky.jpg", @user.highlights.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
end
test "attaching existing blobs from signed IDs to an existing record" do
@user.highlights.attach create_blob(filename: "funky.jpg").signed_id, create_blob(filename: "town.jpg").signed_id
assert_equal "funky.jpg", @user.highlights.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
end
test "attaching new blobs from Hashes to an existing record" do
@user.highlights.attach(
{ io: StringIO.new("STUFF"), filename: "funky.jpg", content_type: "image/jpg" },
{ io: StringIO.new("THINGS"), filename: "town.jpg", content_type: "image/jpeg" })
assert_equal "funky.jpg", @user.highlights.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
end
test "attaching new blobs from uploaded files to an existing record" do
@user.highlights.attach fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4")
assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
assert_equal "video.mp4", @user.highlights.second.filename.to_s
end
test "attaching existing blobs to an existing, changed record" do
@user.name = "Tina"
assert @user.changed?
@user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg")
assert_equal "funky.jpg", @user.highlights.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
assert_not @user.highlights.first.persisted?
assert_not @user.highlights.second.persisted?
assert @user.will_save_change_to_name?
@user.save!
assert_equal "funky.jpg", @user.highlights.reload.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
end
test "attaching existing blobs from signed IDs to an existing, changed record" do
@user.name = "Tina"
assert @user.changed?
@user.highlights.attach create_blob(filename: "funky.jpg").signed_id, create_blob(filename: "town.jpg").signed_id
assert_equal "funky.jpg", @user.highlights.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
assert_not @user.highlights.first.persisted?
assert_not @user.highlights.second.persisted?
assert @user.will_save_change_to_name?
@user.save!
assert_equal "funky.jpg", @user.highlights.reload.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
end
test "attaching new blobs from Hashes to an existing, changed record" do
@user.name = "Tina"
assert @user.changed?
@user.highlights.attach(
{ io: StringIO.new("STUFF"), filename: "funky.jpg", content_type: "image/jpg" },
{ io: StringIO.new("THINGS"), filename: "town.jpg", content_type: "image/jpeg" })
assert_equal "funky.jpg", @user.highlights.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
assert_not @user.highlights.first.persisted?
assert_not @user.highlights.second.persisted?
assert @user.will_save_change_to_name?
@user.save!
assert_equal "funky.jpg", @user.highlights.reload.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
end
test "attaching new blobs from uploaded files to an existing, changed record" do
@user.name = "Tina"
assert @user.changed?
@user.highlights.attach fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4")
assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
assert_equal "video.mp4", @user.highlights.second.filename.to_s
assert_not @user.highlights.first.persisted?
assert_not @user.highlights.second.persisted?
assert @user.will_save_change_to_name?
@user.save!
assert_equal "racecar.jpg", @user.highlights.reload.first.filename.to_s
assert_equal "video.mp4", @user.highlights.second.filename.to_s
end
test "attaching existing blobs to an existing record one at a time" do
@user.highlights.attach create_blob(filename: "funky.jpg")
@user.highlights.attach create_blob(filename: "town.jpg")
assert_equal "funky.jpg", @user.highlights.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
@user.reload
assert_equal "funky.jpg", @user.highlights.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
end
test "updating an existing record to attach existing blobs" do
@user.update! highlights: [ create_file_blob(filename: "racecar.jpg"), create_file_blob(filename: "video.mp4") ]
assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
assert_equal "video.mp4", @user.highlights.second.filename.to_s
end
test "updating an existing record to attach existing blobs from signed IDs" do
@user.update! highlights: [ create_blob(filename: "funky.jpg").signed_id, create_blob(filename: "town.jpg").signed_id ]
assert_equal "funky.jpg", @user.highlights.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
end
test "successfully updating an existing record to attach new blobs from uploaded files" do
@user.highlights = [ fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4") ]
assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
assert_equal "video.mp4", @user.highlights.second.filename.to_s
assert_not ActiveStorage::Blob.service.exist?(@user.highlights.first.key)
assert_not ActiveStorage::Blob.service.exist?(@user.highlights.second.key)
@user.save!
assert ActiveStorage::Blob.service.exist?(@user.highlights.first.key)
assert ActiveStorage::Blob.service.exist?(@user.highlights.second.key)
end
test "unsuccessfully updating an existing record to attach new blobs from uploaded files" do
assert_not @user.update(name: "", highlights: [ fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4") ])
assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
assert_equal "video.mp4", @user.highlights.second.filename.to_s
assert_not ActiveStorage::Blob.service.exist?(@user.highlights.first.key)
assert_not ActiveStorage::Blob.service.exist?(@user.highlights.second.key)
end
test "replacing existing, dependent attachments on an existing record via assign and attach" do
[ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |old_blobs|
@user.highlights.attach old_blobs
@user.highlights = []
assert_not @user.highlights.attached?
perform_enqueued_jobs do
@user.highlights.attach create_blob(filename: "whenever.jpg"), create_blob(filename: "wherever.jpg")
end
assert_equal "whenever.jpg", @user.highlights.first.filename.to_s
assert_equal "wherever.jpg", @user.highlights.second.filename.to_s
assert_not ActiveStorage::Blob.exists?(old_blobs.first.id)
assert_not ActiveStorage::Blob.exists?(old_blobs.second.id)
assert_not ActiveStorage::Blob.service.exist?(old_blobs.first.key)
assert_not ActiveStorage::Blob.service.exist?(old_blobs.second.key)
end
end
test "replacing existing, independent attachments on an existing record via assign and attach" do
@user.vlogs.attach create_blob(filename: "funky.mp4"), create_blob(filename: "town.mp4")
@user.vlogs = []
assert_not @user.vlogs.attached?
assert_no_enqueued_jobs only: ActiveStorage::PurgeJob do
@user.vlogs.attach create_blob(filename: "whenever.mp4"), create_blob(filename: "wherever.mp4")
end
assert_equal "whenever.mp4", @user.vlogs.first.filename.to_s
assert_equal "wherever.mp4", @user.vlogs.second.filename.to_s
end
test "successfully updating an existing record to replace existing, dependent attachments" do
[ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |old_blobs|
@user.highlights.attach old_blobs
perform_enqueued_jobs do
@user.update! highlights: [ create_blob(filename: "whenever.jpg"), create_blob(filename: "wherever.jpg") ]
end
assert_equal "whenever.jpg", @user.highlights.first.filename.to_s
assert_equal "wherever.jpg", @user.highlights.second.filename.to_s
assert_not ActiveStorage::Blob.exists?(old_blobs.first.id)
assert_not ActiveStorage::Blob.exists?(old_blobs.second.id)
assert_not ActiveStorage::Blob.service.exist?(old_blobs.first.key)
assert_not ActiveStorage::Blob.service.exist?(old_blobs.second.key)
end
end
test "successfully updating an existing record to replace existing, independent attachments" do
@user.vlogs.attach create_blob(filename: "funky.mp4"), create_blob(filename: "town.mp4")
assert_no_enqueued_jobs only: ActiveStorage::PurgeJob do
@user.update! vlogs: [ create_blob(filename: "whenever.mp4"), create_blob(filename: "wherever.mp4") ]
end
assert_equal "whenever.mp4", @user.vlogs.first.filename.to_s
assert_equal "wherever.mp4", @user.vlogs.second.filename.to_s
end
test "unsuccessfully updating an existing record to replace existing attachments" do
@user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg")
assert_no_enqueued_jobs do
assert_not @user.update(name: "", highlights: [ fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4") ])
end
assert_equal "racecar.jpg", @user.highlights.first.filename.to_s
assert_equal "video.mp4", @user.highlights.second.filename.to_s
assert_not ActiveStorage::Blob.service.exist?(@user.highlights.first.key)
assert_not ActiveStorage::Blob.service.exist?(@user.highlights.second.key)
end
test "updating an existing record to attach one new blob and one previously-attached blob" do
[ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
@user.highlights.attach blobs.first
perform_enqueued_jobs do
assert_no_changes -> { @user.highlights_attachments.first.id } do
@user.update! highlights: blobs
end
end
assert_equal "funky.jpg", @user.highlights.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
assert ActiveStorage::Blob.service.exist?(@user.highlights.first.key)
end
end
test "updating an existing record to remove dependent attachments" do
[ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
@user.highlights.attach blobs
assert_enqueued_with job: ActiveStorage::PurgeJob, args: [ blobs.first ] do
assert_enqueued_with job: ActiveStorage::PurgeJob, args: [ blobs.second ] do
@user.update! highlights: []
end
end
assert_not @user.highlights.attached?
end
end
test "updating an existing record to remove independent attachments" do
[ create_blob(filename: "funky.mp4"), create_blob(filename: "town.mp4") ].tap do |blobs|
@user.vlogs.attach blobs
assert_no_enqueued_jobs only: ActiveStorage::PurgeJob do
@user.update! vlogs: []
end
assert_not @user.vlogs.attached?
end
end
test "analyzing a new blob from an uploaded file after attaching it to an existing record" do
perform_enqueued_jobs do
@user.highlights.attach fixture_file_upload("racecar.jpg")
end
assert @user.highlights.reload.first.analyzed?
assert_equal 4104, @user.highlights.first.metadata[:width]
assert_equal 2736, @user.highlights.first.metadata[:height]
end
test "analyzing a new blob from an uploaded file after attaching it to an existing record via update" do
perform_enqueued_jobs do
@user.update! highlights: [ fixture_file_upload("racecar.jpg") ]
end
assert @user.highlights.reload.first.analyzed?
assert_equal 4104, @user.highlights.first.metadata[:width]
assert_equal 2736, @user.highlights.first.metadata[:height]
end
test "analyzing a directly-uploaded blob after attaching it to an existing record" do
perform_enqueued_jobs do
@user.highlights.attach directly_upload_file_blob(filename: "racecar.jpg")
end
assert @user.highlights.reload.first.analyzed?
assert_equal 4104, @user.highlights.first.metadata[:width]
assert_equal 2736, @user.highlights.first.metadata[:height]
end
test "analyzing a directly-uploaded blob after attaching it to an existing record via update" do
perform_enqueued_jobs do
@user.update! highlights: [ directly_upload_file_blob(filename: "racecar.jpg") ]
end
assert @user.highlights.reload.first.analyzed?
assert_equal 4104, @user.highlights.first.metadata[:width]
assert_equal 2736, @user.highlights.first.metadata[:height]
end
test "attaching existing blobs to a new record" do
User.new(name: "Jason").tap do |user|
user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg")
assert user.new_record?
assert_equal "funky.jpg", user.highlights.first.filename.to_s
assert_equal "town.jpg", user.highlights.second.filename.to_s
user.save!
assert_equal "funky.jpg", user.highlights.first.filename.to_s
assert_equal "town.jpg", user.highlights.second.filename.to_s
end
end
test "attaching an existing blob from a signed ID to a new record" do
User.new(name: "Jason").tap do |user|
user.avatar.attach create_blob(filename: "funky.jpg").signed_id
assert user.new_record?
assert_equal "funky.jpg", user.avatar.filename.to_s
user.save!
assert_equal "funky.jpg", user.reload.avatar.filename.to_s
end
end
test "attaching new blobs from Hashes to a new record" do
User.new(name: "Jason").tap do |user|
user.highlights.attach(
{ io: StringIO.new("STUFF"), filename: "funky.jpg", content_type: "image/jpg" },
{ io: StringIO.new("THINGS"), filename: "town.jpg", content_type: "image/jpg" })
assert user.new_record?
assert user.highlights.first.new_record?
assert user.highlights.second.new_record?
assert user.highlights.first.blob.new_record?
assert user.highlights.second.blob.new_record?
assert_equal "funky.jpg", user.highlights.first.filename.to_s
assert_equal "town.jpg", user.highlights.second.filename.to_s
assert_not ActiveStorage::Blob.service.exist?(user.highlights.first.key)
assert_not ActiveStorage::Blob.service.exist?(user.highlights.second.key)
user.save!
assert user.highlights.first.persisted?
assert user.highlights.second.persisted?
assert user.highlights.first.blob.persisted?
assert user.highlights.second.blob.persisted?
assert_equal "funky.jpg", user.reload.highlights.first.filename.to_s
assert_equal "town.jpg", user.highlights.second.filename.to_s
assert ActiveStorage::Blob.service.exist?(user.highlights.first.key)
assert ActiveStorage::Blob.service.exist?(user.highlights.second.key)
end
end
test "attaching new blobs from uploaded files to a new record" do
User.new(name: "Jason").tap do |user|
user.highlights.attach fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4")
assert user.new_record?
assert user.highlights.first.new_record?
assert user.highlights.second.new_record?
assert user.highlights.first.blob.new_record?
assert user.highlights.second.blob.new_record?
assert_equal "racecar.jpg", user.highlights.first.filename.to_s
assert_equal "video.mp4", user.highlights.second.filename.to_s
assert_not ActiveStorage::Blob.service.exist?(user.highlights.first.key)
assert_not ActiveStorage::Blob.service.exist?(user.highlights.second.key)
user.save!
assert user.highlights.first.persisted?
assert user.highlights.second.persisted?
assert user.highlights.first.blob.persisted?
assert user.highlights.second.blob.persisted?
assert_equal "racecar.jpg", user.reload.highlights.first.filename.to_s
assert_equal "video.mp4", user.highlights.second.filename.to_s
assert ActiveStorage::Blob.service.exist?(user.highlights.first.key)
assert ActiveStorage::Blob.service.exist?(user.highlights.second.key)
end
end
test "creating a record with existing blobs attached" do
user = User.create!(name: "Jason", highlights: [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ])
assert_equal "funky.jpg", user.reload.highlights.first.filename.to_s
assert_equal "town.jpg", user.reload.highlights.second.filename.to_s
end
test "creating a record with an existing blob from signed IDs attached" do
user = User.create!(name: "Jason", highlights: [
create_blob(filename: "funky.jpg").signed_id, create_blob(filename: "town.jpg").signed_id ])
assert_equal "funky.jpg", user.reload.highlights.first.filename.to_s
assert_equal "town.jpg", user.reload.highlights.second.filename.to_s
end
test "creating a record with new blobs from uploaded files attached" do
User.new(name: "Jason", highlights: [ fixture_file_upload("racecar.jpg"), fixture_file_upload("video.mp4") ]).tap do |user|
assert user.new_record?
assert user.highlights.first.new_record?
assert user.highlights.second.new_record?
assert user.highlights.first.blob.new_record?
assert user.highlights.second.blob.new_record?
assert_equal "racecar.jpg", user.highlights.first.filename.to_s
assert_equal "video.mp4", user.highlights.second.filename.to_s
assert_not ActiveStorage::Blob.service.exist?(user.highlights.first.key)
assert_not ActiveStorage::Blob.service.exist?(user.highlights.second.key)
user.save!
assert_equal "racecar.jpg", user.highlights.first.filename.to_s
assert_equal "video.mp4", user.highlights.second.filename.to_s
end
end
test "creating a record with an unexpected object attached" do
error = assert_raises(ArgumentError) { User.create!(name: "Jason", highlights: :foo) }
assert_equal "Could not find or build blob: expected attachable, got :foo", error.message
end
test "analyzing a new blob from an uploaded file after attaching it to a new record" do
perform_enqueued_jobs do
user = User.create!(name: "Jason", highlights: [ fixture_file_upload("racecar.jpg") ])
assert user.highlights.reload.first.analyzed?
assert_equal 4104, user.highlights.first.metadata[:width]
assert_equal 2736, user.highlights.first.metadata[:height]
end
end
test "analyzing a directly-uploaded blob after attaching it to a new record" do
perform_enqueued_jobs do
user = User.create!(name: "Jason", highlights: [ directly_upload_file_blob(filename: "racecar.jpg") ])
assert user.highlights.reload.first.analyzed?
assert_equal 4104, user.highlights.first.metadata[:width]
assert_equal 2736, user.highlights.first.metadata[:height]
end
end
test "detaching" do
[ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
@user.highlights.attach blobs
assert @user.highlights.attached?
perform_enqueued_jobs do
@user.highlights.detach
end
assert_not @user.highlights.attached?
assert ActiveStorage::Blob.exists?(blobs.first.id)
assert ActiveStorage::Blob.exists?(blobs.second.id)
assert ActiveStorage::Blob.service.exist?(blobs.first.key)
assert ActiveStorage::Blob.service.exist?(blobs.second.key)
end
end
test "purging" do
[ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
@user.highlights.attach blobs
assert @user.highlights.attached?
@user.highlights.purge
assert_not @user.highlights.attached?
assert_not ActiveStorage::Blob.exists?(blobs.first.id)
assert_not ActiveStorage::Blob.exists?(blobs.second.id)
assert_not ActiveStorage::Blob.service.exist?(blobs.first.key)
assert_not ActiveStorage::Blob.service.exist?(blobs.second.key)
end
end
test "purging later" do
[ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
@user.highlights.attach blobs
assert @user.highlights.attached?
perform_enqueued_jobs do
@user.highlights.purge_later
end
assert_not @user.highlights.attached?
assert_not ActiveStorage::Blob.exists?(blobs.first.id)
assert_not ActiveStorage::Blob.exists?(blobs.second.id)
assert_not ActiveStorage::Blob.service.exist?(blobs.first.key)
assert_not ActiveStorage::Blob.service.exist?(blobs.second.key)
end
end
test "purging dependent attachment later on destroy" do
[ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ].tap do |blobs|
@user.highlights.attach blobs
perform_enqueued_jobs do
@user.destroy!
end
assert_not ActiveStorage::Blob.exists?(blobs.first.id)
assert_not ActiveStorage::Blob.exists?(blobs.second.id)
assert_not ActiveStorage::Blob.service.exist?(blobs.first.key)
assert_not ActiveStorage::Blob.service.exist?(blobs.second.key)
end
end
test "not purging independent attachment on destroy" do
[ create_blob(filename: "funky.mp4"), create_blob(filename: "town.mp4") ].tap do |blobs|
@user.vlogs.attach blobs
assert_no_enqueued_jobs do
@user.destroy!
end
end
end
test "clearing change on reload" do
@user.highlights = [ create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg") ]
assert @user.highlights.attached?
@user.reload
assert_not @user.highlights.attached?
end
test "overriding attached reader" do
@user.highlights.attach create_blob(filename: "funky.jpg"), create_blob(filename: "town.jpg")
assert_equal "funky.jpg", @user.highlights.first.filename.to_s
assert_equal "town.jpg", @user.highlights.second.filename.to_s
begin
User.class_eval do
def highlights
super.reverse
end
end
assert_equal "town.jpg", @user.highlights.first.filename.to_s
assert_equal "funky.jpg", @user.highlights.second.filename.to_s
ensure
User.send(:remove_method, :highlights)
end
end
end
| 42.253235 | 127 | 0.728422 |
7a75e63fe79937c0b35f1bde9b0b276015e3eedb | 2,644 | require "rails_helper"
class Redis
def initialize(config)
end
end
module OkComputer
describe RedisCheck do
let(:redis_config) do
{ url: "http://localhost:6379" }
end
subject { described_class.new(redis_config) }
it "is a subclass of Check" do
expect(subject).to be_a Check
end
describe "#new(redis_config)" do
it "requires a hash with redis configuration" do
expect { described_class.new }.to raise_error(ArgumentError)
end
it "stores the configuration" do
expect(described_class.new(redis_config).redis_config).to eq(redis_config)
end
end
describe "#redis" do
it "uses the redis_config" do
expect(Redis).to receive(:new).with(redis_config)
subject.redis
end
end
describe "#check" do
context "when the connection is successful" do
before do
allow(subject).to receive(:redis_info).and_return(redis_info)
end
let(:redis_info) do
{
"used_memory_human" => "1003.84K",
"uptime_in_seconds" => "272",
"connected_clients" => "2"
}
end
it { is_expected.to be_successful }
it { is_expected.to have_message "Connected to redis, 1003.84K used memory, uptime 272 secs, 2 connected client(s)" }
end
context "when the connection fails" do
let(:error_message) { "Error message" }
before do
allow(subject).to receive(:redis_info).and_raise(RedisCheck::ConnectionFailed, error_message)
end
it { is_expected.not_to be_successful }
it { is_expected.to have_message "Error: '#{error_message}'" }
end
end
describe "#redis_info" do
before do
allow(subject).to receive(:redis) { redis }
end
context "when the connection is successful" do
let(:redis) do
double("Redis", info: redis_info)
end
let(:redis_info) do
{
"used_memory_human" => "1003.84K",
"uptime_in_seconds" => "272",
"connected_clients" => "2"
}
end
it "returns a hash of the Redis INFO command" do
expect(subject.redis_info).to eq(redis_info)
end
end
context "when the connection fails" do
let(:redis) { double("Redis") }
before do
allow(redis).to receive(:info) { fail Errno::ECONNREFUSED }
end
it "raises a ConnectionFailed error" do
expect { subject.redis_info }.to raise_error(RedisCheck::ConnectionFailed)
end
end
end
end
end
| 25.669903 | 125 | 0.599092 |
e2e5287dd7f1e052a6512ef26a86a39e376a5097 | 92 | namespace :samples do
task single_id: :environment do
Samples.new.single_id
end
end
| 15.333333 | 33 | 0.75 |
e894db89188b13d6bf523204fcb405c1033e8226 | 5,874 | class Note < ApplicationRecord
class RevertError < Exception ; end
attribute :updater_id, :integer
attribute :updater_ip_addr, :inet
attr_accessor :html_id
belongs_to :post
belongs_to_creator
belongs_to_updater
has_many :versions, -> {order("note_versions.id ASC")}, :class_name => "NoteVersion", :dependent => :destroy
validates_presence_of :post_id, :creator_id, :updater_id, :x, :y, :width, :height, :body
validate :post_must_exist
validate :note_within_image
after_save :update_post
after_save :create_version
validate :post_must_not_be_note_locked
module SearchMethods
def active
where("is_active = TRUE")
end
def body_matches(query)
if query =~ /\*/ && CurrentUser.user.is_builder?
where("body ILIKE ? ESCAPE E'\\\\'", query.to_escaped_for_sql_like)
else
where("body_index @@ plainto_tsquery(E?)", query.to_escaped_for_tsquery_split)
end
end
def post_tags_match(query)
PostQueryBuilder.new(query).build(self.joins(:post)).reorder("")
end
def for_creator(user_id)
where("creator_id = ?", user_id)
end
def creator_name(name)
where("creator_id = (select _.id from users _ where lower(_.name) = ?)", name.mb_chars.downcase)
end
def search(params)
q = super
if params[:body_matches].present?
q = q.body_matches(params[:body_matches])
end
q = q.attribute_matches(:is_active, params[:is_active])
if params[:post_id].present?
q = q.where(post_id: params[:post_id].split(",").map(&:to_i))
end
if params[:post_tags_match].present?
q = q.post_tags_match(params[:post_tags_match])
end
if params[:creator_name].present?
q = q.creator_name(params[:creator_name].tr(" ", "_"))
end
if params[:creator_id].present?
q = q.where(creator_id: params[:creator_id].split(",").map(&:to_i))
end
q.apply_default_order(params)
end
end
module ApiMethods
def hidden_attributes
super + [:body_index]
end
def method_attributes
super + [:creator_name]
end
end
extend SearchMethods
include ApiMethods
def post_must_exist
if !Post.exists?(post_id)
errors.add :post, "must exist"
return false
end
end
def post_must_not_be_note_locked
if is_locked?
errors.add :post, "is note locked"
return false
end
end
def note_within_image
return false unless post.present?
if x < 0 || y < 0 || (x > post.image_width) || (y > post.image_height) || width < 0 || height < 0 || (x + width > post.image_width) || (y + height > post.image_height)
self.errors.add(:note, "must be inside the image")
return false
end
end
def is_locked?
Post.exists?(["id = ? AND is_note_locked = ?", post_id, true])
end
def rescale!(x_scale, y_scale)
self.x *= x_scale
self.y *= y_scale
self.width *= x_scale
self.height *= y_scale
save!
end
def update_post
if self.saved_changes?
if Note.where(:is_active => true, :post_id => post_id).exists?
execute_sql("UPDATE posts SET last_noted_at = ? WHERE id = ?", updated_at, post_id)
else
execute_sql("UPDATE posts SET last_noted_at = NULL WHERE id = ?", post_id)
end
end
end
def create_version
return unless saved_change_to_versioned_attributes?
if merge_version?
merge_version
else
Note.where(:id => id).update_all("version = coalesce(version, 0) + 1")
reload
create_new_version
end
end
def saved_change_to_versioned_attributes?
new_record? || saved_change_to_x? || saved_change_to_y? || saved_change_to_width? || saved_change_to_height? || saved_change_to_is_active? || saved_change_to_body?
end
def create_new_version
versions.create(
:updater_id => updater_id,
:updater_ip_addr => updater_ip_addr,
:post_id => post_id,
:x => x,
:y => y,
:width => width,
:height => height,
:is_active => is_active,
:body => body,
:version => version
)
end
def merge_version
prev = versions.last
prev.update_attributes(
:x => x,
:y => y,
:width => width,
:height => height,
:is_active => is_active,
:body => body
)
end
def merge_version?
prev = versions.last
prev && prev.updater_id == CurrentUser.user.id && prev.updated_at > 1.hour.ago && !saved_change_to_is_active?
end
def revert_to(version)
if id != version.note_id
raise RevertError.new("You cannot revert to a previous version of another note.")
end
self.x = version.x
self.y = version.y
self.post_id = version.post_id
self.body = version.body
self.width = version.width
self.height = version.height
self.is_active = version.is_active
self.updater_id = CurrentUser.id
self.updater_ip_addr = CurrentUser.ip_addr
end
def revert_to!(version)
revert_to(version)
save!
end
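  # Copy this note to another post, scaling its position and size to the
  # destination post's image dimensions.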
def copy_to(new_post)
new_note = dup
new_note.post_id = new_post.id
new_note.version = 0
width_ratio = new_post.image_width.to_f / post.image_width
height_ratio = new_post.image_height.to_f / post.image_height
new_note.x = x * width_ratio
new_note.y = y * height_ratio
new_note.width = width * width_ratio
new_note.height = height * height_ratio
new_note.save
end
def self.undo_changes_by_user(vandal_id)
transaction do
note_ids = NoteVersion.where(:updater_id => vandal_id).select("note_id").distinct.map(&:note_id)
NoteVersion.where(["updater_id = ?", vandal_id]).delete_all
note_ids.each do |note_id|
note = Note.find(note_id)
most_recent = note.versions.last
if most_recent
note.revert_to!(most_recent)
end
end
end
end
end
| 25.876652 | 171 | 0.652366 |
e9dd0b3702293a6b77c6481257bfbe3bc671706d | 386 | cask 'qgis' do
version '3.12.2'
sha256 'f7a61f85f4f278eb45d5a84483bea0ca57b344b4ed6f12d3d507927d7ce97234'
url 'https://qgis.org/downloads/macos/qgis-macos-pr.dmg'
appcast 'https://qgis.org/downloads/macos/qgis-macos-pr.sha256sum',
configuration: version.dots_to_underscores
name 'QGIS'
homepage 'https://www.qgis.org/'
app "QGIS#{version.major_minor}.app"
end
| 29.692308 | 75 | 0.746114 |
1c792ea6cdab67a7dee1dfcc3161653687630203 | 92 | class ProjectMember < ActiveRecord::Base
belongs_to :project
belongs_to :teammember
end
| 18.4 | 40 | 0.804348 |
e89e0b9e1bc99b77f5e3af86fae9f1a8cb218d2a | 5,337 | # Copyright 2020 Google, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require_relative "helper"
require_relative "../acls.rb"
describe "ACL Snippets" do
parallelize_me!
let :bucket do
create_bucket_helper "ruby_storage_sample_#{SecureRandom.hex}"
end
let(:storage_client) { Google::Cloud::Storage.new }
let(:local_file) { File.expand_path "data/file.txt", __dir__ }
let(:email) { "[email protected]" }
let(:remote_file_name) { "path/file_name.txt" }
after do
delete_bucket_helper bucket.name
end
describe "print_bucket_acl" do
it "prints a bucket's acl" do
bucket.acl.add_owner email
out, _err = capture_io do
print_bucket_acl bucket_name: bucket.name
end
assert_includes out, "ACL for #{bucket.name}:"
assert_includes out, "OWNER #{email}"
end
end
describe "print_bucket_acl_for_user" do
it "prints a bucket's acl for a user" do
bucket.acl.add_owner email
expected_output = <<~OUTPUT
Permissions for #{email}:
OWNER
OUTPUT
assert_output expected_output do
print_bucket_acl_for_user bucket_name: bucket.name,
email: email
end
end
end
describe "add_bucket_owner" do
it "adds owner permissions to a user for a bucket" do
assert_output "Added OWNER permission for #{email} to #{bucket.name}\n" do
add_bucket_owner bucket_name: bucket.name,
email: email
end
assert_includes bucket.acl.owners, email
end
end
describe "remove_bucket_acl" do
it "removes a user from a bucket's acl" do
bucket.acl.add_owner email
assert_output "Removed ACL permissions for #{email} from #{bucket.name}\n" do
remove_bucket_acl bucket_name: bucket.name,
email: email
end
refute_includes bucket.acl.owners, email
end
end
describe "add_bucket_default_owner" do
it "adds a user as default owner for a bucket" do
assert_output "Added default OWNER permission for #{email} to #{bucket.name}\n" do
add_bucket_default_owner bucket_name: bucket.name,
email: email
end
assert_includes bucket.default_acl.owners, email
end
end
describe "remove_bucket_default_acl" do
it "adds a user as default owner for a bucket" do
bucket.default_acl.add_owner email
assert_output "Removed default ACL permissions for #{email} from #{bucket.name}\n" do
remove_bucket_default_acl bucket_name: bucket.name,
email: email
end
refute_includes bucket.default_acl.owners, email
end
end
describe "print_file_acl" do
it "prints the acl for a file in a bucket" do
bucket.create_file local_file, remote_file_name
owners = bucket.file(remote_file_name).acl.owners
readers = bucket.file(remote_file_name).acl.readers
out, _err = capture_io do
print_file_acl bucket_name: bucket.name,
file_name: remote_file_name
end
      assert(owners.all? { |owner| out.include? "OWNER #{owner}" })
      assert(readers.all? { |reader| out.include? "READER #{reader}" })
end
end
describe "print_file_acl_for_user" do
it "prints the permissions of a given user for a file in a bucket" do
bucket.create_file local_file, remote_file_name
bucket.file(remote_file_name).acl.add_owner email
assert_output "Permissions for #{email}:\nOWNER\n" do
print_file_acl_for_user bucket_name: bucket.name,
file_name: remote_file_name,
email: email
end
end
end
describe "add_file_owner" do
it "adds a user as an owner of for a file in a bucket" do
bucket.create_file local_file, remote_file_name
assert_output "Added OWNER permission for #{email} to #{remote_file_name}\n" do
add_file_owner bucket_name: bucket.name,
file_name: remote_file_name,
email: email
end
assert_includes bucket.file(remote_file_name).acl.owners, email
end
end
describe "remove_file_acl" do
it "removes a user from the acl for file in a bucket" do
bucket.create_file local_file, remote_file_name
bucket.file(remote_file_name).acl.add_owner email
assert_output "Removed ACL permissions for #{email} from #{remote_file_name}\n" do
remove_file_acl bucket_name: bucket.name,
file_name: remote_file_name,
email: email
end
refute_includes bucket.file(remote_file_name).acl.owners, email
end
end
end
| 32.150602 | 91 | 0.654113 |
62113f5390d5c38f6c53f381c4c1eca99372b776 | 1,061 | # frozen_string_literal: true
module RuboCop
module Cop
module InSpecStyle
# @example EnforcedStyle: InSpecStyle (default)
# # linux_kernel_parameter has been deprecated as a resource. Use kernel_parameter instead
#
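      #   # Illustrative control; the parameter name and value are arbitrary examples.
      #   # bad
      #   describe linux_kernel_parameter('kernel.shmmax') do
      #     its('value') { should eq 4294967295 }
      #   end
      #
      #   # good
      #   describe kernel_parameter('kernel.shmmax') do
      #     its('value') { should eq 4294967295 }
      #   end
      #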
class LinuxKernelParameter < Cop
MSG = 'Use `kernel_parameter` instead of `linux_kernel_parameter`. '\
'This resource will be removed in InSpec 5.'
def_node_matcher :linux_kernel_parameter?, <<~PATTERN
(send nil? :linux_kernel_parameter ...)
PATTERN
def on_send(node)
return unless linux_kernel_parameter?(node)
add_offense(node, location: :selector)
end
def autocorrect(node)
lambda do |corrector|
corrector.replace(node.loc.selector, preferred_replacement)
end
end
private
def inside_spec?(root)
spec?(root)
end
def preferred_replacement
cop_config.fetch('PreferredReplacement')
end
end
end
end
end
| 25.261905 | 98 | 0.624882 |
87706b58586c1fed9ebb07835bd0e2e4944e8915 | 376 | require "bundler/setup"
require "omniauth-http-header"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 25.066667 | 66 | 0.755319 |
1154f1387adcec15630b2821f5cf8d8e66ee0093 | 151 | class AddRoleIdToUsers < ActiveRecord::Migration[5.0]
def change
add_column :users, :role_id, :integer
add_index :users, :role_id
end
end
| 21.571429 | 53 | 0.721854 |
d5772fcbcfc42ec33665687c8a150ac026f56391 | 6,470 | # frozen_string_literal: true
module Zoom
module Actions
module User
def user_list(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.permit(%i[status page_size role_id page_number include_fields next_page_token])
response = self.class.get('/users', query: params, headers: request_headers)
Utils.parse_response(response)
end
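      # Create a user. "action", "email", and "type" are required; "password" is
      # additionally required when "action" is "autoCreate".
      #
      # Example (hypothetical; a sketch assuming these actions are mixed into a
      # configured Zoom client instance):
      #
      #   client.user_create(action: 'create', email: '[email protected]', type: 1)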
def user_create(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
require_param_keys = %i[action email type]
require_param_keys.append(:password) if params[:action] == 'autoCreate'
params.require(require_param_keys)
params.permit_value(:action, Zoom::Constants::User::CREATE_TYPES.keys)
Utils.parse_response self.class.post('/users', body: { action: params[:action], user_info: params.except(:action) }.to_json, headers: request_headers)
end
def user_get(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:id).permit(:login_type)
Utils.parse_response self.class.get("/users/#{params[:id]}", query: params.except(:id), headers: request_headers)
end
def user_update(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:id).permit(%i[first_name last_name type pmi timezone dept vanity_name host_key cms_user_id])
Utils.parse_response self.class.patch("/users/#{params[:id]}", body: params.except(:id).to_json, headers: request_headers)
end
def user_delete(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:id).permit(%i[action transfer_email transfer_meeting transfer_webinar transfer_recording])
Utils.parse_response self.class.delete("/users/#{params[:id]}", query: params.except(:id), headers: request_headers)
end
def user_assistants_list(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:user_id)
Utils.parse_response(self.class.get("/users/#{params[:user_id]}/assistants", query: params.except(:user_id), headers: request_headers))
end
def user_assistants_create(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:user_id).permit(:assistants)
Utils.parse_response self.class.post("/users/#{params[:user_id]}/assistants", body: params.except(:user_id).to_json, headers: request_headers)
end
def user_assistants_delete_all(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:user_id)
Utils.parse_response(self.class.delete("/users/#{params[:user_id]}/assistants", query: params.except(:user_id), headers: request_headers))
end
def user_assistants_delete(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(%i[user_id assistant_id])
Utils.parse_response(self.class.delete("/users/#{params[:user_id]}/assistants/#{params[:assistant_id]}", query: params.except(:user_id, :assistant_id), headers: request_headers))
end
def user_schedulers_list(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:user_id)
Utils.parse_response(self.class.get("/users/#{params[:user_id]}/schedulers", query: params.except(:user_id), headers: request_headers))
end
def user_schedulers_delete_all(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:user_id)
Utils.parse_response(self.class.delete("/users/#{params[:user_id]}/schedulers", query: params.except(:user_id), headers: request_headers))
end
def user_schedulers_delete(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(%i[user_id scheduler_id])
Utils.parse_response(self.class.delete("/users/#{params[:user_id]}/schedulers/#{params[:scheduler_id]}", query: params.except(:user_id, :scheduler_id), headers: request_headers))
end
def user_settings_get(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:id).permit(:login_type)
Utils.parse_response self.class.get("/users/#{params[:id]}/settings", query: params.except(:id), headers: request_headers)
end
def user_settings_update(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:id).permit(%i[schedule_meeting in_meeting email_notification recording telephony feature tsp])
Utils.parse_response self.class.patch("/users/#{params[:id]}/settings", body: params.except(:id).to_json, headers: request_headers)
end
def user_email_check(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:email)
Utils.parse_response(self.class.get('/users/email', query: params.slice(:email), headers: request_headers))
end
def user_recordings_list(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:id).permit(%i[page_size next_page_token mc trash from to trash_type])
Utils.parse_response self.class.get("/users/#{params[:id]}/recordings", query: params.except(:id), headers: request_headers)
end
def user_token(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:user_id).permit(%i[type ttl])
Utils.parse_response self.class.get("/users/#{params[:user_id]}/token", query: params.except(:user_id), headers: request_headers)
end
def user_permissions(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:user_id)
Utils.parse_response self.class.get("/users/#{params[:user_id]}/permissions", headers: request_headers)
end
def user_vanity_name(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:vanity_name)
Utils.parse_response self.class.get("/users/vanity_name", query: params.slice(:vanity_name), headers: request_headers)
end
def user_password_update(*args)
params = Zoom::Params.new(Utils.extract_options!(args))
params.require(:id).permit(%i[password])
Utils.parse_response self.class.patch("/users/#{params[:id]}/password", body: params.except(:id), headers: request_headers)
end
end
end
end
| 48.646617 | 186 | 0.689799 |
7a91c7d60f0d7ca6f5356a02de99c30fc2042cab | 44 | module MethodSource
VERSION = "0.8.2"
end
| 11 | 19 | 0.704545 |
1dfe84d520a73f3b75c2450f041a6328bf9ec716 | 1,508 | require 'rails_helper'
RSpec.describe InvoiceValidator do
subject { validatable_object }
let(:validatable_object) do
validatable.new(subtotal: subtotal, invoice_lines: invoice_lines)
end
let(:validatable) do
Class.new do
include ActiveModel::Model
attr_accessor :invoice_lines, :subtotal
validates_with InvoiceValidator
def self.model_name
ActiveModel::Name.new(self, nil, 'validatable')
end
end
end
let(:invoice_lines) do
[
{ variant_id: 1, price: 10, quantity: 2 },
{ variant_id: 2, price: 12, quantity: 1 }
]
end
let(:subtotal) { 32 }
describe 'invoice subtotal' do
context 'with correct subtotal' do
it { is_expected.to be_valid }
end
context 'with incorrect subtotal' do
let(:subtotal) { 10 }
it { is_expected.to be_invalid }
end
context 'without subtotal' do
let(:subtotal) { nil }
it { is_expected.to be_invalid }
end
end
describe 'invoice lines\' attributes' do
context 'with complete attributes' do
it { is_expected.to be_valid }
end
context 'with missing attributes' do
let(:invoice_lines) do
[
{ variant_id: 1, quantity: 2 }, # 1 missing
{ variant_id: 2, price: 12 }, # 1 missing
{} # 3 missing
]
end
before { validatable_object.validate }
it { is_expected.to be_invalid }
it { expect(validatable_object.errors.count).to eq 5 }
end
end
end
| 22.848485 | 69 | 0.62931 |
f7f05aaf4a409419867e15bec8231cd2f3c4aa5c | 585 | # frozen_string_literal: true
module PriceHubble
# The nested PriceHubble property condition object.
#
# @see https://docs.pricehubble.com/#types-property
class PropertyConditions < BaseEntity
# Mapped and tracked attributes
tracked_attr :bathrooms, :kitchen, :flooring, :windows, :masonry
# Define attribute types for casting
with_options(values: Property::CONDITIONS) do
typed_attr :bathrooms, :enum
typed_attr :kitchen, :enum
typed_attr :flooring, :enum
typed_attr :windows, :enum
typed_attr :masonry, :enum
end
end
end
| 27.857143 | 68 | 0.71453 |
0374e9fc93834b270f613c5479e9da29cd897c73 | 73 | require "syncbot/version"
module Syncbot
# Your code goes here...
end
| 12.166667 | 26 | 0.726027 |
e9410255b421c516649b743bda3ae53674bab45e | 5,816 | # frozen_string_literal: true
require("test_helper")
module Bootsnap
class KernelTest < Minitest::Test
include TmpdirHelper
def test_require_symlinked_file_twice
setup_symlinked_files
if RUBY_VERSION >= "3.1"
# Fixed in https://github.com/ruby/ruby/commit/79a4484a072e9769b603e7b4fbdb15b1d7eccb15 (Ruby 3.1)
assert_both_pass(<<~RUBY)
require "symlink/test"
require "real/test"
RUBY
else
assert_both_pass(<<~RUBY)
require "symlink/test"
begin
require "real/test"
rescue RuntimeError
exit 0
else
exit 1
end
RUBY
end
end
def test_require_symlinked_file_twice_aliased
setup_symlinked_files
assert_both_pass(<<~RUBY)
$LOAD_PATH.unshift(File.expand_path("symlink"))
require "test"
$LOAD_PATH.unshift(File.expand_path("a"))
require "test"
RUBY
end
def test_require_relative_symlinked_file_twice
setup_symlinked_files
if RUBY_VERSION >= "3.1"
# Fixed in https://github.com/ruby/ruby/commit/79a4484a072e9769b603e7b4fbdb15b1d7eccb15 (Ruby 3.1)
assert_both_pass(<<~RUBY)
require_relative "symlink/test"
require_relative "real/test"
RUBY
else
assert_both_pass(<<~RUBY)
require_relative "symlink/test"
begin
require_relative "real/test"
rescue RuntimeError
exit 0
else
exit 1
end
RUBY
end
end
def test_require_and_then_require_relative_symlinked_file
setup_symlinked_files
assert_both_pass(<<~RUBY)
$LOAD_PATH.unshift(File.expand_path("symlink"))
require "test"
require_relative "real/test"
RUBY
end
def test_require_relative_and_then_require_symlinked_file
setup_symlinked_files
assert_both_pass(<<~RUBY)
require_relative "real/test"
$LOAD_PATH.unshift(File.expand_path("symlink"))
require "test"
RUBY
end
def test_require_deep_symlinked_file_twice
setup_symlinked_files
if RUBY_VERSION >= "3.1"
# Fixed in https://github.com/ruby/ruby/commit/79a4484a072e9769b603e7b4fbdb15b1d7eccb15 (Ruby 3.1)
assert_both_pass(<<~RUBY)
require "symlink/dir/deep"
require "real/dir/deep"
RUBY
else
assert_both_pass(<<~RUBY)
require "symlink/dir/deep"
begin
require "real/dir/deep"
rescue RuntimeError
exit 0
else
exit 1
end
RUBY
end
end
def test_require_deep_symlinked_file_twice_aliased
setup_symlinked_files
assert_both_pass(<<~RUBY)
$LOAD_PATH.unshift(File.expand_path("symlink"))
require "dir/deep"
$LOAD_PATH.unshift(File.expand_path("a"))
require "dir/deep"
RUBY
end
def test_require_relative_deep_symlinked_file_twice
setup_symlinked_files
if RUBY_VERSION >= "3.1"
# Fixed in https://github.com/ruby/ruby/commit/79a4484a072e9769b603e7b4fbdb15b1d7eccb15 (Ruby 3.1)
assert_both_pass(<<~RUBY)
require_relative "symlink/dir/deep"
require_relative "real/dir/deep"
RUBY
else
assert_both_pass(<<~RUBY)
require_relative "symlink/dir/deep"
begin
require_relative "real/dir/deep"
rescue RuntimeError
exit 0
else
exit 1
end
RUBY
end
end
def test_require_and_then_require_relative_deep_symlinked_file
setup_symlinked_files
assert_both_pass(<<~RUBY)
$LOAD_PATH.unshift(File.expand_path("symlink"))
require "dir/deep"
require_relative "real/dir/deep"
RUBY
end
def test_require_relative_and_then_require_deep_symlinked_file
setup_symlinked_files
assert_both_pass(<<~RUBY)
require_relative "real/dir/deep"
$LOAD_PATH.unshift(File.expand_path("symlink"))
require "dir/deep"
RUBY
end
private
def assert_both_pass(source)
Help.set_file("without_bootsnap.rb", source)
unless execute("without_bootsnap.rb", "debug.txt")
flunk "expected snippet to pass WITHOUT bootsnap enabled:\n#{debug_output}"
end
Help.set_file("with_bootsnap.rb", %{require "bootsnap/setup"\n#{source}})
unless execute("with_bootsnap.rb", "debug.txt")
flunk "expected snippet to pass WITH bootsnap enabled:\n#{debug_output}"
end
end
def debug_output
File.read("debug.txt")
rescue Errno::ENOENT
end
def execute(script_path, output_path)
system(
{"BOOTSNAP_CACHE_DIR" => "tmp/cache"},
RbConfig.ruby, "-I.", script_path,
out: output_path, err: output_path
)
end
def assert_successful(source)
Help.set_file("test_case.rb", source)
Help.set_file("test_case.rb", %{require "bootsnap/setup"\n#{source}})
assert system({"BOOTSNAP_CACHE_DIR" => "tmp/cache"}, RbConfig.ruby, "-I.", "test_case.rb")
end
def setup_symlinked_files
skip("Platform doesn't support symlinks") unless File.respond_to?(:symlink)
Help.set_file("real/test.rb", <<-RUBY)
if $test_already_required
raise "test.rb required more than once"
else
$test_already_required = true
end
RUBY
Help.set_file("real/dir/deep.rb", <<-RUBY)
if $deep_already_required
raise "deep.rb required more than once"
else
$deep_already_required = true
end
RUBY
File.symlink("real", "symlink")
end
end
end
| 27.051163 | 106 | 0.622937 |
1cdf1ee68fdd9a43383841baeccca91b0cb5a24d | 4,711 | require 'dotenv'
Dotenv.load
require 'logger'
require 'date'
require 'timezone'
require 'multi_json'
require 'restless_router'
require 'core_nutrition/client/version'
require 'core_nutrition/errors'
require 'core_nutrition/logger'
require 'core_nutrition/configuration'
require 'core_nutrition/connection'
require 'core_nutrition/response'
require 'core_nutrition/utilities'
require "core_nutrition/requests"
require "core_nutrition/models"
require "core_nutrition/serializers"
module CoreNutrition
module Client
# Class accessor methods to be utilized
# throughout the gem itself.
class << self
attr_accessor :configuration
attr_accessor :routes
end
# Create a default Configuration to use
# throughout the gem
#
# @return [CoreNutrition::Configuration] Configuration object utilizing the Default
def self.configuration
@configuration ||= CoreNutrition::Configuration.new
end
# Specify configuration options. This will be applied to
# our memoized Configuration.
#
# @return [CoreNutrition::Configuration]
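#
# A minimal usage sketch with hypothetical values; it assumes the
# Configuration object exposes writers for the settings read elsewhere in
# this file (api_host, access_token):
#
# @example
#   CoreNutrition::Client.configure do |config|
#     config.api_host     = "https://api.example.com"  # hypothetical host
#     config.access_token = ENV["CORE_ACCESS_TOKEN"]   # hypothetical env var
#   end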
def self.configure
yield(self.configuration)
end
# Helper method to access the Connection object
#
# @return [CoreNutrition::Connection] Faraday Response Delegator
def self.connection
@connection ||= CoreNutrition::Connection.new(url: self.configuration.api_host) do |builder|
builder.response(:json, content_type: /\bjson/)
builder.response(:logger, self.configuration.request_logger)
builder.adapter(CoreNutrition::Connection.default_adapter)
end
# Inject Authorization
@connection.headers['Authorization'] = ("Bearer %s" % [self.configuration.access_token])
# Merge default headers
@connection.headers.merge!(self.configuration.connection_options[:headers])
@connection
end
# Helper method to perform a GET request
#
# @return [CoreNutrition::Response] Faraday Response Delegator
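#
# A rough usage sketch; the "events" path is a hypothetical example, not an
# endpoint defined by this file:
#
# @example
#   response = CoreNutrition::Client.get(CoreNutrition::Client.rel_for("events"))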
def self.get(url, data={}, headers={})
request = self.connection.get(url, data, headers)
CoreNutrition::Response.new(request)
end
# Helper method to perform a HEAD request
#
# @return [CoreNutrition::Response] Faraday Response Delegator
def self.head(url, data={}, headers={})
request = self.connection.head(url, data, headers)
CoreNutrition::Response.new(request)
end
# Helper method to perform a OPTIONS request
#
# @return [CoreNutrition::Response] Faraday Response Delegator
def self.options(url, headers={})
request = self.connection.http_options(url, nil, headers)
CoreNutrition::Response.new(request)
end
# Helper method to perform a PUT request
#
# @return [CoreNutrition::Response] Faraday Response Delegator
def self.put(url, data={}, headers={})
request = self.connection.put(url, data, headers)
CoreNutrition::Response.new(request)
end
# Helper method to perform a POST request
#
# @return [CoreNutrition::Response] Faraday Response Delegator
def self.post(url, data={}, headers={})
request = self.connection.post(url, data, headers)
CoreNutrition::Response.new(request)
end
def self.delete(url, data={}, headers={})
request = self.connection.delete(url, data, headers)
CoreNutrition::Response.new(request)
end
# Returns the root directory response
#
# @return [CoreNutrition::Models::Directory]
def self.directory
@directory ||= CoreNutrition::Models::Directory.retrieve
end
# Define the API routes
#
# These are the endpoints that will be used to interact
# with the API. Before you make any requests you will
# want to add the corresponding routes here.
#
# @return [RestlessRouter::Routes] A collection of Routes
def self.routes
return @routes if @routes
@routes = self.directory.links
@routes
end
# Returns the link relationship for
# a specified path.
#
# Custom link relationships are fully qualified
# URIs, but in this case we only care to reference
# the path and add the API host.
#
# @return [String]
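#
# A small illustration; the host shown is hypothetical and depends on the
# configured api_host:
#
# @example
#   CoreNutrition::Client.rel_for("events")
#   # => "https://api.example.com/events"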
def self.rel_for(rel)
"%s/%s" % [self.api_host, rel]
end
# Helper method to return the API HOST
#
# @return [String] API URI
def self.api_host
self.configuration.api_host
end
# Returns the client id from ENV
#
# @return [String,NilClass]
def self.client_id
ENV['CORE_CLIENT_ID']
end
# Returns the client secret from ENV
#
# @return [String,NilClass]
def self.client_secret
ENV['CORE_CLIENT_SECRET']
end
end
end
| 26.615819 | 98 | 0.684143 |
1cbefb80be7894b8e20ce19758822305b52c2d28 | 2,000 | class Suil < Formula
desc "Lightweight C library for loading and wrapping LV2 plugin UIs"
homepage "https://drobilla.net/software/suil.html"
url "https://download.drobilla.net/suil-0.10.10.tar.bz2"
sha256 "750f08e6b7dc941a5e694c484aab02f69af5aa90edcc9fb2ffb4fb45f1574bfb"
license "ISC"
revision 1
head "https://gitlab.com/lv2/suil.git", branch: "master"
livecheck do
url "https://download.drobilla.net/"
regex(/href=.*?suil[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
sha256 arm64_monterey: "4b952357f77ca23c77da7b02bd5b95da858d74e33378272a7bf7c63e759fb0af"
sha256 arm64_big_sur: "11af96a8cd470b08da0bd49cb3b620ae81d89e9589c5ed44a533e2cb93d5133f"
sha256 big_sur: "02a8eed42b15c099954dce4741c71b0e5f9ae652fce48921e4920a3efc779e01"
sha256 catalina: "4a74f4c1cbf9b1e67c7fbda45e5ca67b5163757b70ee62c33a7e66b136a2d4c1"
sha256 mojave: "2bc87e39cf2cb0a66c983c01834d39c2f1cccdddbe4db28331e0dcb6cf64c3fb"
end
depends_on "pkg-config" => :build
depends_on "gtk+"
depends_on "gtk+3"
depends_on "lv2"
depends_on "qt@5"
# Disable qt5_in_gtk3 because it depends upon X11
# Can be removed if https://gitlab.com/lv2/suil/-/merge_requests/1 is merged
patch do
url "https://gitlab.com/lv2/suil/-/commit/33ea47e18ddc1eb384e75622c0e75164d351f2c0.diff"
sha256 "2f335107e26c503460965953f94410e458c5e8dd86a89ce039f65c4e3ae16ba7"
end
def install
ENV.cxx11
system "./waf", "configure", "--prefix=#{prefix}", "--no-x11",
"--gtk2-lib-name=#{shared_library("libgtk-quartz-2.0.0")}", "--gtk3-lib-name=#{shared_library("libgtk-3.0")}"
system "./waf", "install"
end
test do
(testpath/"test.c").write <<~EOS
#include <suil/suil.h>
int main()
{
return suil_ui_supported("my-host", "my-ui");
}
EOS
lv2 = Formula["lv2"].opt_include
system ENV.cc, "-I#{lv2}", "-I#{include}/suil-0", "-L#{lib}", "-lsuil-0", "test.c", "-o", "test"
system "./test"
end
end
| 35.087719 | 117 | 0.7005 |
798def4deee03814e56ce5c00b3839468a93c3cd | 54,952 | Vmdb::Application.routes.draw do
# grouped routes
adv_search_post = %w(
adv_search_button
adv_search_clear
adv_search_load_choice
adv_search_name_typed
adv_search_toggle
)
button_post = %w(
button_create
button_update
)
compare_get = %w(
compare_miq
compare_to_csv
compare_to_pdf
compare_to_txt
)
compare_post = %w(
compare_choose_base
compare_compress
compare_miq
compare_miq_all
compare_miq_differences
compare_miq_same
compare_mode
compare_remove
compare_set_state
)
dialog_runner_post = %w(
dialog_field_changed
dialog_form_button_pressed
dynamic_checkbox_refresh
dynamic_date_refresh
dynamic_radio_button_refresh
dynamic_text_box_refresh
)
discover_get_post = %w(
discover
discover_field_changed
)
drift_get = %w(
drift
drift_history
drift_to_csv
drift_to_pdf
drift_to_txt
)
drift_post = %w(
drift_all
drift_compress
drift_differences
drift_history
drift_mode
drift_same
)
exp_post = %w(
exp_button
exp_changed
exp_token_pressed
)
evm_relationship_post = %w(
evm_relationship_field_changed
evm_relationship_update
)
ownership_post = %w(
ownership
ownership_field_changed
ownership_update
)
perf_post = %w(
perf_chart_chooser
perf_top_chart
)
policy_post = %w(
policy_options
policy_show_options
policy_sim
policy_sim_add
policy_sim_remove
)
pre_prov_post = %w(
pre_prov
pre_prov_continue
)
save_post = %w(
save_default_search
)
snap_post = %w(
snap_pressed
snap_vm
)
x_post = %w(
x_button
x_history
x_search_by_name
x_settings_changed
x_show
)
controller_routes = {
:alert => {
:get => %w(
index
rss
show_list
),
:post => %w(
role_selected
start_rss
),
},
:auth_key_pair_cloud => {
:get => %w(
download_data
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
) + compare_get,
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
show
show_list
tagging_edit
tag_edit_form_field_changed
) + adv_search_post + compare_post + exp_post + save_post
},
:availability_zone => {
:get => %w(
download_data
index
perf_top_chart
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
panel_control
quick_search
sections_field_changed
show
show_list
tagging_edit
tag_edit_form_field_changed
tl_chooser
wait_for_task
) + adv_search_post + compare_post + exp_post + perf_post
},
:catalog => {
:get => %w(
download_data
explorer
ot_edit
ot_show
show
),
:post => %w(
ab_group_reorder
accordion_select
ae_tree_select
ae_tree_select_discard
ae_tree_select_toggle
atomic_form_field_changed
atomic_st_edit
automate_button_field_changed
explorer
get_ae_tree_edit_key
group_create
group_form_field_changed
group_reorder_field_changed
group_update
identify_catalog
orchestration_template_add
orchestration_template_copy
orchestration_template_edit
ot_add_form_field_changed
ot_add_submit
ot_copy_submit
ot_edit_submit
ot_form_field_changed
ot_tags_edit
process_sts
prov_field_changed
reload
resolve
resource_delete
service_dialog_from_ot_submit
servicetemplate_edit
sort_ds_grid
sort_host_grid
sort_iso_img_grid
sort_pxe_img_grid
sort_vc_grid
sort_vm_grid
st_catalog_edit
st_catalog_form_field_changed
st_delete
st_edit
st_form_field_changed
st_tags_edit
st_upload_image
svc_catalog_provision
tag_edit_form_field_changed
tree_autoload_dynatree
tree_select
x_button
x_history
x_settings_changed
x_show
) +
button_post +
dialog_runner_post
},
:chargeback => {
:get => %w(
explorer
index
render_csv
render_pdf
render_txt
report_only
),
:post => %w(
accordion_select
explorer
cb_assign_field_changed
cb_assign_update
cb_rate_edit
cb_rate_form_field_changed
cb_rate_show
cb_rates_delete
cb_rates_list
cb_tier_add
cb_tier_remove
saved_report_paging
tree_autoload_dynatree
tree_select
x_button
x_show
)
},
:cloud_object_store_container => {
:get => %w(
download_data
index
show
show_list
tagging_edit
tag_edit_form_field_changed
) + compare_get,
:post => %w(
button
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
show
show_list
tagging_edit
tag_edit_form_field_changed
) + compare_post + adv_search_post + exp_post + save_post
},
:cloud_tenant => {
:get => %w(
download_data
edit
index
protect
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
protect
sections_field_changed
show
show_list
tagging_edit
tag_edit_form_field_changed
update
panel_control
) +
compare_post
},
:cloud_object_store_object => {
:get => %w(
download_data
edit
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
) + compare_get,
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
show
show_list
tagging_edit
tag_edit_form_field_changed
update
) + compare_post + adv_search_post + exp_post + save_post
},
:cloud_volume => {
:get => %w(
download_data
edit
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
) + compare_get,
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
show
show_list
tagging_edit
tag_edit_form_field_changed
update
) + compare_post + adv_search_post + exp_post + save_post
},
:cloud_volume_snapshot => {
:get => %w(
download_data
edit
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
) + compare_get,
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
show
show_list
tagging_edit
tag_edit_form_field_changed
update
) + compare_post + adv_search_post + exp_post + save_post
},
:configuration => {
# TODO: routes for new/edit/copy buttons need to be revisited
# TODO: so they can be changed to send up POST request instead of GET
:get => %w(
change_tab
index
show
timeprofile_copy
timeprofile_edit
timeprofile_new
),
:post => %w(
button
filters_field_changed
form_field_changed
theme_changed
timeprofile_create
timeprofile_delete
timeprofile_field_changed
timeprofile_update
update
view_selected
)
},
:container => {
:get => %w(
download_data
explorer
perf_top_chart
show
tl_chooser
wait_for_task
show_list
tagging_edit
tag_edit_form_field_changed
),
:post => %w(
accordion_select
button
container_edit
container_form_field_changed
explorer
tl_chooser
wait_for_task
quick_search
reload
tree_autoload_dynatree
tree_select
container_tag
tag_edit_form_field_changed
) +
adv_search_post +
exp_post +
perf_post +
save_post +
x_post
},
:container_group => {
:get => %w(
download_data
edit
index
new
perf_top_chart
show
show_list
tagging_edit
tag_edit_form_field_changed
),
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
tl_chooser
update
wait_for_task
tagging_edit
tag_edit_form_field_changed
) +
adv_search_post +
exp_post +
perf_post +
save_post
},
:container_node => {
:get => %w(
download_data
edit
index
new
perf_top_chart
show
show_list
tagging_edit
tag_edit_form_field_changed
),
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
tl_chooser
update
wait_for_task
tagging_edit
tag_edit_form_field_changed
) +
adv_search_post +
exp_post +
perf_post +
save_post
},
:container_replicator => {
:get => %w(
download_data
edit
index
new
perf_top_chart
show
show_list
tagging_edit
tag_edit_form_field_changed
),
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
tl_chooser
update
wait_for_task
tagging_edit
tag_edit_form_field_changed
) +
adv_search_post +
exp_post +
perf_post +
save_post
},
:container_image => {
:get => %w(
download_data
edit
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
guest_applications
openscap_rule_results
openscap_html
protect
squash_toggle
),
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
update
tagging_edit
tag_edit_form_field_changed
guest_applications
openscap_rule_results
protect
squash_toggle
) + adv_search_post + exp_post + save_post
},
:container_image_registry => {
:get => %w(
download_data
edit
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
),
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
update
tagging_edit
tag_edit_form_field_changed
) + adv_search_post + exp_post + save_post
},
:container_service => {
:get => %w(
download_data
edit
index
new
perf_top_chart
show
show_list
tagging_edit
tag_edit_form_field_changed
),
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
update
wait_for_task
tagging_edit
tag_edit_form_field_changed
) +
adv_search_post +
exp_post +
perf_post +
save_post
},
:container_project => {
:get => %w(
download_data
edit
index
new
perf_top_chart
show
show_list
tagging_edit
tag_edit_form_field_changed
),
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
tl_chooser
update
wait_for_task
tagging_edit
tag_edit_form_field_changed
) +
adv_search_post +
exp_post +
perf_post +
save_post
},
:container_route => {
:get => %w(
download_data
edit
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
),
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
update
tagging_edit
tag_edit_form_field_changed
) + adv_search_post + exp_post + save_post
},
:persistent_volume => {
:get => %w(
download_data
edit
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
),
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
update
tagging_edit
tag_edit_form_field_changed
) + adv_search_post + exp_post + save_post
},
:container_build => {
:get => %w(
download_data
edit
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
),
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
update
tagging_edit
tag_edit_form_field_changed
) + adv_search_post + exp_post + save_post
},
:container_topology => {
:get => %w(
show
data
)
},
:middleware_topology => {
:get => %w(
show
data
)
},
:container_dashboard => {
:get => %w(
show
data
)
},
:dashboard => {
:get => %w(
auth_error
iframe
change_tab
index
login
logout
saml_login
maintab
render_csv
render_pdf
render_txt
render_chart
report_only
show
timeline
timeline_data
widget_to_pdf
),
:post => %w(
kerberos_authenticate
initiate_saml_login
authenticate
change_group
csp_report
timeline_data
login_retry
panel_control
reset_widgets
resize_layout
show_timeline
tl_generate
wait_for_task
widget_add
widget_close
widget_dd_done
widget_toggle_minmax
widget_zoom
window_sizes
)
},
:ems_cloud => {
:get => %w(
dialog_load
discover
download_data
ems_cloud_form_fields
protect
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
create
dynamic_checkbox_refresh
dynamic_list_refresh
dynamic_radio_button_refresh
dynamic_text_box_refresh
form_field_changed
listnav_search_selected
panel_control
protect
provider_type_field_changed
quick_search
sections_field_changed
show
show_list
tag_edit_form_field_changed
tagging_edit
tl_chooser
update
wait_for_task
) +
adv_search_post +
compare_post +
dialog_runner_post +
discover_get_post +
exp_post +
save_post
},
:ems_cluster => {
:get => %w(
columns_json
dialog_load
download_data
index
perf_top_chart
protect
rows_json
show
show_list
tagging_edit
) +
compare_get +
drift_get,
:post => %w(
button
listnav_search_selected
panel_control
protect
quick_search
sections_field_changed
show
show_list
tag_edit_form_field_changed
tagging_edit
tl_chooser
tree_autoload_dynatree
tree_autoload_quads
wait_for_task
) +
adv_search_post +
compare_post +
dialog_runner_post +
drift_post +
exp_post +
perf_post +
save_post
},
:ems_infra => {
:get => %w(
dialog_load
discover
download_data
edit
index
new
protect
show
show_list
tagging_edit
scaling
) +
compare_get,
:post => %w(
button
create
form_field_changed
listnav_search_selected
panel_control
protect
quick_search
sections_field_changed
show
show_list
tag_edit_form_field_changed
tagging_edit
tl_chooser
tree_autoload_dynatree
tree_autoload_quads
update
wait_for_task
scaling
) +
adv_search_post +
compare_post +
dialog_runner_post +
discover_get_post +
exp_post +
save_post
},
:ems_container => {
:get => %w(
download_data
edit
index
new
perf_top_chart
protect
show
show_list
tagging_edit
tag_edit_form_field_changed
) +
compare_get,
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
protect
quick_search
sections_field_changed
show
show_list
tl_chooser
update
wait_for_task
tagging_edit
tag_edit_form_field_changed
) +
adv_search_post +
compare_post +
exp_post +
perf_post +
save_post
},
:ems_middleware => {
:get => %w(
download_data
edit
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
) +
compare_get,
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
tl_chooser
update
wait_for_task
tagging_edit
tag_edit_form_field_changed
) +
adv_search_post +
compare_post +
exp_post +
save_post
},
:middleware_server => {
:get => %w(
download_data
edit
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
) +
compare_get,
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
tl_chooser
update
wait_for_task
tagging_edit
tag_edit_form_field_changed
) +
adv_search_post +
compare_post +
exp_post +
save_post
},
:middleware_deployment => {
:get => %w(
download_data
edit
index
new
show
show_list
tagging_edit
tag_edit_form_field_changed
) +
compare_get,
:post => %w(
button
create
dynamic_checkbox_refresh
form_field_changed
listnav_search_selected
panel_control
quick_search
sections_field_changed
show
show_list
tl_chooser
update
wait_for_task
tagging_edit
tag_edit_form_field_changed
) +
adv_search_post +
compare_post +
exp_post +
save_post
},
:ems_network => {
:get => %w(
dialog_load
download_data
edit
ems_network_form_fields
index
new
protect
show
show_list
tagging_edit
tag_edit_form_field_changed
) +
compare_get,
:post => %w(
button
create
dynamic_checkbox_refresh
dynamic_list_refresh
dynamic_radio_button_refresh
dynamic_text_box_refresh
form_field_changed
listnav_search_selected
panel_control
protect
provider_type_field_changed
quick_search
sections_field_changed
show
show_list
tag_edit_form_field_changed
tagging_edit
tl_chooser
update
wait_for_task
) +
adv_search_post +
compare_post +
dialog_runner_post +
exp_post +
save_post
},
:security_group => {
:get => %w(
download_data
index
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
quick_search
panel_control
show
show_list
tag_edit_form_field_changed
tagging_edit
) +
adv_search_post +
compare_post +
exp_post
},
:floating_ip => {
:get => %w(
download_data
index
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
quick_search
panel_control
show
show_list
tag_edit_form_field_changed
tagging_edit
) +
adv_search_post +
compare_post +
exp_post
},
:cloud_subnet => {
:get => %w(
download_data
index
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
quick_search
panel_control
show
show_list
tag_edit_form_field_changed
tagging_edit
) +
adv_search_post +
compare_post +
exp_post
},
:network_router => {
:get => %w(
download_data
index
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
quick_search
panel_control
show
show_list
tag_edit_form_field_changed
tagging_edit
) +
adv_search_post +
compare_post +
exp_post
},
:flavor => {
# FIXME: Change tagging_edit to POST only; We need to remove the redirects
# in app/controllers/application_controller/tags.rb#tag that are used in
# the role of a method call.
# Then remove this route from all other controllers too.
:get => %w(
download_data
index
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
quick_search
panel_control
sections_field_changed
show
show_list
tag_edit_form_field_changed
tagging_edit
) +
adv_search_post +
compare_post +
exp_post
},
:host => {
:get => %w(
advanced_settings
dialog_load
download_data
edit
filesystem_download
filesystems
firewall_rules
timeline_data
groups
guest_applications
host_form_fields
host_services
index
list
new
patches
perf_top_chart
protect
show
show_association
show_details
show_list
start
tagging_edit
users
) +
compare_get +
discover_get_post +
drift_get,
:post => %w(
advanced_settings
button
create
drift_all
drift_compress
drift_differences
drift_mode
drift_same
filesystems
firewall_rules
firewallrules
form_field_changed
groups
guest_applications
host_services
listnav_search_selected
quick_search
panel_control
patches
protect
sections_field_changed
show
show_list
squash_toggle
tag_edit_form_field_changed
tagging_edit
tl_chooser
update
users
wait_for_task
) +
adv_search_post +
compare_post +
dialog_runner_post +
discover_get_post +
exp_post +
perf_post +
save_post
},
:miq_ae_class => {
:get => %w(
explorer
),
:post => %w(
ae_tree_select
ae_tree_select_toggle
change_tab
copy_objects
create
create_instance
create_method
create_ns
domains_priority_edit
explorer
expand_toggle
field_accept
field_delete
field_method_accept
field_method_delete
field_method_select
field_select
fields_form_field_changed
fields_seq_edit
fields_seq_field_changed
form_copy_objects_field_changed
form_field_changed
form_instance_field_changed
form_method_field_changed
form_ns_field_changed
priority_form_field_changed
reload
tree_select
tree_autoload_dynatree
update
update_fields
update_instance
update_method
update_ns
validate_method_data
x_button
x_history
x_settings_changed
x_show
)
},
:miq_ae_customization => {
:get => %w(
dialog_accordion_json
explorer
export_service_dialogs
review_import
service_dialog_json
),
:post => %w(
ab_group_reorder
ae_tree_select
ae_tree_select_toggle
accordion_select
automate_button_field_changed
cancel_import
change_tab
dialog_edit
dialog_form_field_changed
dialog_list
dialog_res_remove
dialog_res_reorder
explorer
field_value_accept
field_value_delete
field_value_select
group_create
group_form_field_changed
group_reorder_field_changed
group_update
import_service_dialogs
old_dialogs_form_field_changed
old_dialogs_list
old_dialogs_update
reload
resolve
tree_autoload_dynatree
tree_select
upload_import_file
x_button
x_history
x_settings_changed
x_show
) +
button_post
},
:miq_ae_tools => {
:get => %w(
automate_json
export_datastore
fetch_log
import_export
log
resolve
review_import
),
:post => %w(
button
cancel_import
form_field_changed
import_automate_datastore
reset_datastore
resolve
upload
upload_import_file
wait_for_task
)
},
:miq_capacity => {
:get => %w(
bottlenecks
timeline_data
index
planning
planning_report_download
util_report_download
utilization
),
:post => %w(
bottleneck_tl_chooser
change_tab
optimize_tree_select
planning
planning_option_changed
tree_autoload_dynatree
util_chart_chooser
wait_for_task
)
},
:miq_policy => {
:get => %w(
explorer
export
fetch_log
fetch_yaml
get_json
import
index
log
rsop
),
:post => %w(
accordion_select
action_edit
action_field_changed
action_get_all
action_tag_pressed
alert_delete
alert_edit
alert_field_changed
alert_get_all
alert_profile_assign
alert_profile_assign_changed
alert_profile_delete
alert_profile_edit
alert_profile_field_changed
button
condition_edit
condition_field_changed
event_edit
export
export_field_changed
import
panel_control
policy_edit
policy_get_all
policy_field_changed
profile_edit
profile_field_changed
quick_search
reload
rsop
rsop_option_changed
rsop_show_options
rsop_toggle
tree_autoload_dynatree
tree_select
upload
wait_for_task
) +
adv_search_post +
exp_post +
x_post
},
:miq_request => {
# FIXME: Change stamp to POST only; We need to remove the redirect
:get => %w(
index
post_install_callback
pre_prov
prov_copy
prov_edit
show
show_list
stamp
),
:post => %w(
button
post_install_callback
pre_prov
prov_button
prov_change_options
prov_continue
prov_edit
prov_field_changed
prov_load_tab
prov_show_option
request_copy
request_edit
retrieve_email
show_list
sort_configured_system_grid
sort_ds_grid
sort_host_grid
sort_iso_img_grid
sort_pxe_img_grid
sort_template_grid
sort_vc_grid
sort_vm_grid
sort_windows_image_grid
stamp
stamp_field_changed
vm_pre_prov
upload
) +
dialog_runner_post
},
:miq_task => {
:get => %w(
change_tab
index
jobs
tasks_show_option
),
:post => %w(
button
jobs
tasks_button
tasks_change_options
)
},
:miq_template => {
:get => %w(
edit
show
ownership
),
:post => %w(
edit
edit_vm
form_field_changed
show
) +
ownership_post
},
:ontap_file_share => {
:get => %w(
cim_base_storage_extents
create_ds
download_data
index
protect
show
show_list
snia_local_file_systems
tagging_edit
) +
compare_get,
:post => %w(
button
create_ds
create_ds_field_changed
panel_control
protect
quick_search
sections_field_changed
show
show_list
tag_edit_form_field_changed
tagging_edit
) +
adv_search_post +
compare_post +
exp_post
},
:ontap_logical_disk => {
:get => %w(
cim_base_storage_extents
download_data
index
protect
show
show_list
snia_local_file_systems
tagging_edit
) +
compare_get,
:post => %w(
button
panel_control
perf_chart_chooser
protect
quick_search
sections_field_changed
show
show_list
tag_edit_form_field_changed
tagging_edit
wait_for_task
) +
adv_search_post +
compare_post +
exp_post
},
:ontap_storage_system => {
:get => %w(
cim_base_storage_extents
create_ld
download_data
index
protect
show
show_list
snia_local_file_systems
tagging_edit
) +
compare_get,
:post => %w(
button
create_ld
create_ld_field_changed
panel_control
protect
quick_search
sections_field_changed
show
show_list
tag_edit_form_field_changed
tagging_edit
) +
adv_search_post +
compare_post +
exp_post
},
:ontap_storage_volume => {
:get => %w(
cim_base_storage_extents
download_data
index
protect
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
panel_control
protect
quick_search
sections_field_changed
show
show_list
tag_edit_form_field_changed
tagging_edit
) +
adv_search_post +
compare_post +
exp_post
},
:ops => {
:get => %w(
explorer
fetch_audit_log
fetch_build
fetch_log
fetch_production_log
log_collection_form_fields
log_protocol_changed
schedule_form_fields
show_product_update
tenant_quotas_form_fields
tenant_form_fields
ldap_regions_list
),
:post => %w(
accordion_select
activate
apply_imports
ap_ce_delete
ap_ce_select
ap_edit
ap_form_field_changed
ap_set_active_tab
aps_list
category_delete
category_edit
category_field_changed
category_update
ce_accept
ce_delete
ce_new_cat
ce_select
change_tab
cu_collection_field_changed
cu_collection_update
cu_repair
cu_repair_field_changed
db_backup
db_backup_form_field_changed
db_gc_collection
db_list
diagnostics_server_list
diagnostics_tree_select
diagnostics_worker_selected
edit_rhn
explorer
fetch_build
forest_accept
forest_delete
forest_form_field_changed
forest_select
log_depot_edit
log_depot_field_changed
log_depot_validate
orphaned_records_delete
perf_chart_chooser
product_updates_list
rbac_group_edit
rbac_group_field_changed
rbac_group_seq_edit
rbac_group_user_lookup
rbac_groups_list
rbac_role_edit
rbac_role_field_changed
rbac_roles_list
rbac_tags_edit
rbac_tenant_edit
rbac_tenants_list
rbac_tenant_manage_quotas
rbac_user_edit
rbac_user_field_changed
rbac_users_list
region_edit
region_form_field_changed
repo_default_name
restart_server
rhn_buttons
rhn_default_server
rhn_validate
schedule_edit
schedule_form_field_changed
schedule_form_filter_type_field_changed
schedules_list
schedule_update
settings_form_field_changed
settings_update
show
show_product_update
smartproxy_affinity_field_changed
tag_edit_form_field_changed
tl_chooser
tree_autoload_dynatree
tree_select
update
upload_csv
upload_form_field_changed
upload_login_logo
upload_logo
validate_replcation_worker
wait_for_task
x_button
x_show
x_settings_changed
zone_edit
zone_field_changed
ldap_region_add
ldap_region_edit
ldap_region_form_field_changed
ldap_domain_edit
ldap_domain_form_field_changed
ls_select
ldap_entry_changed
ls_delete
)
},
:orchestration_stack => {
:get => %w(
cloud_networks
download_data
retirement_info
index
outputs
parameters
resources
retire
show
show_list
tagging_edit
protect
),
:post => %w(
button
cloud_networks
outputs
listnav_search_selected
panel_control
parameters
quick_search
resources
retire
sections_field_changed
show
show_list
protect
tagging_edit
tag_edit_form_field_changed
) +
adv_search_post +
exp_post +
save_post
},
:provider_foreman => {
:get => %w(
download_data
explorer
provider_foreman_form_fields
show
show_list
tagging_edit
),
:post => %w(
accordion_select
authentication_validate
button
change_tab
delete
edit
explorer
exp_button
exp_changed
exp_token_pressed
form_field_changed
new
panel_control
provision
quick_search
refresh
reload
show
show_list
tagging
tagging_edit
tag_edit_form_field_changed
tree_autoload_dynatree
tree_select
users
wait_for_task
) +
adv_search_post +
x_post
},
:pxe => {
:get => %w(
explorer
),
:post => %w(
accordion_select
explorer
iso_datastore_create
iso_datastore_form_field_changed
iso_datastore_list
iso_image_edit
iso_img_form_field_changed
log_depot_validate
pxe_image_edit
pxe_image_type_edit
pxe_image_type_form_field_changed
pxe_image_type_list
pxe_img_form_field_changed
pxe_server_create_update
pxe_server_form_field_changed
pxe_server_list
pxe_wimg_edit
pxe_wimg_form_field_changed
reload
template_create_update
template_form_field_changed
template_list
tree_autoload_dynatree
tree_select
x_button
x_history
x_settings_changed
)
},
:report => {
:get => %w(
db_widget_dd_done
download_report
explorer
export_widgets
miq_report_edit
miq_report_new
preview_chart
preview_timeline
render_chart
report_only
sample_chart
sample_timeline
send_report_data
tree_autoload_dynatree
tree_select
),
:post => %w(
accordion_select
change_tab
create
db_edit
db_form_field_changed
db_seq_edit
db_widget_dd_done
db_widget_remove
discard_changes
explorer
export_field_changed
filter_change
form_field_changed
get_report
import_widgets
menu_editor
menu_field_changed
menu_folder_message_display
menu_update
miq_report_edit
reload
rep_change_tab
saved_report_paging
schedule_edit
schedule_form_field_changed
show_preview
show_saved
tree_autoload_dynatree
tree_select
upload
upload_widget_import_file
wait_for_task
widget_edit
widget_form_field_changed
widget_shortcut_dd_done
widget_shortcut_remove
widget_shortcut_reset
x_button
x_history
x_settings_changed
x_show
) +
exp_post
},
:repository => {
:get => %w(
download_data
edit
index
new
protect
repository_form_fields
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
create
form_field_changed
listnav_search_selected
quick_search
panel_control
protect
show
show_list
tag_edit_form_field_changed
tagging_edit
update
) +
adv_search_post +
compare_post +
exp_post +
save_post
},
:resource_pool => {
:get => %w(
download_data
index
protect
show
show_list
tagging_edit
) +
compare_get,
:post => %w(
button
listnav_search_selected
panel_control
protect
sections_field_changed
show
show_list
tagging_edit
tag_edit_form_field_changed
quick_search
) +
adv_search_post +
compare_post +
exp_post +
save_post
},
:service => {
:get => %w(
download_data
explorer
retirement_info
reconfigure_form_fields
retire
service_form_fields
show
),
:post => %w(
button
explorer
ownership_field_changed
ownership_update
reload
retire
service_edit
service_tag
tag_edit_form_field_changed
tree_autoload_dynatree
tree_select
x_button
x_history
x_settings_changed
x_show
) +
dialog_runner_post
},
# TODO: revisit this controller/route, might be removed after other storage issues are sorted out
:snia_local_file_system => {
:get => %w(show)
},
:storage => {
:get => %w(
button
debris_files
dialog_load
disk_files
download_data
files
index
perf_chart_chooser
protect
show
show_list
snapshot_files
tagging_edit
vm_ram_files
vm_misc_files
) +
compare_get,
:post => %w(
button
files
listnav_search_selected
panel_control
disk_files
perf_chart_chooser
protect
quick_search
sections_field_changed
show
show_association
show_details
show_list
tag_edit_form_field_changed
tagging_edit
wait_for_task
) +
adv_search_post +
compare_post +
dialog_runner_post +
exp_post +
save_post
},
:storage_manager => {
:get => %w(
download_data
edit
index
new
show
show_list
),
:post => %w(
button
create
form_field_changed
panel_control
quick_search
show
show_list
update
) +
adv_search_post +
exp_post
},
:support => {
:get => %w(index)
},
:vm => {
:get => %w(
download_data
edit
retirement_info
ownership
policy_sim
reconfigure
reconfigure_form_fields
retire
right_size
show
show_list
),
:post => %w(
edit_vm
form_field_changed
policy_sim
policy_sim_add
policy_sim_remove
provision
reconfigure
reconfigure_form_fields
reconfigure_update
retire
right_size
set_checked_items
show_list
vmtree_selected
) +
ownership_post +
pre_prov_post
},
:vm_cloud => {
:get => %w(
download_data
drift_to_csv
drift_to_pdf
drift_to_txt
explorer
filesystem_download
retirement_info
reconfigure_form_fields
launch_html5_console
perf_chart_chooser
protect
retire
show
tagging_edit
resize
) +
compare_get,
:post => %w(
advanced_settings
accordion_select
button
edit_vm
resize_vm
resize_field_changed
event_logs
explorer
launch_html5_console
filesystems
filesystem_drivers
form_field_changed
guest_applications
groups
html5_console
kernel_drivers
linux_initprocesses
ownership_field_changed
ownership_update
panel_control
patches
perf_chart_chooser
policies
processes
protect
prov_edit
prov_field_changed
quick_search
registry_items
reload
retire
reconfigure_update
scan_histories
sections_field_changed
security_groups
floating_ips
network_routers
cloud_subnets
show
squash_toggle
tagging_edit
tag_edit_form_field_changed
tl_chooser
tree_autoload_dynatree
tree_select
users
vm_pre_prov
wait_for_task
win32_services
) +
adv_search_post +
compare_post +
dialog_runner_post +
drift_post +
evm_relationship_post +
exp_post +
policy_post +
pre_prov_post +
x_post
},
:vm_infra => {
:get => %w(
download_data
drift_to_csv
drift_to_pdf
drift_to_txt
explorer
filesystem_download
retirement_info
reconfigure_form_fields
launch_vmware_console
launch_html5_console
perf_chart_chooser
policies
protect
retire
show
tagging_edit
) +
compare_get,
:post => %w(
accordion_select
advanced_settings
button
edit_vm
event_logs
explorer
filesystems
filesystem_drivers
form_field_changed
guest_applications
groups
kernel_drivers
linux_initprocesses
ontap_file_shares
ontap_logical_disks
ontap_storage_systems
ontap_storage_volume
ownership_field_changed
ownership_update
panel_control
patches
perf_chart_chooser
policies
protect
processes
prov_edit
prov_field_changed
quick_search
reconfigure_field_changed
reconfigure_update
registry_items
reload
retire
scan_histories
sections_field_changed
security_groups
show
sort_ds_grid
sort_host_grid
sort_iso_img_grid
sort_vc_grid
sort_template_grid
sort_vm_grid
squash_toggle
tagging_edit
tag_edit_form_field_changed
tl_chooser
tree_autoload_dynatree
tree_select
users
vmrc_console
vm_pre_prov
vm_vdi
html5_console
wait_for_task
win32_services
) +
adv_search_post +
compare_post +
dialog_runner_post +
drift_post +
evm_relationship_post +
exp_post +
policy_post +
pre_prov_post +
snap_post +
x_post
},
:vm_or_template => {
:get => %w(
download_data
drift_to_csv
drift_to_pdf
drift_to_txt
explorer
launch_html5_console
retirement_info
reconfigure_form_fields
launch_vmware_console
protect
retire
show
tagging_edit
util_report_download
utilization
vm_show
) +
compare_get,
:post => %w(
accordion_select
advanced_settings
button
console
drift_all
drift_differences
drift_history
drift_mode
drift_same
edit_vm
event_logs
explorer
filesystem_drivers
filesystems
form_field_changed
groups
guest_applications
kernel_drivers
linux_initprocesses
ontap_file_shares
ontap_logical_disks
ontap_storage_systems
ownership_field_changed
ownership_update
panel_control
patches
perf_chart_chooser
policies
processes
protect
prov_edit
prov_field_changed
quick_search
reconfigure_field_changed
reconfigure_update
registry_items
reload
retire
scan_histories
sections_field_changed
show
sort_ds_grid
sort_host_grid
sort_iso_img_grid
sort_vc_grid
squash_toggle
tagging_edit
tag_edit_form_field_changed
tl_chooser
tree_select
users
util_chart_chooser
vm_pre_prov
vmrc_console
html5_console
wait_for_task
win32_services
x_button
x_history
x_search_by_name
x_show
) +
adv_search_post +
compare_post +
dialog_runner_post +
evm_relationship_post +
exp_post +
policy_post +
pre_prov_post +
snap_post
},
}
root :to => 'dashboard#login'
get '/saml_login(/*path)' => 'dashboard#saml_login'
# Let's serve pictures directly from the DB
get '/pictures/:basename' => 'picture#show', :basename => /[\da-zA-Z]+\.[\da-zA-Z]+/
# Enablement for the REST API
# OPTIONS requests for REST API pre-flight checks
# Semantic Versioning Regex for API, i.e. vMajor.minor.patch[-pre]
apiver_regex = /v[\d]+(\.[\da-zA-Z]+)*(\-[\da-zA-Z]+)?/
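# Illustrative strings matched by the regex above (not from the original source):
# "v1", "v1.0", "v2.1.3-pre1"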
match '/api/*path' => 'api#handle_options_request', :via => [:options]
get '/api(/:version)' => 'api#show', :format => 'json', :version => apiver_regex
get '/api(/:version)/:collection(/:c_id(/:subcollection(/:s_id)))' => 'api#show', :format => 'json', :version => apiver_regex
match '/api(/:version)/:collection(/:c_id(/:subcollection(/:s_id)))' => 'api#update', :format => 'json', :via => [:post, :put, :patch], :version => apiver_regex
delete '/api(/:version)/:collection(/:c_id(/:subcollection(/:s_id)))' => 'api#destroy', :format => 'json', :version => apiver_regex
controller_routes.each do |controller_name, controller_actions|
# Default route with no action maps to the controller's index action
unless controller_name == :ems_cloud
match "#{controller_name}", :controller => controller_name, :action => :index, :via => :get
end
# One-by-one get/post routes for defined controllers
if controller_actions.kind_of?(Hash)
unless controller_actions[:get].nil?
controller_actions[:get].each do |action_name|
get "#{controller_name}/#{action_name}(/:id)",
:action => action_name,
:controller => controller_name
end
end
unless controller_actions[:post].nil?
controller_actions[:post].each do |action_name|
post "#{controller_name}/#{action_name}(/:id)",
:action => action_name,
:controller => controller_name
end
end
end
end
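# As a rough illustration (derived from the :alert entry above), this loop
# produces routes along these lines:
#   GET  /alert                  => alert#index
#   GET  /alert/rss(/:id)        => alert#rss
#   POST /alert/start_rss(/:id)  => alert#start_rss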
# pure-angular templates
get '/static/*id' => 'static#show', :format => false
# ping response for load balancing
get '/ping' => 'ping#index'
resources :ems_cloud, :as => :ems_clouds
match "/auth/:provider/callback" => "sessions#create", :via => :get
end
| 21.516053 | 164 | 0.536668 |
2843da316f38f0e1b7efde3a74947d822ab5c1be | 405 | class Meth
can Track; can Find
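# NOTE (assumption): `can` is taken to be a class-level macro provided by the
# Track/Find modules (or a shared concern) that mixes their behavior into this
# class; `@@all`, used in #initialize below, is likewise assumed to be set up
# by one of them.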
#=========================properties=========================
attr_accessor :type, :name, :url, :documentation
#=========================intialize==========================
def initialize(type="n/a", name, url)
self.type = type
self.name = name
self.url = url
@@all << self
end
#============================================================
end
| 28.928571 | 62 | 0.377778 |
08e6e4cb728688596b32b52bb2abb0068285428f | 1,621 | # frozen_string_literal: true
require_relative "lib/intercode_client/version"
Gem::Specification.new do |spec|
spec.name = "intercode_client"
spec.version = IntercodeClient::VERSION
spec.authors = ["Nat Budin"]
spec.email = ["[email protected]"]
spec.summary = "A Ruby client and OmniAuth strategy for Intercode"
spec.homepage = "https://github.com/neinteractiveliterature/intercode_client"
spec.license = "MIT"
spec.required_ruby_version = ">= 2.4.0"
spec.metadata["allowed_push_host"] = "https://rubygems.org"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = spec.homepage
spec.metadata["changelog_uri"] = "https://github.com/neinteractiveliterature/intercode_client"
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path(__dir__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{\A(?:test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
# Uncomment to register a new dependency of your gem
# spec.add_dependency "example-gem", "~> 1.0"
spec.add_dependency 'graphql-client'
spec.add_dependency 'json-jwt'
spec.add_dependency 'omniauth-oauth2'
spec.add_development_dependency 'pry'
# For more information and examples about making a new gem, checkout our
# guide at: https://bundler.io/guides/creating_gem.html
end
| 37.697674 | 96 | 0.702036 |
b900c4bb6be8d20045e488c0bc97436ffd59b150 | 2,217 |
###
# This Ruby source file was generated by test-to-ruby.xsl
# and is a derived work from the source document.
# The source document contained the following notice:
=begin
Copyright (c) 2001 World Wide Web Consortium,
(Massachusetts Institute of Technology, Institut National de
Recherche en Informatique et en Automatique, Keio University). All
Rights Reserved. This program is distributed under the W3C's Software
Intellectual Property License. This program is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE.
See W3C License http://www.w3.org/Consortium/Legal/ for more details.
=end
#
require File.expand_path(File.join(File.dirname(__FILE__), '..', '..', '..', 'helper'))
###
# The marginHeight attribute specifies the frame margin height, in pixels.
# Retrieve the marginHeight attribute of the first IFRAME element and examine
# its value.
# @author NIST
# @author Rick Rivello
# see[http://www.w3.org/TR/1998/REC-DOM-Level-1-19981001/level-one-html#ID-91371294]
##
DOMTestCase('HTMLIFrameElement06') do
###
# Constructor.
# @param factory document factory, may not be null
# @throws org.w3c.domts.DOMTestIncompatibleException Thrown if test is not compatible with parser configuration
##
def setup
##
## check if loaded documents are supported for content type
##
contentType = getContentType()
preload(contentType, "iframe", false)
end
###
# Runs the test case.
# @throws Throwable Any uncaught exception causes test to fail
#
def test_HTMLIFrameElement06
nodeList = nil
testNode = nil
vmarginheight = nil
doc = nil
doc = load_document("iframe", false)
nodeList = doc.getElementsByTagName("iframe")
assertSize("Asize", 1, nodeList)
testNode = nodeList.item(0)
vmarginheight = testNode.marginHeight()
assert_equal("10", vmarginheight, "marginheightLink")
end
###
# Gets URI that identifies the test.
# @return uri identifier of test
#
def targetURI
"http://www.w3.org/2001/DOM-Test-Suite/level1/html/HTMLIFrameElement06"
end
end
| 28.792208 | 113 | 0.715832 |
1dd71e6245b147af277344a6896a8720e2879b66 | 4,179 | # Titanic service
#
# Implements server side of http://rfc.zeromq.org/spec:9
#
# Author: Tom van Leeuwen <[email protected]>
# Based on Python example by Min RK
require './mdcliapi2.rb'
require './mdwrkapi.rb'
require 'pathname'
require 'securerandom'
require 'json'
require 'thread'
require 'awesome_print'
TITANIC_DIR = Pathname(Dir.pwd).join('.titanic')
def request_filename uuid
TITANIC_DIR.join("#{uuid}.req")
end
def reply_filename uuid
TITANIC_DIR.join("#{uuid}.rep")
end
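# For illustration, a hypothetical uuid "ab12" resolves to
# <cwd>/.titanic/ab12.req and <cwd>/.titanic/ab12.rep respectively.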
def titanic_request pipe
worker = MajorDomoWorker.new('tcp://localhost:5555', 'titanic.request')
# Ensure message directory exists
Dir.mkdir(TITANIC_DIR) unless Dir.exist?(TITANIC_DIR)
reply = nil
loop do
request = worker.recv reply
# Generate UUID and save message to disk
uuid = SecureRandom.uuid
filename = request_filename uuid
File.open(filename, 'w') { |fh| fh.write request.to_json }
# Send UUID through to message queue
pipe.send_string uuid
# Now send UUID back to client
# Done by the worker.recv at the top of the loop
reply = ["200", uuid]
end
end
def titanic_reply
worker = MajorDomoWorker.new('tcp://localhost:5555', 'titanic.reply')
reply = nil
loop do
request = worker.recv reply
uuid = request.shift
if File.exist?(reply_filename(uuid))
reply = %w[200]
reply.concat JSON.parse(File.read(reply_filename(uuid)))
elsif File.exist?(request_filename(uuid))
reply = %w[300] # pending
else
reply = %w[400] # unknown
end
end
end
def titanic_close
worker = MajorDomoWorker.new('tcp://localhost:5555', 'titanic.close')
reply = nil
loop do
request = worker.recv reply
uuid = request.shift
File.unlink(request_filename(uuid)) if File.exist?(request_filename(uuid))
File.unlink(reply_filename(uuid)) if File.exist?(reply_filename(uuid))
reply = %w[200]
end
end
def service_success client, uuid
# Attempt to process a single request; returns true if successful
return true unless File.exist?(request_filename(uuid))
request = JSON.parse(File.read(request_filename(uuid)))
service = request.shift
# Use MMI protocol to check if service is available
mmi_request = [service]
client.send('mmi.service', mmi_request)
mmi_reply = client.recv
if mmi_reply and mmi_reply.first == "200"
client.send service, request
reply = client.recv
if reply
File.open(reply_filename(uuid), 'w') { |fh| fh.write reply.to_json }
return true
end
end
false
end
context = ZMQ::Context.new
# Create MDP client session with short timeout
client = MajorDomoClient.new("tcp://localhost:5555")
client.timeout = 1000 # 1 sec
# client.retries = 1 # only 1 retry
pipe = context.socket(ZMQ::PAIR)
pipe.setsockopt ZMQ::LINGER, 0
pipe.bind("inproc://titanic")
peer = context.socket(ZMQ::PAIR)
peer.setsockopt ZMQ::LINGER, 0
peer.connect("inproc://titanic")
Thread.start do
begin
titanic_request peer
rescue Exception => e
puts e ; puts e.backtrace.join("\n")
end
end
Thread.start do
begin
titanic_reply
rescue Exception => e
puts e ; puts e.backtrace.join("\n")
end
end
Thread.start do
begin
titanic_close
rescue Exception => e
puts e ; puts e.backtrace.join("\n")
end
end
poller = ZMQ::Poller.new
poller.register(pipe, ZMQ::POLLIN)
# Ensure message directory exists
Dir.mkdir(TITANIC_DIR) unless Dir.exist?(TITANIC_DIR)
# Main dispatcher loop
queue = TITANIC_DIR.join('queue')
# Ensure queue file exists and is empty
File.open(queue, 'w') { |fh| fh.write '' }
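# Queue file format (illustrative): one line per request uuid, prefixed with
# "-" while the request is pending; the dispatcher below rewrites an entry
# without the leading "-" once it has been processed.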
loop do
items = poller.poll(1000)
if items > 0
uuid = ""
pipe.recv_string uuid
File.open(queue, 'a') { |fh| fh.write "-#{uuid}\n" }
end
# Brute-force dispatcher
# yeah yeah... ugly
new = []
lines = File.read(queue).split("\n")
lines.each do |line|
if line =~ /^-(.*)$/
uuid = $1
puts "I: processing request #{uuid}"
if service_success client, uuid
# mark queue entry as processed
new << uuid
else
new << line
end
else
new << line
end
end
File.open(queue, 'w') { |fh| fh.write new.join("\n") + "\n" }
end
| 21.994737 | 78 | 0.676717 |
3964f62ee24beaa5db8a6332fdfc9c57fdadb525 | 386 | module AlertParser
module Parser
class GrowthTrend < Base
def long_term_growth(value, locale)
locale.format_percent(value)
end
def long_term_growth_one_week_ago(value, locale)
locale.format_percent(value)
end
def positive?
data[:long_term_growth].to_f > data[:long_term_growth_one_week_ago].to_f
end
end
end
end
| 21.444444 | 80 | 0.678756 |
1d4b4bc2f1be7025e5b29cb49164737f0f313baf | 213 | FactoryGirl.define do
factory :index, class: Edgar::Index do
data = File.read('spec/support/index.idx')
defaults = {}
initialize_with { Edgar::Index.new(data, defaults.merge(attributes)) }
end
end
| 26.625 | 74 | 0.694836 |
7acee9e78a68387cab221698f6fdbe3f1c2101e2 | 869 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'cocoapods-ppbuild/gem_version.rb'
Gem::Specification.new do |spec|
spec.name = 'cocoapods-ppbuild'
spec.version = CocoapodsPpbuild::VERSION
spec.authors = ['彭懂']
spec.email = ['[email protected]']
spec.description   = %q{Compiles project dependencies as static libraries to improve build speed.}
spec.summary       = %q{Compiles project dependencies as static libraries to improve build speed.}
spec.homepage = 'https://github.com/pdcodeunder/cocoapods-ppbuild.git'
spec.license = 'MIT'
spec.files = Dir['lib/**/*']
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
spec.add_development_dependency 'bundler', '~> 1.3'
spec.add_development_dependency 'rake'
end
| 36.208333 | 77 | 0.662831 |
186cf508c8ff907d3205a86f7186bac6b3984cc4 | 236 | class Sparkbox < Cask
version :latest
sha256 :no_check
url 'http://t.icyblaze.com/sblatest'
appcast 'http://matrix.icyblaze.com/index.php/checkupdate/p/8'
homepage 'http://www.icyblaze.com/sparkbox'
app 'Sparkbox.app'
end
| 21.454545 | 64 | 0.724576 |
91ab2d3ae75cd1005f026b969186c16f50e7ae92 | 622 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
require File.expand_path('../shared/quo', __FILE__)
describe "Numeric#quo" do
ruby_version_is ""..."1.9" do
it_behaves_like :numeric_quo_18, :quo
end
ruby_version_is "1.9" do
it "returns the result of calling self#/ with other" do
obj = NumericSpecs::Subclass.new
obj.should_receive(:coerce).twice.and_return([19,19])
obj.should_receive(:<=>).any_number_of_times.and_return(1)
obj.should_receive(:/).and_return(20)
obj.quo(19).should == 20
end
end
end
| 29.619048 | 64 | 0.689711 |
4a3e21333e8d7f03ad54c7de18f3156a3417e4ce | 11,366 | # frozen_string_literal: true
require "mail_task"
RSpec.describe Tasks::ChangeTypeController, :postgres, type: :controller do
describe "POST tasks/change_type/:id" do
let(:user) { create(:user) }
let(:assigner) { create(:user) }
let(:root_task) { create(:root_task) }
let(:old_instructions) { "Some instructions" }
let(:new_instructions) { "New instructions" }
let(:params) { { task: { type: new_task_type, instructions: new_instructions }, id: task.id } }
let(:task) { parent_task.children.first }
subject { patch(:update, params: params) }
before do
User.authenticate!(user: user)
Colocated.singleton.add_user(user)
end
context "with the correct parameters" do
context "for a colocated task" do
let(:old_task_type_trait) { :ihp }
let(:new_task_type) { OtherColocatedTask }
let(:parent_task) do
create(
:ama_colocated_task,
old_task_type_trait,
appeal: root_task.appeal,
parent_id: root_task.id,
assigned_by: assigner,
instructions: [old_instructions]
)
end
let!(:child_task) do
create(
:ama_colocated_task,
old_task_type_trait,
appeal: parent_task.appeal,
parent: parent_task,
instructions: [old_instructions],
assigned_by: assigner,
assigned_to: create(:user, :vlj_support_user)
)
end
it "should update successfully" do
subject
expect(response.status).to eq 200
response_body = JSON.parse(response.body)["tasks"]["data"].sort_by { |hash| hash["id"].to_i }.reverse!
expect(response_body.length).to eq 3
new_parent_id = response_body.first["id"]
new_parent = response_body.find { |t| t["id"] == new_parent_id.to_s }
expect(new_parent["id"]).not_to eq parent_task.id.to_s
expect(new_parent["attributes"]["label"]).to eq new_task_type.label
expect(new_parent["attributes"]["status"]).to eq Constants.TASK_STATUSES.assigned
expect(new_parent["attributes"]["instructions"]).to include old_instructions
expect(new_parent["attributes"]["instructions"]).to include new_instructions
expect(new_parent["attributes"]["assigned_to"]["id"]).to eq parent_task.assigned_to_id
expect(new_parent["attributes"]["assigned_by"]["pg_id"]).to eq parent_task.assigned_by_id
expect(new_parent["attributes"]["appeal_id"]).to eq parent_task.appeal_id
expect(task.reload.status).to eq Constants.TASK_STATUSES.cancelled
expect(parent_task.reload.status).to eq Constants.TASK_STATUSES.cancelled
end
context "that needs reassigning" do
let(:new_task_type) { FoiaColocatedTask }
it "should update successfully" do
subject
expect(response.status).to eq 200
response_body = JSON.parse(response.body)["tasks"]["data"].sort_by { |hash| hash["id"].to_i }.reverse!
expect(response_body.length).to eq 4
expect(response_body.first["id"]).not_to eq task.id.to_s
expect(response_body.first["attributes"]["label"]).to eq FoiaTask.name.titlecase
expect(response_body.first["attributes"]["status"]).to eq task.status
expect(response_body.first["attributes"]["instructions"]).to include old_instructions
expect(response_body.first["attributes"]["instructions"]).to include new_instructions
expect(response_body.first["attributes"]["type"]).to eq FoiaTask.name
expect(response_body.first["attributes"]["assigned_to"]["id"]).to eq FoiaColocatedTask.default_assignee.id
expect(response_body.first["attributes"]["assigned_by"]["pg_id"]).to eq task.assigned_by_id
expect(response_body.first["attributes"]["appeal_id"]).to eq task.appeal_id
new_parent_id = Task.find(response_body.first["id"]).parent_id
new_parent = response_body.find { |t| t["id"] == new_parent_id.to_s }
expect(new_parent["id"]).not_to eq parent_task.id.to_s
expect(new_parent["attributes"]["label"]).to eq new_task_type.label
expect(new_parent["attributes"]["status"]).to eq parent_task.status
expect(new_parent["attributes"]["instructions"]).to include old_instructions
expect(new_parent["attributes"]["instructions"]).to include new_instructions
expect(new_parent["attributes"]["type"]).to eq FoiaColocatedTask.name
expect(new_parent["attributes"]["assigned_to"]["id"]).to eq FoiaColocatedTask.default_assignee.id
expect(new_parent["attributes"]["assigned_by"]["pg_id"]).to eq parent_task.assigned_by_id
expect(new_parent["attributes"]["appeal_id"]).to eq parent_task.appeal_id
expect(task.reload.status).to eq Constants.TASK_STATUSES.cancelled
expect(parent_task.reload.status).to eq Constants.TASK_STATUSES.cancelled
end
end
end
context "for a mail task" do
let(:old_task_type) { DeathCertificateMailTask }
let(:new_task_type) { AddressChangeMailTask }
let(:grandparent_task) do
old_task_type.create!(
appeal: root_task.appeal,
parent_id: root_task.id,
assigned_by: assigner,
assigned_to: MailTeam.singleton
)
end
let!(:parent_task) do
old_task_type.create!(
appeal: grandparent_task.appeal,
parent_id: grandparent_task.id,
assigned_by: assigner,
assigned_to: Colocated.singleton,
instructions: [old_instructions]
)
end
let!(:user_task) do
old_task_type.create!(
appeal: parent_task.appeal,
parent_id: parent_task.id,
assigned_to: create(:user),
assigned_by: assigner,
instructions: [old_instructions]
)
end
it "should update successfully" do
subject
expect(response.status).to eq 200
response_body = JSON.parse(response.body)["tasks"]["data"].sort_by { |hash| hash["id"].to_i }.reverse!
# This is the parent of the task we started, because Colocated tasks do not auto-assign user tasks.
expect(response_body.length).to eq 5
expect(response_body.first["id"]).not_to eq task.id.to_s
expect(response_body.first["attributes"]["label"]).to eq new_task_type.label
expect(response_body.first["attributes"]["status"]).to eq task.status
expect(response_body.first["attributes"]["status"]).to eq Constants.TASK_STATUSES.assigned
expect(response_body.first["attributes"]["instructions"]).to include old_instructions
expect(response_body.first["attributes"]["instructions"]).to include new_instructions
expect(response_body.first["attributes"]["assigned_to"]["id"]).to eq parent_task.assigned_to_id
expect(response_body.first["attributes"]["assigned_by"]["pg_id"]).to eq parent_task.assigned_by_id
expect(response_body.first["attributes"]["appeal_id"]).to eq task.appeal_id
# This refers to the organization task of the same type, but assigned to the Mail Team:
new_grandparent_id = Task.find(response_body.first["id"]).parent_id
new_grandparent = response_body.find { |t| t["id"] == new_grandparent_id.to_s }
expect(new_grandparent["id"]).not_to eq grandparent_task.id.to_s
expect(new_grandparent["attributes"]["status"]).to eq Constants.TASK_STATUSES.on_hold
expect(new_grandparent["attributes"]["label"]).to eq new_task_type.label
expect(new_grandparent["attributes"]["assigned_to"]["id"]).to eq grandparent_task.assigned_to_id
expect(new_grandparent["attributes"]["assigned_by"]["pg_id"]).to eq grandparent_task.assigned_by_id
expect(new_grandparent["attributes"]["appeal_id"]).to eq grandparent_task.appeal_id
expect(task.reload.status).to eq Constants.TASK_STATUSES.cancelled
expect(parent_task.reload.status).to eq Constants.TASK_STATUSES.cancelled
expect(grandparent_task.reload.status).to eq Constants.TASK_STATUSES.cancelled
end
context "that needs reassigning" do
let(:new_task_type) { CongressionalInterestMailTask }
before do
LitigationSupport.singleton.add_user(create(:user))
end
it "should reassign the task when changing the type" do
subject
expect(response.status).to eq 200
response_body = JSON.parse(response.body)["tasks"]["data"].sort_by { |hash| hash["id"].to_i }.reverse!
expect(response_body.length).to eq 5
expect(response_body.first["id"]).not_to eq task.id.to_s
expect(response_body.first["attributes"]["label"]).to eq new_task_type.label
expect(response_body.first["attributes"]["status"]).to eq task.status
expect(response_body.first["attributes"]["instructions"]).to include old_instructions
expect(response_body.first["attributes"]["instructions"]).to include new_instructions
expect(response_body.first["attributes"]["assigned_to"]["id"]).to eq LitigationSupport.singleton.id
expect(response_body.first["attributes"]["assigned_to"]["type"]).to eq LitigationSupport.singleton.type
expect(response_body.first["attributes"]["assigned_by"]["pg_id"]).to eq task.assigned_by_id
new_parent_id = Task.find(response_body.first["id"]).parent_id
new_parent = response_body.find { |t| t["id"] == new_parent_id.to_s }
expect(new_parent["id"]).not_to eq grandparent_task.id.to_s
expect(new_parent["attributes"]["label"]).to eq new_task_type.label
expect(new_parent["attributes"]["assigned_to"]["id"]).to eq grandparent_task.assigned_to_id
expect(new_parent["attributes"]["assigned_by"]["pg_id"]).to eq grandparent_task.assigned_by_id
expect(new_parent["attributes"]["appeal_id"]).to eq grandparent_task.appeal_id
expect(task.reload.status).to eq Constants.TASK_STATUSES.cancelled
expect(parent_task.reload.status).to eq Constants.TASK_STATUSES.cancelled
expect(grandparent_task.reload.status).to eq Constants.TASK_STATUSES.cancelled
end
end
end
end
context "for a non supported task type" do
let(:new_task_type) { PreRoutingFoiaColocatedTask }
let(:parent_task) do
create(
:ama_colocated_task,
:ihp,
appeal: root_task.appeal,
parent_id: root_task.id,
assigned_by: assigner,
instructions: [old_instructions]
)
end
let!(:child_task) do
create(
:ama_colocated_task,
:ihp,
appeal: parent_task.appeal,
parent: parent_task,
assigned_by: assigner,
assigned_to: create(:user),
instructions: [old_instructions]
)
end
it "returns an error" do
subject
expect(response.status).to eq(403)
end
end
end
end
| 46.016194 | 118 | 0.649305 |
acf6c1e51e5760baceea6b619b6506cd379cd3b9 | 1,005 | cask "skim" do
version "1.6.1,131"
sha256 "50139ee146d9bad491bc9de992b9bc1e195810f5119ced08d049037d2c05289e"
url "https://downloads.sourceforge.net/skim-app/Skim/Skim-#{version.before_comma}/Skim-#{version.before_comma}.dmg",
verified: "downloads.sourceforge.net/skim-app/"
name "Skim"
desc "PDF reader and note-taking application"
homepage "https://skim-app.sourceforge.io/"
livecheck do
url "https://skim-app.sourceforge.io/skim.xml"
strategy :sparkle
end
auto_updates true
app "Skim.app"
binary "#{appdir}/Skim.app/Contents/SharedSupport/displayline"
binary "#{appdir}/Skim.app/Contents/SharedSupport/skimnotes"
binary "#{appdir}/Skim.app/Contents/SharedSupport/skimpdf"
zap trash: [
"~/Library/Preferences/net.sourceforge.skim-app.skim.plist",
"~/Library/Preferences/net.sourceforge.skim-app.skim.bookmarks.plist",
"~/Library/Caches/net.sourceforge.skim-app.skim",
"~/Library/Cookies/net.sourceforge.skim-app.skim.binarycookies",
]
end
| 33.5 | 118 | 0.739303 |
382737b9c13ea6280c8ab44f8efb60242c385119 | 1,212 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Pcc
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.1
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Only loads a smaller set of middleware suitable for API only apps.
# Middleware like session, flash, cookies can be added back manually.
# Skip views, helpers and assets when generating a new resource.
config.api_only = true
# Log to standard out
config.logger = Logger.new(STDOUT)
end
end
| 32.756757 | 82 | 0.766502 |
625b417e711065d46e58973acac8bd9540a123aa | 2,561 | # frozen_string_literal: true
require 'spec_helper'
describe Fondy::Response do
before do
allow(Fondy::Signature).to receive(:build)
.with(params: hash_including, password: 'pass')
.and_return('valid_signature')
end
let(:http_response) do
double(
status: 200,
body: {
response: {
response_status: 'success',
actual_amount: 100,
order_status: 'approved',
signature: 'valid_signature',
},
}.to_json,
)
end
let(:response) do
described_class.new(http_response)
end
context 'with success response' do
it '#to_h returns all data' do
response_hash = {
response_status: 'success',
actual_amount: 100,
order_status: 'approved',
signature: 'valid_signature',
}
expect(response.to_h).to eq(response_hash)
end
it '#success? returns true' do
expect(response.success?).to eq(true)
end
it '#error? returns false' do
expect(response.error?).to eq(false)
end
it '#error_code returns nil' do
expect(response.error_code).to be_nil
end
it '#error_message returns nil' do
expect(response.error_message).to be_nil
end
it 'returns response keys data' do
expect(response.actual_amount).to eq(100)
expect(response.order_status).to eq('approved')
end
it '#respond_to? checks response keys' do
expect(response.respond_to?(:actual_amount)).to eq(true)
expect(response.respond_to?(:order_status)).to eq(true)
expect(response.respond_to?(:other_key)).to eq(false)
end
end
context 'with error response' do
let(:http_response) do
double(
status: 200,
body: {
response: {
response_status: 'failure',
error_message: 'Order not found',
error_code: 1018,
},
}.to_json,
)
end
it '#to_h returns all data' do
response_hash = {
response_status: 'failure',
error_message: 'Order not found',
error_code: 1018,
}
expect(response.to_h).to eq(response_hash)
end
it '#success? returns false' do
expect(response.success?).to eq(false)
end
it '#error? returns true' do
expect(response.error?).to eq(true)
end
it '#error_code returns error code' do
expect(response.error_code).to eq(1018)
end
it '#error_message returns error message' do
expect(response.error_message).to eq('Order not found')
end
end
end
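# Condensed usage sketch of the behaviour specified above (illustrative only;
# assumes an `http_response` shaped like the doubles in this spec):
#
#   response = Fondy::Response.new(http_response)
#   if response.success?
#     response.order_status                          # => "approved"
#   else
#     [response.error_code, response.error_message]  # => [1018, "Order not found"]
#   end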
| 23.712963 | 62 | 0.615775 |