hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
ac9b9c778e1ffe3b7236199c6d4e22f524b0d4d7 | 15,828 | # frozen_string_literal: true
require 'spec_helper'
module XenditApi
describe Client do
it 'takes an API key on initialization and convert it into token' do
  require 'base64'

  # The client is expected to Base64-encode "<api key>:" (HTTP basic-auth
  # style credential with an empty password) and expose it as #token.
  raw_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
  expected_token = Base64.strict_encode64("#{raw_key}:")

  client = Client.new(api_key: raw_key)
  expect(client.token).to eq expected_token
end
# Balance endpoint (GET /balance).
# NOTE(review): the describe label says `.get_balance` but the method
# exercised is `get_cash_balance` — confirm the intended public name.
describe '.get_balance' do
  context 'valid request' do
    before do
      # WebMock stub with a fixed JSON payload.
      @stub = stub_request(:get, 'https://api.xendit.co/balance')
        .to_return(status: 200, body: '{"balance": 1241231}', headers: {})
    end

    it 'should return the current balance of the merchant account' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      result = client.get_cash_balance
      expect(result.balance).to eq 1_241_231
      # balance_cents is presumably balance * 100, derived by the entity —
      # confirm against XenditApi::Entities if this assertion ever changes.
      expect(result.balance_cents).to eq 124_123_100
      expect(@stub).to have_been_requested
    end
  end

  context 'no token provided' do
    # An empty api_key is expected to make the call fail and return nil.
    it 'should return authentication failed as the response' do
      client = Client.new(api_key: '')
      result = client.get_cash_balance
      expect(result).to eq nil
    end
  end
end
# Invoice fetch endpoint (GET /v2/invoices/:id).
describe '.get_invoice' do
  context 'valid request' do
    before do
      # The invoice id embedded in the stubbed URL comes from the fixture,
      # so the request made by the client must match it exactly.
      data = read_file_fixture('invoice.json')
      @parsed_data = JSON.parse(data)
      @stub = stub_request(:get, 'https://api.xendit.co/v2/invoices/' + @parsed_data['id'])
        .to_return(status: 200, body: data, headers: {})
    end

    it 'should return the invoice fetched by the id' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      result = client.get_invoice(id: @parsed_data['id'])
      expect(result.class.name).to eq 'XenditApi::Entities::Invoice'
      expect(result.amount).to eq @parsed_data['amount']
      expect(@stub).to have_been_requested
    end
  end

  context 'no token provided' do
    it 'should return authentication failed as the response' do
      client = Client.new(api_key: '')
      result = client.get_invoice(id: 'random_id')
      expect(result).to eq nil
    end
  end
end
# Invoice creation endpoint (POST /v2/invoices); parameters and the
# expected response both come from the invoice.json fixture.
describe '.create_invoice' do
  let(:data) { read_file_fixture('invoice.json') }
  let(:parsed_data) { JSON.parse(data) }

  context 'valid request' do
    before do
      @stub = stub_request(:post, 'https://api.xendit.co/v2/invoices')
        .to_return(status: 201, body: data, headers: {})
    end

    it 'should return the invoice created by the request' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      result = client.create_invoice(
        external_id: parsed_data['external_id'],
        payer_email: parsed_data['payer_email'],
        description: parsed_data['description'],
        amount: parsed_data['amount']
      )
      expect(result.class.name).to eq 'XenditApi::Entities::Invoice'
      expect(result.amount).to eq parsed_data['amount']
      expect(@stub).to have_been_requested
    end
  end

  context 'no token provided' do
    it 'should return authentication failed as the response' do
      client = Client.new(api_key: '')
      result = client.create_invoice(
        external_id: parsed_data['external_id'],
        payer_email: parsed_data['payer_email'],
        description: parsed_data['description'],
        amount: parsed_data['amount']
      )
      expect(result).to eq nil
    end
  end
end
# Fixed virtual account creation (POST /callback_virtual_accounts).
describe '.create_fixed_virtual_account' do
  let(:data) { read_file_fixture('virtual_account.json') }
  let(:parsed_data) { JSON.parse(data) }

  context 'valid request' do
    before do
      @stub = stub_request(:post, 'https://api.xendit.co/callback_virtual_accounts')
        .to_return(status: 201, body: data, headers: {})
    end

    it 'should return the virtual_account created by the request' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      # Note the client's keyword is virtual_account_number while the
      # fixture/response field is account_number.
      result = client.create_fixed_virtual_account(
        external_id: parsed_data['external_id'],
        bank_code: parsed_data['bank_code'],
        name: parsed_data['name'],
        virtual_account_number: parsed_data['account_number']
      )
      expect(result.class.name).to eq 'XenditApi::Entities::VirtualAccount'
      expect(result.account_number).to eq parsed_data['account_number']
      expect(@stub).to have_been_requested
    end
  end

  context 'no token provided' do
    it 'should return authentication failed as the response' do
      client = Client.new(api_key: '')
      result = client.create_fixed_virtual_account(
        external_id: parsed_data['external_id'],
        bank_code: parsed_data['bank_code'],
        name: parsed_data['name'],
        virtual_account_number: parsed_data['account_number']
      )
      expect(result).to eq nil
    end
  end
end
# Disbursement creation (POST /disbursements), with and without the
# X-IDEMPOTENCY-KEY request header.
describe '.create_disbursement' do
  let(:data) { read_file_fixture('disbursement.json') }
  let(:parsed_data) { JSON.parse(data) }

  context 'valid request' do
    before do
      # Two stubs on the same URL: one constrained to the idempotency
      # header, one unconstrained. NOTE(review): WebMock gives precedence to
      # the most recently registered matching stub — confirm the
      # header-constrained stub still records the header-carrying request.
      @stub_with_header = stub_request(:post, 'https://api.xendit.co/disbursements')
        .with(headers: { 'X-IDEMPOTENCY-KEY' => 'uniqueUID' })
        .to_return(status: 201, body: data, headers: {})
      @stub_without_header = stub_request(:post, 'https://api.xendit.co/disbursements')
        .to_return(status: 201, body: data, headers: {})
    end

    it 'should be able to create disbursement with idempotency_key' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      result = client.create_disbursement(
        idempotency_key: 'uniqueUID',
        external_id: parsed_data['external_id'],
        bank_code: parsed_data['bank_code'],
        account_holder_name: parsed_data['account_holder_name'],
        account_number: parsed_data['account_number'],
        description: parsed_data['description'],
        amount: parsed_data['amount']
      )
      expect(result.class.name).to eq 'XenditApi::Entities::Disbursement'
      expect(result.amount).to eq parsed_data['amount']
      expect(@stub_with_header).to have_been_requested
    end

    it 'is also valid to create disbursement WITHOUT idempotency_key' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      result = client.create_disbursement(
        external_id: parsed_data['external_id'],
        bank_code: parsed_data['bank_code'],
        account_holder_name: parsed_data['account_holder_name'],
        account_number: parsed_data['account_number'],
        description: parsed_data['description'],
        amount: parsed_data['amount']
      )
      expect(result.class.name).to eq 'XenditApi::Entities::Disbursement'
      expect(result.amount).to eq parsed_data['amount']
      expect(@stub_without_header).to have_been_requested
      expect(@stub_with_header).to_not have_been_requested
    end
  end

  context 'no token provided' do
    it 'should return authentication failed as the response' do
      client = Client.new(api_key: '')
      result = client.create_disbursement(
        external_id: parsed_data['external_id'],
        bank_code: parsed_data['bank_code'],
        account_holder_name: parsed_data['account_holder_name'],
        account_number: parsed_data['account_number'],
        description: parsed_data['description'],
        amount: parsed_data['amount']
      )
      expect(result).to eq nil
    end
  end
end
# Disbursement fetch (GET /v2/disbursements/:id), fixture-driven like
# .get_invoice above.
describe '.get_disbursement' do
  context 'valid request' do
    before do
      data = read_file_fixture('disbursement.json')
      @parsed_data = JSON.parse(data)
      @stub = stub_request(:get, 'https://api.xendit.co/v2/disbursements/' + @parsed_data['id'])
        .to_return(status: 200, body: data, headers: {})
    end

    it 'should return the disbursement fetched by the id' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      result = client.get_disbursement(id: @parsed_data['id'])
      expect(result.class.name).to eq 'XenditApi::Entities::Disbursement'
      expect(result.amount).to eq @parsed_data['amount']
      expect(@stub).to have_been_requested
    end
  end

  context 'no token provided' do
    it 'should return authentication failed as the response' do
      client = Client.new(api_key: '')
      result = client.get_disbursement(id: 'random_id')
      expect(result).to eq nil
    end
  end
end
# Bank list for virtual accounts (GET /available_virtual_account_banks).
describe '.get_banks_for_virtual_account' do
  context 'valid request' do
    before do
      data = read_file_fixture('banks.json')
      @parsed_data = JSON.parse(data)
      @stub = stub_request(:get, 'https://api.xendit.co/available_virtual_account_banks')
        .to_return(status: 200, body: data, headers: {})
    end

    it 'should return an array of banks' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      result = client.get_banks_for_virtual_account
      expect(result.class.name).to eq 'Array'
      expect(result.first.class.name).to eq 'XenditApi::Entities::Bank'
      expect(@stub).to have_been_requested
    end
  end

  context 'no token provided' do
    it 'should return authentication failed as the response' do
      client = Client.new(api_key: '')
      result = client.get_banks_for_virtual_account
      expect(result).to eq nil
    end
  end
end
# Bank list for disbursements (GET /available_disbursements_banks);
# mirrors the virtual-account bank list spec above.
describe '.get_banks_for_disbursement' do
  context 'valid request' do
    before do
      data = read_file_fixture('banks.json')
      @parsed_data = JSON.parse(data)
      @stub = stub_request(:get, 'https://api.xendit.co/available_disbursements_banks')
        .to_return(status: 200, body: data, headers: {})
    end

    it 'should return an array of banks' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      result = client.get_banks_for_disbursement
      expect(result.class.name).to eq 'Array'
      expect(result.first.class.name).to eq 'XenditApi::Entities::Bank'
      expect(@stub).to have_been_requested
    end
  end

  context 'no token provided' do
    it 'should return authentication failed as the response' do
      client = Client.new(api_key: '')
      result = client.get_banks_for_disbursement
      expect(result).to eq nil
    end
  end
end
# Bank account data lookup (POST /bank_account_data_requests).
# Unlike the other endpoints this one returns the raw parsed body (a Hash),
# not a wrapped entity.
describe '.get_bank_account_data' do
  context 'valid request' do
    before do
      @stub = stub_request(:post, 'https://api.xendit.co/bank_account_data_requests')
        .to_return(status: 200, body: '{}', headers: {})
    end

    it 'should return empty hash' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      result = client.get_bank_account_data(account_number: 'stubbed_number', bank_code: 'MANDIRI')
      expect(result).to eq({})
      expect(@stub).to have_been_requested
    end
  end

  context 'no token provided' do
    it 'should return authentication failed as the response' do
      client = Client.new(api_key: '')
      result = client.get_bank_account_data(account_number: 'stubbed_number', bank_code: 'MANDIRI')
      expect(result).to eq nil
    end
  end
end
# Credit card charge (POST /credit_card_charges). The `token` argument is
# the tokenized card, taken here from the fixture's id field.
describe '.charge_credit_card' do
  let(:data) { read_file_fixture('credit_card_charge.json') }
  let(:parsed_data) { JSON.parse(data) }

  context 'valid request' do
    before do
      @stub = stub_request(:post, 'https://api.xendit.co/credit_card_charges')
        .to_return(status: 201, body: data, headers: {})
    end

    it 'should return the charge created by the request' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      result = client.charge_credit_card(
        token: parsed_data['id'],
        external_id: parsed_data['external_id'],
        amount: parsed_data['capture_amount']
      )
      expect(result.class.name).to eq 'XenditApi::Entities::CardCharge'
      expect(result.capture_amount).to eq parsed_data['capture_amount']
      expect(@stub).to have_been_requested
    end
  end

  context 'no token provided' do
    it 'should return authentication failed as the response' do
      client = Client.new(api_key: '')
      result = client.charge_credit_card(
        token: parsed_data['id'],
        external_id: parsed_data['external_id'],
        amount: parsed_data['capture_amount']
      )
      expect(result).to eq nil
    end
  end
end
# E-wallet payment creation (POST /ewallet-payment).
describe '.create ewallet payment' do
  let(:data) { read_file_fixture('ewallet.json') }
  let(:parsed_data) { JSON.parse(data) }

  context 'valid request' do
    before do
      @stub = stub_request(:post, 'https://api.xendit.co/ewallet-payment')
        .to_return(status: 200, body: data, headers: {})
    end

    it 'should return the ewallet created by the request' do
      api_key = 'xnd_development_P4qDfOss0OCpl8RSiCwZ3jw=='
      client = Client.new(api_key: api_key)
      result = client.create_ewallet_payment(
        external_id: parsed_data['external_id'],
        phone: parsed_data['phone'],
        ewallet_type: parsed_data['ewallet_type'],
        amount: parsed_data['amount']
      )
      expect(result.class.name).to eq 'XenditApi::Entities::Ewallet'
      expect(result.amount).to eq parsed_data['amount']
      expect(@stub).to have_been_requested
    end
  end

  # FIX: this context was previously nested INSIDE the valid-request
  # context, so that context's before-hook stub applied to it. It is now a
  # sibling, matching every other endpoint spec in this file. The label is
  # also normalized ('valid request' had been spelled 'valid_request').
  context 'no token provided' do
    it 'should return authentication failed as the response' do
      client = Client.new(api_key: '')
      result = client.create_ewallet_payment(
        external_id: parsed_data['external_id'],
        phone: parsed_data['phone'],
        ewallet_type: parsed_data['ewallet_type'],
        amount: parsed_data['amount']
      )
      expect(result).to eq nil
    end
  end
end
end
end
| 35.330357 | 104 | 0.594516 |
611d5ef4d456777eda639ffeb9b0238e95e0feeb | 3,322 | # frozen_string_literal: true
# Controller for the Guidances page that handles Group info
class GuidanceGroupsController < ApplicationController
  after_action :verify_authorized

  # Five member actions previously repeated the same find + Pundit
  # `authorize` pair; it is centralized here so every action stays covered
  # by the after_action :verify_authorized check.
  before_action :set_guidance_group, only: %i[admin_edit admin_update
                                              admin_update_publish
                                              admin_update_unpublish
                                              admin_destroy]

  respond_to :html

  # TODO: We should really update this to be RESTful and move it either
  # into the `org_admin` namespace or a new `admin` namespace.
  #
  # Publish and Unpublish actions should be consolidated with :update
  # after conversion to RESTful actions

  # GET /org/admin/guidancegroup/:id/admin_new
  def admin_new
    @guidance_group = GuidanceGroup.new(org_id: current_user.org.id)
    authorize @guidance_group
  end

  # POST /org/admin/guidancegroup/:id/admin_create
  def admin_create
    # Ensure that the user can only create GuidanceGroups for their Org
    args = guidance_group_params.to_h.merge({ org_id: current_user.org.id })
    @guidance_group = GuidanceGroup.new(args)
    authorize @guidance_group
    if @guidance_group.save
      flash.now[:notice] = success_message(@guidance_group, _('created'))
      render :admin_edit
    else
      flash.now[:alert] = failure_message(@guidance_group, _('create'))
      render :admin_new
    end
  end

  # GET /org/admin/guidancegroup/:id/admin_edit
  # Record lookup + authorization happen in set_guidance_group.
  def admin_edit; end

  # PUT /org/admin/guidancegroup/:id/admin_update
  def admin_update
    if @guidance_group.update(guidance_group_params)
      flash.now[:notice] = success_message(@guidance_group, _('saved'))
    else
      flash.now[:alert] = failure_message(@guidance_group, _('save'))
    end
    render :admin_edit
  end

  # PUT /org/admin/guidancegroup/:id/admin_update_publish
  def admin_update_publish
    if @guidance_group.update(published: true)
      flash[:notice] = _('Your guidance group has been published and is now available to users.')
    else
      flash[:alert] = failure_message(@guidance_group, _('publish'))
    end
    redirect_to admin_index_guidance_path
  end

  # PUT /org/admin/guidancegroup/:id/admin_update_unpublish
  def admin_update_unpublish
    if @guidance_group.update(published: false)
      flash[:notice] = _('Your guidance group is no longer published and will not be available to users.')
    else
      flash[:alert] = failure_message(@guidance_group, _('unpublish'))
    end
    redirect_to admin_index_guidance_path
  end

  # DELETE /org/admin/guidancegroup/:id/admin_destroy
  def admin_destroy
    if @guidance_group.destroy
      flash[:notice] = success_message(@guidance_group, _('deleted'))
    else
      flash[:alert] = failure_message(@guidance_group, _('delete'))
    end
    redirect_to admin_index_guidance_path
  end

  private

  # Looks up the record for member actions and runs the Pundit check,
  # exactly as each action previously did inline.
  def set_guidance_group
    @guidance_group = GuidanceGroup.find(params[:id])
    authorize @guidance_group
  end

  # Strong-parameters whitelist for create/update.
  def guidance_group_params
    params.require(:guidance_group).permit(:org_id, :name, :published, :optional_subset)
  end
end
| 31.942308 | 106 | 0.729681 |
1166c42bf18ab4c8b94a1ee056e314ddcd631951 | 1,200 | class Chakra < Formula
desc "The core part of the JavaScript engine that powers Microsoft Edge"
homepage "https://github.com/Microsoft/ChakraCore"
url "https://github.com/Microsoft/ChakraCore/archive/v1.10.0.tar.gz"
sha256 "1d1ad8e930219a382d9dafd25e1d5b9eaabeb6c620fdb6798aaececffe092f0f"

bottle do
  cellar :any
  sha256 "4976439073daa93b080626632687aa0dccfbbb486e86464409a0a8060349f66e" => :high_sierra
  sha256 "2e497df484f75df4069f52a4796f047043821dd597184944dbb0fb4a6e40943e" => :sierra
  sha256 "c14bde48c5a96a15b0362744228a2b85219d12e770d567abacfe17305090bf61" => :el_capitan
end

depends_on "cmake" => :build
depends_on "icu4c"

# Builds ChakraCore with its own build.sh (static, thin-LTO, linked against
# the Homebrew icu4c keg) and installs the `ch` shell as `chakra`.
def install
  system "./build.sh", "--lto-thin",
  "--static",
  "--icu=#{Formula["icu4c"].opt_include}",
  "--extra-defines=U_USING_ICU_NAMESPACE=1", # icu4c 61.1 compatability
  "-j=#{ENV.make_jobs}",
  "-y"
  bin.install "out/Release/ch" => "chakra"
end

# Smoke test: run a one-line script and check its output.
test do
  (testpath/"test.js").write("print('Hello world!');\n")
  assert_equal "Hello world!", shell_output("#{bin}/chakra test.js").chomp
end
end
| 37.5 | 94 | 0.663333 |
f8337c9f19fa1ae2fe14c5ce8f6b9b4bdf30b6c3 | 980 | # coding: utf-8
# Put lib/ on the load path so the gem's own version file can be required.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'heapy/version'

Gem::Specification.new do |spec|
  spec.name = "heapy"
  spec.version = Heapy::VERSION
  spec.authors = ["schneems"]
  spec.email = ["[email protected]"]
  spec.summary = %q{Inspects Ruby heap dumps}
  spec.description = %q{Got a heap dump? Great. Use this tool to see what's in it!}
  spec.homepage = "https://github.com/schneems/heapy"
  spec.license = "MIT"

  # Package every git-tracked file except tests/specs/features.
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir = "bin"
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_dependency "thor"
  spec.add_development_dependency "bundler", "> 1"
  spec.add_development_dependency "rake", "> 10.0"
  spec.add_development_dependency "rspec"
end
| 35 | 104 | 0.642857 |
e89e32a2a899398da99794122676a00119870aea | 425 | class GkeAlias < Formula
desc "gke-alias is a tool for setting aliases for Kube Config contexts"
homepage "https://github.com/cmaahs/homebrew-admin-scripts"
url "https://github.com/cmaahs/homebrew-admin-scripts.git"
version "0.0.2"
revision 1

# The tap ships prebuilt binaries per platform; install the matching one.
# NOTE(review): if neither branch matches, no install method is defined at
# all — presumably fine since Homebrew only targets macOS/Linux.
if OS.mac?
  def install
    bin.install "bin/darwin/gke-alias"
  end
elsif OS.linux?
  def install
    bin.install "bin/linux/gke-alias"
  end
end
end
| 22.368421 | 73 | 0.691765 |
f820b7bc5b3b41e312ce1df9db8fd3d2037a12fb | 466 | require 'bundler'
Bundler.require

# Each Sinatra route block's return value is the response body, so the
# serialized hash is returned directly. (The previous version parked each
# hash in a local variable that was never read.)

# GET / — the team roster as JSON.
get '/' do
  {
    :girl1 => 'Natalia',
    :girl2 => 'Victoria',
    :fav_color => 'Purple',
    :fav_drink => 'margarita'
  }.to_json
end

# GET /first_person — first member's profile as JSON.
get '/first_person' do
  {
    :name => 'Natalia',
    :fav_food => 'pasta',
    :fav_drink => 'coffee'
  }.to_json
end

# GET /second_person — second member's profile as JSON.
get '/second_person' do
  {
    :name => 'Victoria',
    :fav_food => 'pizza',
    :fav_drink => 'coca-cola'
  }.to_json
end
| 16.642857 | 29 | 0.583691 |
267776ae42719331d8022b43b48e44d7843680f2 | 954 | module Fog
module AzureRM
  class Resources
    # This class provides the actual implementation for service calls.
    class Real
      # Deletes an ARM deployment via the resource-management client (@rmc).
      # Returns true on success; Azure operation errors are logged context
      # and re-raised through raise_azure_exception.
      def delete_deployment(resource_group, deployment_name)
        msg = "Deleting Deployment: #{deployment_name} in Resource Group: #{resource_group}"
        Fog::Logger.debug msg
        begin
          @rmc.deployments.delete(resource_group, deployment_name)
        rescue MsRestAzure::AzureOperationError => e
          raise_azure_exception(e, msg)
        end
        Fog::Logger.debug "Deployment: #{deployment_name} in Resource Group: #{resource_group} deleted successfully."
        true
      end
    end

    # This class provides the mock implementation
    class Mock
      # Always succeeds; no remote call is made.
      def delete_deployment(_resource_group, deployment_name)
        Fog::Logger.debug "Deployment: #{deployment_name} deleted successfully."
        true
      end
    end
  end
end
end
| 32.896552 | 119 | 0.650943 |
2150b6bebb3f62015cfe9d01a607da20fa7e5212 | 111 | class PagesController < ApplicationController
# Landing page: authenticated users are redirected to their articles;
# guests fall through and render the default home template.
def home
  redirect_to articles_path if logged_in?
end
end
| 18.5 | 45 | 0.801802 |
089b49bf28c9c950b76a6bbf8383851d594fc03d | 2,918 | require 'spec_helper'
# rspec-puppet specs for the duplicity::file defined type.
describe 'duplicity::file' do
  let(:title) { '/path/to/file' }
  # Concat fragment paths embed what is presumably a digest of the resource
  # title — confirm against the manifest before changing these fixtures.
  let(:include_fragment) { '/etc/duply/system/include/b4a91649090a2784056565363583d067' }
  let(:exclude_fragment) { '/etc/duply/system/exclude/b4a91649090a2784056565363583d067' }
  let(:restore_exec) { 'restore /path/to/file' }
  let(:pre_condition) { <<-EOS
# declare profiles referenced later
duplicity::profile { 'system': }
class { 'duplicity': }
EOS
  }

  describe 'by default' do
    let(:params) { {} }

    specify {
      should contain_concat__fragment(include_fragment).with(
        'content' => "+ /path/to/file"
      )
    }
    specify { should contain_exec(restore_exec).with_command(/system fetch "path\/to\/file" "\/path\/to\/file"$/) }
    specify { should contain_exec(restore_exec).with_creates('/path/to/file') }
  end

  describe 'with ensure absent' do
    let(:params) { {:ensure => 'absent'} }

    specify { should_not contain_concat__fragment(include_fragment) }
    specify { should_not contain_exec(restore_exec) }
  end

  describe 'with ensure backup' do
    let(:params) { {:ensure => 'backup'} }

    specify { should_not contain_exec(restore_exec) }
  end

  # Invalid parameter values must fail catalog compilation with a message
  # naming the offending parameter.
  describe 'with invalid ensure' do
    let(:params) { {:ensure => 'foobar'} }

    specify do
      expect { should contain_concat__fragment(include_fragment) }.to raise_error(Puppet::Error, /ensure/)
    end
  end

  describe 'with invalid path' do
    let(:params) { {:path => 'relative/path'} }

    specify do
      expect { should contain_concat__fragment(include_fragment) }.to raise_error(Puppet::Error, /path/)
    end
  end

  describe 'with empty profile' do
    let(:params) { {:profile => ''} }

    specify do
      expect { should contain_concat__fragment(include_fragment) }.to raise_error(Puppet::Error, /profile/)
    end
  end

  describe 'with invalid profile' do
    let(:params) { {:profile => 'in val$d'} }

    specify do
      expect { should contain_concat__fragment(include_fragment) }.to raise_error(Puppet::Error, /profile/)
    end
  end

  # Exclusions are rendered as "- <path>" lines in the exclude fragment.
  describe 'with exclude => ["/a/b"]' do
    let(:params) { {:exclude => ['/a/b']} }

    specify { should contain_concat__fragment(exclude_fragment).with_content(/^\- \/a\/b$/) }
  end

  describe 'with exclude => ["/a", "/b"]' do
    let(:params) { {:exclude => ['/a', '/b']} }

    specify { should contain_concat__fragment(exclude_fragment).with_content(/^\- \/a$/) }
    specify { should contain_concat__fragment(exclude_fragment).with_content(/^\- \/b$/) }
  end

  describe 'with invalid exclude' do
    let(:params) { {:exclude => 'not-an-array'} }

    specify {
      expect { should contain_concat__fragment(exclude_fragment) }.to raise_error(Puppet::Error, /exclude/)
    }
  end

  describe 'with timeout => 60' do
    let(:params) { {:timeout => 60} }

    specify { should contain_exec(restore_exec).with_timeout(60) }
  end
end
| 29.18 | 115 | 0.656614 |
26005f57b412d0b4e6ea894ded465977b1dc0789 | 2,099 | # frozen_string_literal: false
module Psych
  ###
  # If an object defines +encode_with+, then an instance of Psych::Coder will
  # be passed to the method when the object is being serialized. The Coder
  # automatically assumes a Psych::Nodes::Mapping is being emitted. Other
  # objects like Sequence and Scalar may be emitted if +seq=+ or +scalar=+ are
  # called, respectively.
  class Coder
    attr_accessor :tag, :style, :implicit, :object
    attr_reader :type, :seq

    # +tag+ is the YAML tag for the node being emitted. A fresh coder
    # defaults to emitting a block-style mapping.
    def initialize tag
      @map = {}
      @seq = []
      @implicit = false
      @type = :map
      @tag = tag
      @style = Psych::Nodes::Mapping::BLOCK
      @scalar = nil
      @object = nil
    end

    # With no arguments, returns the current scalar value. The multi-argument
    # form (tag, value, ...) is deprecated — it switches the coder into
    # scalar mode as a side effect and warns under $VERBOSE.
    def scalar *args
      if args.length > 0
        warn "#{caller[0]}: Coder#scalar(a,b,c) is deprecated" if $VERBOSE
        @tag, @scalar, _ = args
        @type = :scalar
      end
      @scalar
    end

    # Emit a map. The coder will be yielded to the block.
    def map tag = @tag, style = @style
      @tag = tag
      @style = style
      yield self if block_given?
      @map
    end

    # Emit a scalar with +value+ and +tag+
    def represent_scalar tag, value
      self.tag = tag
      self.scalar = value
    end

    # Emit a sequence with +list+ and +tag+
    def represent_seq tag, list
      @tag = tag
      self.seq = list
    end

    # Emit a sequence with +map+ and +tag+
    def represent_map tag, map
      @tag = tag
      self.map = map
    end

    # Emit an arbitrary object +obj+ and +tag+
    def represent_object tag, obj
      @tag = tag
      @type = :object
      @object = obj
    end

    # Emit a scalar with +value+
    def scalar= value
      @type = :scalar
      @scalar = value
    end

    # Emit a map with +value+
    def map= map
      @type = :map
      @map = map
    end

    # Add a single +k+/+v+ pair to the map (and switch into map mode).
    def []= k, v
      @type = :map
      @map[k] = v
    end
    alias :add :[]=

    # Read a value from the map (switches into map mode).
    def [] k
      @type = :map
      @map[k]
    end

    # Emit a sequence of +list+
    def seq= list
      @type = :seq
      @seq = list
    end
  end
end
| 21.864583 | 78 | 0.549786 |
612b8675fa373e57b76d10bc8077be4d34a23b9a | 536 | require 'minitest/autorun'
require 'tracksale'
class TracksaleTest < Minitest::Test
  # A key set through Tracksale.configure must be visible on new clients.
  def test_configure_key
    Tracksale.configure do |config|
      config.key = 'foobar'
    end
    assert_equal 'foobar', Tracksale::Client.new.key
  end

  # force_dummy_client toggles which client class the configuration exposes:
  # the real Client by default, DummyClient when forced.
  def test_configure_client
    Tracksale.configure { |c| c.force_dummy_client(false) } # default
    assert_equal Tracksale::Client, Tracksale.configuration.client

    Tracksale.configure(&:force_dummy_client)
    assert_equal Tracksale::DummyClient, Tracksale.configuration.client
  end
end
| 26.8 | 71 | 0.757463 |
6282f5dd05f64b4521b44b6d230a096d84b22d5f | 5,433 | =begin
#UltraCart Rest API V2
#UltraCart REST API Version 2
OpenAPI spec version: 2.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.15-SNAPSHOT
=end
require 'date'
module UltracartClient
  # Swagger-generated model: one labeled revenue datapoint used by the
  # accounts-receivable retry statistics. Edits here are normally clobbered
  # by regeneration — change the generator template instead.
  class AccountsReceivableRetryStatRevenue
    # Label for this datapoint (e.g. a period name — presumably; confirm
    # against the API docs).
    attr_accessor :label

    # Revenue amount for this datapoint.
    attr_accessor :revenue

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'label' => :'label',
        :'revenue' => :'revenue'
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        :'label' => :'String',
        :'revenue' => :'Float'
      }
    end

    # Initializes the object
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)

      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }

      if attributes.has_key?(:'label')
        self.label = attributes[:'label']
      end

      if attributes.has_key?(:'revenue')
        self.revenue = attributes[:'revenue']
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
        label == o.label &&
        revenue == o.revenue
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [label, revenue].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end

      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        temp_model = UltracartClient.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 28.005155 | 107 | 0.614209 |
612c5d016b9244aa4428d086dbbe79f864b6d504 | 216 | module Realms
module Cards
# Card definition using the class-level DSL (faction/cost/primary/ally are
# presumably macros declared on Card — defined elsewhere).
class FederationShuttle < Card
  faction :trade_federation
  cost 1

  # Ability when played.
  primary do
    trade 2
  end

  # Extra ability when another trade-federation card is in play.
  ally do
    authority 4
  end
end
end
end
| 12.705882 | 34 | 0.569444 |
3938dbe1bce72ec7e4c344457042faf8abe918fd | 204 | module Private
class DepositsController < BaseController
  # Only activated, verified users may view deposit channels.
  before_action :auth_activated!
  before_action :auth_verified!

  # Lists every deposit channel in sorted order.
  # NOTE(review): `.sort` relies on DepositChannel implementing <=> —
  # confirm before changing the model.
  def index
    @deposits = DepositChannel.all.sort
  end
end
end
| 17 | 43 | 0.730392 |
bf38ccea3d9568d9804f996cedb6c6850bd5a8ed | 456 | #!/usr/bin/env ruby
# Boot script: loads the app environment and starts a pool of queue workers,
# one thread each, then blocks until they exit.
require File.expand_path(File.join(__FILE__, %w(.. .. config environment)))
require 'redis'
require 'redis-queue'
require 'libs/worker'
require 'libs/job'

# Let a failing worker thread crash the process instead of dying silently.
Thread.abort_on_exception = true

threads = []
App.workers_count.times do |i|
  threads << Thread.new do
    # new: true — each worker gets its own Redis connection, presumably to
    # avoid sharing one connection across threads.
    redis = App.redis(new: true)
    Worker.new(redis_queue: App.redis_queue(redis: redis)).run!
  end
end
puts "#{App.workers_count} workers started"
threads.each(&:join)
| 20.727273 | 75 | 0.719298 |
7a20653c4e14f9da92da16ad0e3144bfaa866097 | 4,807 | # frozen_string_literal: true
module Supermarket
  class Health
    #
    # This class encapsulates the logic used to perform health checks on the
    # system. The methods in here are mostly private and exist solely for their
    # side-effects, not their return values.
    #

    # Per-subsystem status values.
    REACHABLE = "reachable"
    UNKNOWN = "unknown"
    UNREACHABLE = "unreachable"

    # Feature flags the app knows about; check_features reports which are
    # enabled via the FEATURES environment variable.
    ALL_FEATURES = %w{tools fieri announcement github no_crawl}.freeze

    attr_reader :status, :supermarket, :postgresql, :sidekiq, :redis, :features

    def initialize
      @status = nil
      @supermarket = {}
      @features = {}
      # Every subsystem is assumed reachable until a check proves otherwise.
      @postgresql = { status: REACHABLE }
      @sidekiq = { status: REACHABLE }
      @redis = { status: REACHABLE }
    end

    #
    # Do a general health check: populate the per-subsystem hashes and then
    # derive the overall status string ("ok" / "not ok").
    #
    def check
      expired_ocid_tokens
      waiting_on_lock
      connections
      sidekiq_health
      redis_health
      check_features
      overall
    end

    private

    #
    # Which features are enabled?
    #
    def check_features
      # FIX: FEATURES may be unset; the previous `ENV["FEATURES"].split(",")`
      # raised NoMethodError on nil. Default to an empty string, and strip
      # whitespace so a value like "tools, fieri" also works.
      enabled = ENV.fetch("FEATURES", "").split(",").map(&:strip)
      @features = ALL_FEATURES.each_with_object({}) do |feature, result|
        result[feature] = enabled.include?(feature)
      end
    end

    #
    # Check to see if there are expired oc-id tokens
    #
    def expired_ocid_tokens
      postgres_health_metric do
        @supermarket[:expired_ocid_tokens] = Account
          .for("chef_oauth2")
          .where("oauth_expires < ?", Time.current)
          .count
      end
    end

    #
    # Check to see if any Postgres connections are waiting on a lock.
    # pg_stat_activity's column changed in Postgres 9.6 (waiting ->
    # wait_event), hence the version branch.
    #
    def waiting_on_lock
      wait_query = if ActiveRecord::Base.connection.postgresql_version < 90600
                     "select count(*) from pg_stat_activity where waiting='t'"
                   else
                     "select count(*) from pg_stat_activity WHERE wait_event is not NULL"
                   end
      postgres_health_metric do
        ActiveRecord::Base.connection
          .query(wait_query)
          .flatten
          .first
          .to_i
          .tap do |waiting_on_lock|
            @postgresql[:waiting_on_lock] = waiting_on_lock
          end
      end
    end

    #
    # Check to see how many active Postgres connections there are
    #
    def connections
      postgres_health_metric do
        ActiveRecord::Base.connection.query(
          "SELECT count(*) FROM pg_stat_activity"
        ).flatten.first.to_i.tap do |connections|
          @postgresql[:connections] = connections
        end
      end
    end

    #
    # Gather some various Sidekiq health metrics
    #
    def sidekiq_health
      redis_health_metric do
        Sidekiq::Queue.new.tap do |queue|
          @sidekiq[:latency] = queue.latency
          @sidekiq[:queued_jobs] = queue.size
        end
        Sidekiq::ScheduledSet.new.tap do |scheduled|
          @sidekiq[:scheduled_jobs] = scheduled.size
        end
        Sidekiq::RetrySet.new.tap do |retries|
          @sidekiq[:retryable_jobs] = retries.size
        end
        Sidekiq::DeadSet.new.tap do |dead|
          @sidekiq[:dead_jobs] = dead.size
        end
        Sidekiq::Stats.new.tap do |stats|
          @sidekiq[:total_processed] = stats.processed
          @sidekiq[:total_failed] = stats.failed
        end
        Sidekiq::Workers.new.tap do |workers|
          @sidekiq[:active_workers] = workers.size
        end
      end
    end

    #
    # Gather some various Redis health metrics
    #
    def redis_health
      redis_health_metric do
        redis_info = Sidekiq.redis(&:info)
        # -1 marks a metric missing from the INFO payload. NOTE: these keys
        # are stored as strings, unlike the symbol :status key.
        %w{uptime_in_seconds connected_clients used_memory used_memory_peak}.each do |key|
          @redis.store(key, redis_info.fetch(key, -1).to_i)
        end
      end
    end

    #
    # What is the overall system status
    #
    def overall
      @status = if @sidekiq[:status] == REACHABLE &&
          @postgresql[:status] == REACHABLE &&
          @redis[:status] == REACHABLE
                  "ok"
                else
                  "not ok"
                end
    end

    #
    # Perform an action against the Postgres database and if it fails, mark the
    # appropriate status.
    #
    def postgres_health_metric
      yield
    rescue ActiveRecord::ConnectionTimeoutError
      @postgresql[:status] = UNKNOWN
    rescue PG::ConnectionBad
      @postgresql[:status] = UNREACHABLE
    end

    #
    # Perform an action against Redis/Sidekiq and if it fails, mark the
    # appropriate status on both.
    #
    def redis_health_metric
      yield
    rescue Redis::TimeoutError
      @sidekiq[:status] = UNKNOWN
      @redis[:status] = UNKNOWN
    rescue Redis::CannotConnectError
      @sidekiq[:status] = UNREACHABLE
      @redis[:status] = UNREACHABLE
    end
  end
end
| 25.705882 | 90 | 0.600582 |
38d239f4710b4b0b1baac174037a49f2e7609607 | 2,484 | class UsersController < ApplicationController
before_action :set_user, only: [:show, :edit, :update, :destroy]
# GET /users
# GET /users.json
def index
@users = User.all
end
# GET /users/1
# GET /users/1.json
def show
end
# GET /users/new
def new
@user = User.new
end
# GET /users/1/edit
def edit
end
# POST /users
# POST /users.json
def create
@user = User.new(user_params)
respond_to do |format|
if @user.save
format.html { redirect_to @user, notice: 'User was successfully created.' }
format.json { render :show, status: :created, location: @user }
else
format.html { render :new }
format.json { render json: @user.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /users/1
# PATCH/PUT /users/1.json
def update
respond_to do |format|
if @user.update(user_params)
format.html { redirect_to @user, notice: 'User was successfully updated.' }
format.json { render :show, status: :ok, location: @user }
else
format.html { render :edit }
format.json { render json: @user.errors, status: :unprocessable_entity }
end
end
end
# DELETE /users/1
# DELETE /users/1.json
def destroy
@user.destroy
respond_to do |format|
format.html { redirect_to users_url, notice: 'User was successfully destroyed.' }
format.json { head :no_content }
end
end
def destroy_them_all
@user.destroy_by(params[:user])
@user.delete_by(params[:user])
end
def dangerous_system_call
system("bash", "-c", params[:script])
end
def dangerous_exec_call
shell = "zsh"
exec(shell, SHELL_FLAG, "#{params[:script]} -e ./")
end
SHELL_FLAG = "-c"
def safe_system_call
system("bash", "-c", "echo", params[:argument])
end
def safe_system_call_without_shell_dash_c
system("echo", "-c", params[:argument])
end
def example_redirect_to_request_params
redirect_to request.params
end
def permit_bang
# Both should warn
SomeService.new(params: params.permit!).instance_method
params.permit!.merge({ some: 'hash' })
end
private
# Use callbacks to share common setup or constraints between actions.
def set_user
@user = User.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def user_params
params.require(:user).permit(:name)
end
end
| 23 | 88 | 0.648148 |
4a4078a594aa3f7319d552fe64f57ed2d1981631 | 828 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
require File.expand_path('../shared/iteration', __FILE__)
require File.expand_path('../../enumerable/shared/enumeratorized', __FILE__)
# Specs for Hash#each_key: the block receives each key exactly once, in the
# same order as Hash#keys, and the receiver itself is returned.
describe "Hash#each_key" do
  it "calls block once for each key, passing key" do
    seen = {}
    hash = { 1 => -1, 2 => -2, 3 => -3, 4 => -4 }
    hash.each_key { |key| seen[key] = key }.should equal(hash)
    seen.should == { 1 => 1, 2 => 2, 3 => 3, 4 => 4 }
  end

  it "processes keys in the same order as keys()" do
    visited = []
    hash = { 1 => -1, 2 => -2, 3 => -3, 4 => -4 }
    hash.each_key { |key| visited << key }
    visited.should == hash.keys
  end

  # Shared behavior: blockless calls return an Enumerator with the right size.
  it_behaves_like(:hash_iteration_no_block, :each_key)
  it_behaves_like(:enumeratorized_with_origin_size, :each_key, { 1 => 2, 3 => 4, 5 => 6 })
end
| 34.5 | 90 | 0.599034 |
39f13cfd82ca0362d1e91e166ab1611d5be7ff73 | 11,262 | module MiqAeCustomizationController::OldDialogs
extend ActiveSupport::Concern
# Delete all selected or single displayed PXE Server(s)
def deletedialogs
old_dialogs_button_operation('destroy', 'deletion')
end
# Get variables from edit form
def old_dialogs_get_form_vars
@dialog = @edit[:dialog]
@edit[:new][:name] = CGI.unescape(params[:name]) if params[:name]
@edit[:new][:description] = CGI.unescape(params[:description]) if params[:description]
@edit[:new][:dialog_type] = CGI.unescape(params[:dialog_type]) if params[:dialog_type]
@edit[:new][:content] = params[:content_data] if params[:content_data]
@edit[:new][:content] = @edit[:new][:content] + "..." if !params[:name] && !params[:description] && !params[:dialog_type] && !params[:content_data]
end
# Set form variables for edit
def old_dialogs_set_form_vars
@edit = {}
@edit[:dialog] = @dialog
@edit[:new] = {}
@edit[:current] = {}
@edit[:key] = "dialog_edit__#{@dialog.id || "new"}"
@edit[:new][:name] = @dialog.name
@edit[:new][:description] = @dialog.description
if @dialog.dialog_type
@edit[:new][:dialog_type] = @dialog.dialog_type
else
# if new customization dialogs, check if add button was pressed form folder level, to auto select image type
@edit[:new][:dialog_type] = x_node == "root" ? @dialog.dialog_type : x_node.split('_')[1]
end
@edit[:new][:content] = @dialog.content.to_yaml
@edit[:current] = copy_hash(@edit[:new])
session[:edit] = @edit
end
def old_dialogs_set_record_vars(dialog)
dialog.name = @edit[:new][:name]
dialog.description = @edit[:new][:description]
dialog.dialog_type = @edit[:new][:dialog_type]
dialog.content = YAML.load(@edit[:new][:content])
end
# Common Schedule button handler routines
def process_old_dialogs(dialogs, task)
process_elements(dialogs, MiqDialog, task)
end
# Common VM button handler routines
def old_dialogs_button_operation(method, display_name)
dialogs = []
# Either a list or coming from a different controller (eg from host screen, go to its vms)
if !params[:id]
dialogs = find_checked_items
if dialogs.empty?
add_flash(_("No %{model} were selected for %{task}") % {:model => ui_lookup(:model => "MiqDialog"), :task => display_name}, :error)
else
to_delete = []
dialogs.each do |d|
dialog = MiqDialog.find(d)
if dialog.default == true
to_delete.push(d)
add_flash(_("Default %{model} \"%{name}\" cannot be deleted") % {:model => ui_lookup(:model => "MiqDialog"), :name => dialog.name}, :error)
end
end
# deleting elements in temporary array, had to create temp array to hold id's to be deleted from dialogs array, .each gets confused if i deleted them in above loop
to_delete.each do |a|
dialogs.delete(a)
end
process_old_dialogs(dialogs, method)
end
get_node_info
replace_right_cell(:nodetype => x_node, :replace_trees => [:old_dialogs])
else # showing 1 vm
if params[:id].nil? || MiqDialog.find_by_id(params[:id]).nil?
add_flash(_("%{record} no longer exists") % {:record => ui_lookup(:model => "MiqDialog")}, :error)
old_dialogs_list
@refresh_partial = "layouts/gtl"
else
dialogs.push(params[:id])
dialog = MiqDialog.find_by_id(from_cid(params[:id])) if method == 'destroy' # need to set this for destroy method so active node can be set to image_type folder node after record is deleted
if dialog.default
add_flash(_("Default %{model} \"%{name}\" cannot be deleted") % {:model => ui_lookup(:model => "MiqDialog"),
:name => dialog.name}, :error)
else
process_old_dialogs(dialogs, method) unless dialogs.empty?
end
self.x_node = "xx-MiqDialog_#{dialog.dialog_type}" if method == 'destroy' && !flash_errors?
get_node_info
replace_right_cell(:nodetype => x_node, :replace_trees => [:old_dialogs])
end
end
dialogs.count
end
def old_dialogs_get_node_info(treenodeid)
if treenodeid == "root"
old_dialogs_list
@right_cell_text = _("All %{dialogs}") % {:dialogs => ui_lookup(:models => "MiqDialog")}
@right_cell_div = "old_dialogs_list"
else
nodes = treenodeid.split("_")
if nodes[0].split('-').first == "odg"
@right_cell_div = "dialogs_details"
@record = @dialog = MiqDialog.find_by_id(from_cid(nodes[0].split('-').last))
@right_cell_text = _("%{model} \"%{name}\"") % {:model => ui_lookup(:models => "MiqDialog"), :name => @dialog.description}
else
old_dialogs_list
img_typ = ""
MiqDialog::DIALOG_TYPES.each do |typ|
img_typ = typ[0] if typ[1] == nodes[1]
end
@right_cell_text = _("%{typ} %{model}") % {:typ => img_typ, :model => ui_lookup(:models => "MiqDialog")}
@right_cell_div = "old_dialogs_list"
end
end
end
# AJAX driven routine to check for changes in ANY field on the form
def old_dialogs_form_field_changed
return unless load_edit("dialog_edit__#{params[:id]}", "replace_cell__explorer")
old_dialogs_get_form_vars
render :update do |page|
page << javascript_prologue
changed = (@edit[:new] != @edit[:current])
page << javascript_for_miq_button_visibility(changed)
end
end
def old_dialogs_delete
assert_privileges("old_dialogs_delete")
old_dialogs_button_operation('destroy', 'Delete')
end
def old_dialogs_list
@lastaction = "old_dialogs_list"
@force_no_grid_xml = true
@gtl_type = "list"
@ajax_paging_buttons = true
@dialog = nil
if params[:ppsetting] # User selected new per page value
@items_per_page = params[:ppsetting].to_i # Set the new per page value
@settings.store_path(:perpage, @gtl_type.to_sym, @items_per_page) # Set the per page setting for this gtl type
end
@sortcol = session[:dialog_sortcol].nil? ? 0 : session[:dialog_sortcol].to_i
@sortdir = session[:dialog_sortdir].nil? ? "ASC" : session[:dialog_sortdir]
if x_node == "root"
@view, @pages = get_view(MiqDialog) # Get the records (into a view) and the paginator
else
@view, @pages = get_view(MiqDialog, :conditions => ["dialog_type=?", x_node.split('_').last]) # Get the records (into a view) and the paginator
end
@current_page = @pages[:current] unless @pages.nil? # save the current page number
session[:dialog_sortcol] = @sortcol
session[:dialog_sortdir] = @sortdir
update_gtl_div('old_dialogs_list', 'policy_bar') if pagination_or_gtl_request?
end
def old_dialogs_new
assert_privileges("old_dialogs_new")
@dialog = MiqDialog.new
old_dialogs_set_form_vars
@in_a_form = true
replace_right_cell(:nodetype => "odg-")
end
def old_dialogs_copy
assert_privileges("old_dialogs_copy")
@_params[:typ] = "copy"
old_dialogs_edit
end
def old_dialogs_edit
assert_privileges("old_dialogs_edit")
# copy called on checkbox-checked item
unless params[:id]
obj = find_checked_items
@_params[:id] = obj[0]
end
if params[:typ] == "copy"
dialog = MiqDialog.find_by_id(from_cid(params[:id]))
@dialog = MiqDialog.new
@dialog.name = "Copy of " + dialog.name
@dialog.description = dialog.description
@dialog.dialog_type = dialog.dialog_type
@dialog.content = dialog.content
session[:changed] = true
else
@dialog = @record = identify_record(params[:id], MiqDialog) if params[:id]
session[:changed] = false
end
if @dialog.default == true
add_flash(_("Default %{model} \"%{name}\" can not be edited") % {:model => ui_lookup(:model => "MiqDialog"), :name => @dialog.name}, :error)
get_node_info
replace_right_cell(:nodetype => x_node)
return
end
old_dialogs_set_form_vars
@in_a_form = true
replace_right_cell(:nodetype => "odg-#{params[:id]}")
end
def old_dialogs_create
return unless load_edit("dialog_edit__new")
old_dialogs_update_create
end
def old_dialogs_update
id = params[:id] ? params[:id] : "new"
return unless load_edit("dialog_edit__#{id}", "replace_cell__explorer")
old_dialogs_update_create
end
private
def old_dialogs_update_create
old_dialogs_get_form_vars
case params[:button]
when "cancel"
@edit = session[:edit] = nil # clean out the saved info
if !@dialog || @dialog.id.blank?
add_flash(_("Add of new %{record} was cancelled by the user") % {:record => ui_lookup(:model => "MiqDialog")})
else
add_flash(_("Edit of %{model} \"%{name}\" was cancelled by the user") % {:model => ui_lookup(:model => "MiqDialog"), :name => @dialog.name})
end
get_node_info
replace_right_cell(:nodetype => x_node)
when "add", "save"
# dialog = find_by_id_filtered(MiqDialog, params[:id])
dialog = @dialog.id.blank? ? MiqDialog.new : MiqDialog.find(@dialog.id) # Get new or existing record
if @edit[:new][:name].blank?
add_flash(_("Name is required"), :error)
end
if @edit[:new][:dialog_type].blank?
add_flash(_("Dialog Type must be selected"), :error)
end
unless @flash_array
begin
YAML.parse(@edit[:new][:content])
rescue YAML::SyntaxError => ex
add_flash(_("Syntax error in YAML file: %{error_message}") % {:error_message => ex.message}, :error)
end
end
if @flash_array
javascript_flash
return
end
old_dialogs_set_record_vars(dialog)
begin
dialog.save!
rescue Exception => err
dialog.errors.each do |field, msg|
add_flash("#{field.to_s.capitalize} #{msg}", :error)
end
@changed = true
javascript_flash
else
if params[:button] == "add"
add_flash(_("%{model} \"%{name}\" was added") % {:model => ui_lookup(:model => "MiqDialog"), :name => dialog.name})
else
add_flash(_("%{model} \"%{name}\" was saved") % {:model => ui_lookup(:model => "MiqDialog"), :name => dialog.name})
end
AuditEvent.success(build_saved_audit(dialog, @edit))
@edit = session[:edit] = nil # clean out the saved info
# if editing from list view then change active_node to be same as updated image_type folder node
if x_node.split('-')[0] == "xx"
self.x_node = "xx-MiqDialog_#{dialog.dialog_type}"
else
if params[:button] == "add"
d = MiqDialog.find_by(:name => dialog.name, :dialog_type => dialog.dialog_type)
self.x_node = "odg-#{to_cid(d.id)}"
end
end
get_node_info
replace_right_cell(:nodetype => x_node, :replace_trees => [:old_dialogs])
end
when "reset", nil # Reset or first time in
add_flash(_("All changes have been reset"), :warning)
@in_a_form = true
old_dialogs_edit
end
end
end
| 37.919192 | 205 | 0.631238 |
bb52e70bedb0f13e42f40042798bb7ae20c6a228 | 257 | class CreateTopics < ActiveRecord::Migration[5.0]
def change
create_table :topics do |t|
t.string :title
t.string :description
t.integer :max_units
t.datetime :complete_date, :null => true
t.timestamps
end
end
end
| 19.769231 | 49 | 0.649805 |
38adfce871e8f9548679a593e7e413022788a7b2 | 30,718 | # frozen_string_literal: true
require 'spec_helper'
describe RuboCop::Cop::Style::MultilineMethodCallIndentation do
subject(:cop) { described_class.new(config) }
let(:config) do
merged = RuboCop::ConfigLoader
.default_configuration['Style/MultilineMethodCallIndentation']
.merge(cop_config)
.merge('IndentationWidth' => cop_indent)
RuboCop::Config
.new('Style/MultilineMethodCallIndentation' => merged,
'Style/IndentationWidth' => { 'Width' => indentation_width })
end
let(:indentation_width) { 2 }
let(:cop_indent) { nil } # use indentation width from Style/IndentationWidth
shared_examples 'common' do
it 'accepts indented methods in LHS of []= assignment' do
inspect_source(cop,
['a',
' .b[c] = 0'])
expect(cop.offenses).to be_empty
end
it 'accepts indented methods inside and outside a block' do
inspect_source(cop,
['a = b.map do |c|',
' c',
' .b',
' .d do',
' x',
' .y',
' end',
'end'])
expect(cop.messages).to be_empty
end
it 'accepts indentation relative to first receiver' do
inspect_source(cop,
['node',
' .children.map { |n| string_source(n) }.compact',
' .any? { |s| preferred.any? { |d| s.include?(d) } }'])
expect(cop.offenses).to be_empty
end
it 'accepts indented methods in ordinary statement' do
inspect_source(cop,
['a.',
' b'])
expect(cop.messages).to be_empty
end
it 'registers an offense for one space indentation of second line' do
inspect_source(cop,
['a',
' .b'])
expect(cop.messages).to eq(['Use 2 (not 1) spaces for indenting an ' \
'expression spanning multiple lines.'])
expect(cop.highlights).to eq(['.b'])
end
it 'registers an offense for proc call without a selector' do
inspect_source(cop,
['a',
' .(args)'])
expect(cop.messages).to eq(['Use 2 (not 1) spaces for indenting an ' \
'expression spanning multiple lines.'])
expect(cop.highlights).to eq(['.('])
end
it 'accepts no extra indentation of third line' do
inspect_source(cop,
[' a.',
' b.',
' c'])
expect(cop.offenses).to be_empty
end
it 'accepts indented methods in for body' do
inspect_source(cop,
['for x in a',
' something.',
' something_else',
'end'])
expect(cop.highlights).to be_empty
end
it 'accepts alignment inside a grouped expression' do
inspect_source(cop,
['(a.',
' b)'])
expect(cop.messages).to be_empty
end
it 'accepts an expression where the first method spans multiple lines' do
inspect_source(cop,
['subject.each do |item|',
' result = resolve(locale) and return result',
'end.a'])
expect(cop.messages).to be_empty
end
it 'accepts even indentation of consecutive lines in typical RSpec code' do
inspect_source(cop,
['expect { Foo.new }.',
' to change { Bar.count }.',
' from(1).to(2)'])
expect(cop.messages).to be_empty
end
it 'accepts any indentation of parameters to #[]' do
inspect_source(cop,
['payment = Models::IncomingPayments[',
" id: input['incoming-payment-id'],",
' user_id: @user[:id]]'])
expect(cop.messages).to be_empty
end
it 'registers an offense for extra indentation of 3rd line in typical ' \
'RSpec code' do
inspect_source(cop,
['expect { Foo.new }.',
' to change { Bar.count }.',
' from(1).to(2)'])
expect(cop.messages).to eq(['Use 2 (not 6) spaces for indenting an ' \
'expression spanning multiple lines.'])
expect(cop.highlights).to eq(['from'])
end
it "doesn't fail on unary operators" do
inspect_source(cop,
['def foo',
' !0',
' .nil?',
'end'])
expect(cop.offenses.size).to eq(1)
end
end
shared_examples 'common for aligned and indented' do
it 'registers an offense for no indentation of second line' do
inspect_source(cop,
['a.',
'b'])
expect(cop.messages)
.to eq(['Use 2 (not 0) spaces for indenting an expression spanning ' \
'multiple lines.'])
expect(cop.highlights).to eq(['b'])
end
it 'registers an offense for 3 spaces indentation of second line' do
inspect_source(cop,
['a.',
' b',
'c.',
' d'])
expect(cop.messages)
.to eq(['Use 2 (not 3) spaces for indenting an expression spanning ' \
'multiple lines.'] * 2)
expect(cop.highlights).to eq(%w(b d))
end
it 'registers an offense for extra indentation of third line' do
inspect_source(cop,
[' a.',
' b.',
' c'])
expect(cop.messages)
.to eq(['Use 2 (not 4) spaces for indenting an expression spanning ' \
'multiple lines.'])
expect(cop.highlights).to eq(['c'])
end
it 'registers an offense for the emacs ruby-mode 1.1 indentation of an ' \
'expression in an array' do
inspect_source(cop,
[' [',
' a.',
' b',
' ]'])
expect(cop.messages)
.to eq(['Use 2 (not 0) spaces for indenting an expression spanning ' \
'multiple lines.'])
expect(cop.highlights).to eq(['b'])
end
end
context 'when EnforcedStyle is aligned' do
let(:cop_config) { { 'EnforcedStyle' => 'aligned' } }
include_examples 'common'
include_examples 'common for aligned and indented'
# We call it semantic alignment when a dot is aligned with the first dot in
# a chain of calls, and that first dot does not begin its line.
context 'for semantic alignment' do
it 'accepts method being aligned with method' do
inspect_source(cop,
['User.all.first',
' .age.to_s'])
expect(cop.offenses).to be_empty
end
it 'accepts method being aligned with method in assignment' do
inspect_source(cop,
['age = User.all.first',
' .age.to_s'])
expect(cop.offenses).to be_empty
end
it 'accepts aligned method even when an aref is in the chain' do
inspect_source(cop, ["foo = '123'.a",
' .b[1]',
' .c'])
expect(cop.offenses).to be_empty
end
it 'accepts aligned method even when an aref is first in the chain' do
inspect_source(cop, ["foo = '123'[1].a",
' .b',
' .c'])
expect(cop.offenses).to be_empty
end
it "doesn't fail on a chain of aref calls" do
inspect_source(cop, 'a[1][2][3]')
expect(cop.offenses).to be_empty
end
it 'accepts aligned method with blocks in operation assignment' do
inspect_source(cop,
['@comment_lines ||=',
' src.comments',
' .select { |c| begins_its_line?(c) }',
' .map { |c| c.loc.line }'])
expect(cop.offenses).to be_empty
end
it 'accepts 3 aligned methods' do
inspect_source(cop,
["a_class.new(severity, location, 'message', 'CopName')",
' .severity',
' .level'])
expect(cop.offenses).to be_empty
end
it 'registers an offense for unaligned methods' do
inspect_source(cop,
['User.a',
' .b',
' .c'])
expect(cop.messages).to eq(['Align `.b` with `.a` on line 1.',
'Align `.c` with `.a` on line 1.'])
expect(cop.highlights).to eq(['.b', '.c'])
end
it 'registers an offense for unaligned method in block body' do
inspect_source(cop,
['a do',
' b.c',
' .d',
'end'])
expect(cop.messages).to eq(['Align `.d` with `.c` on line 2.'])
expect(cop.highlights).to eq(['.d'])
end
it 'auto-corrects' do
new_source = autocorrect_source(cop, ['User.all.first',
' .age.to_s'])
expect(new_source).to eq(['User.all.first',
' .age.to_s'].join("\n"))
end
end
it 'accepts correctly aligned methods in operands' do
inspect_source(cop, ['1 + a',
' .b',
' .c + d.',
' e'])
expect(cop.offenses).to be_empty
end
it 'accepts correctly aligned methods in assignment' do
inspect_source(cop, ['def investigate(processed_source)',
' @modifier = processed_source',
' .tokens',
' .select { |t| t.type == :k }',
' .map(&:pos)',
'end'])
expect(cop.offenses).to be_empty
end
it 'accepts aligned methods in if + assignment' do
inspect_source(cop,
['KeyMap = Hash.new do |map, key|',
' value = if key.respond_to?(:to_str)',
' key',
' else',
" key.to_s.split('_').",
' each { |w| w.capitalize! }.',
" join('-')",
' end',
' keymap_mutex.synchronize { map[key] = value }',
'end'])
expect(cop.offenses).to be_empty
end
it 'accepts indented method when there is nothing to align with' do
inspect_source(cop,
["expect { custom_formatter_class('NonExistentClass') }",
' .to raise_error(NameError)'])
expect(cop.offenses).to be_empty
end
it 'registers an offense for one space indentation of third line' do
inspect_source(cop,
['a',
' .b',
' .c'])
expect(cop.messages)
.to eq(['Use 2 (not 1) spaces for indenting an expression spanning ' \
'multiple lines.'])
expect(cop.highlights).to eq(['.c'])
end
it 'accepts indented and aligned methods in binary operation' do
inspect_source(cop,
['a.',
' b + c', # b is indented relative to a
' .d']) # .d is aligned with c
expect(cop.offenses).to be_empty
end
it 'accepts aligned methods in if condition' do
inspect_source(cop,
['if a.',
' b',
' something',
'end'])
expect(cop.messages).to be_empty
end
it 'accepts aligned methods in a begin..end block' do
inspect_source(cop,
['@dependencies ||= begin',
' DEFAULT_DEPENDENCIES',
' .reject { |e| e }',
' .map { |e| e }',
'end'])
expect(cop.messages).to be_empty
end
it 'registers an offense for misaligned methods in if condition' do
inspect_source(cop,
['if a.',
' b',
' something',
'end'])
expect(cop.messages).to eq(['Align `b` with `a.` on line 1.'])
expect(cop.highlights).to eq(['b'])
expect(cop.config_to_allow_offenses).to eq('Enabled' => false)
end
it 'falls back to indentation in complicated cases' do
inspect_source(cop,
# There are two method call chains here. The last one is
# an argument to the first, and they both start on the
# same line.
['expect(RuboCop::ConfigLoader).to receive(:file).once',
" .with('dir')"])
expect(cop.messages)
.to eq(['Use 2 (not 4) spaces for indenting an expression spanning ' \
'multiple lines.'])
expect(cop.highlights).to eq(['.with'])
end
it 'does not check binary operations when string wrapped with backslash' do
inspect_source(cop,
["flash[:error] = 'Here is a string ' \\",
" 'That spans' <<",
" 'multiple lines'"])
expect(cop.offenses).to be_empty
end
it 'does not check binary operations when string wrapped with +' do
inspect_source(cop,
["flash[:error] = 'Here is a string ' +",
" 'That spans' <<",
" 'multiple lines'"])
expect(cop.offenses).to be_empty
end
it 'registers an offense for misaligned method in []= call' do
inspect_source(cop,
['flash[:error] = here_is_a_string.',
' that_spans.',
' multiple_lines'])
expect(cop.messages)
.to eq(['Align `multiple_lines` with `here_is_a_string.` on line 1.'])
expect(cop.highlights).to eq(['multiple_lines'])
end
it 'registers an offense for misaligned methods in unless condition' do
inspect_source(cop,
['unless a',
'.b',
' something',
'end'])
expect(cop.messages).to eq(['Align `.b` with `a` on line 1.'])
expect(cop.highlights).to eq(['.b'])
expect(cop.config_to_allow_offenses).to eq('Enabled' => false)
end
it 'registers an offense for misaligned methods in while condition' do
inspect_source(cop,
['while a.',
' b',
' something',
'end'])
expect(cop.messages).to eq(['Align `b` with `a.` on line 1.'])
expect(cop.highlights).to eq(['b'])
end
it 'registers an offense for misaligned methods in until condition' do
inspect_source(cop,
['until a.',
' b',
' something',
'end'])
expect(cop.messages).to eq(['Align `b` with `a.` on line 1.'])
expect(cop.highlights).to eq(['b'])
end
it 'accepts aligned method in return' do
inspect_source(cop,
['def a',
' return b.',
' c',
'end'])
expect(cop.offenses).to be_empty
end
it 'accepts aligned method in assignment + block + assignment' do
inspect_source(cop,
['a = b do',
' c.d = e.',
' f',
'end'])
expect(cop.offenses).to be_empty
end
it 'accepts aligned methods in assignment' do
inspect_source(cop,
['formatted_int = int_part',
' .to_s',
' .reverse',
" .gsub(/...(?=.)/, '\&_')"])
expect(cop.messages).to be_empty
end
it 'registers an offense for misaligned methods in local variable ' \
'assignment' do
inspect_source(cop, ['a = b.c.',
' d'])
expect(cop.messages).to eq(['Align `d` with `b.c.` on line 1.'])
expect(cop.highlights).to eq(['d'])
end
it 'accepts aligned methods in constant assignment' do
inspect_source(cop, ['A = b',
' .c'])
expect(cop.offenses).to be_empty
end
it 'accepts aligned methods in operator assignment' do
inspect_source(cop, ['a +=',
' b',
' .c'])
expect(cop.offenses).to be_empty
end
it 'registers an offense for unaligned methods in assignment' do
inspect_source(cop,
['bar = Foo',
' .a',
' .b(c)'])
expect(cop.messages).to eq(['Align `.a` with `Foo` on line 1.'])
expect(cop.highlights).to eq(['.a'])
end
it 'auto-corrects' do
new_source = autocorrect_source(cop, ['until a.',
' b',
' something',
'end'])
expect(new_source).to eq(['until a.',
' b',
' something',
'end'].join("\n"))
end
end
shared_examples 'both indented* styles' do
# We call it semantic alignment when a dot is aligned with the first dot in
# a chain of calls, and that first dot does not begin its line. But for the
# indented style, it doesn't come into play.
context 'for possible semantic alignment' do
it 'accepts indented methods' do
inspect_source(cop,
['User.a',
' .c',
' .b'])
expect(cop.messages).to be_empty
expect(cop.highlights).to be_empty
expect(cop.offenses).to be_empty
end
end
end
context 'when EnforcedStyle is indented_relative_to_receiver' do
let(:cop_config) { { 'EnforcedStyle' => 'indented_relative_to_receiver' } }
include_examples 'common'
include_examples 'both indented* styles'
it 'accepts correctly indented methods in operation' do
inspect_source(cop, [' 1 + a',
' .b',
' .c'])
expect(cop.highlights).to be_empty
expect(cop.offenses).to be_empty
end
it 'registers an offense for no indentation of second line' do
inspect_source(cop,
['a.',
'b'])
expect(cop.messages)
.to eq(['Indent `b` 2 spaces more than `a` on line 1.'])
expect(cop.highlights).to eq(['b'])
end
it 'registers an offense for 3 spaces indentation of second line' do
inspect_source(cop,
['a.',
' b',
'c.',
' d'])
expect(cop.messages)
.to eq(['Indent `b` 2 spaces more than `a` on line 1.',
'Indent `d` 2 spaces more than `c` on line 3.'])
expect(cop.highlights).to eq(%w(b d))
end
it 'registers an offense for extra indentation of third line' do
inspect_source(cop,
[' a.',
' b.',
' c'])
expect(cop.messages)
.to eq(['Indent `c` 2 spaces more than `a` on line 1.'])
expect(cop.highlights).to eq(['c'])
end
it 'registers an offense for the emacs ruby-mode 1.1 indentation of an ' \
'expression in an array' do
inspect_source(cop,
[' [',
' a.',
' b',
' ]'])
expect(cop.messages)
.to eq(['Indent `b` 2 spaces more than `a` on line 2.'])
expect(cop.highlights).to eq(['b'])
end
it 'auto-corrects' do
new_source = autocorrect_source(cop, ['until a.',
' b',
' something',
'end'])
expect(new_source).to eq(['until a.',
' b',
' something',
'end'].join("\n"))
end
end
context 'when EnforcedStyle is indented' do
let(:cop_config) { { 'EnforcedStyle' => 'indented' } }
include_examples 'common'
include_examples 'common for aligned and indented'
include_examples 'both indented* styles'
it 'accepts correctly indented methods in operation' do
inspect_source(cop, [' 1 + a',
' .b',
' .c'])
expect(cop.offenses).to be_empty
expect(cop.highlights).to be_empty
end
it 'registers an offense for one space indentation of third line' do
inspect_source(cop,
['a',
' .b',
' .c'])
expect(cop.messages).to eq(['Use 2 (not 1) spaces for indenting an ' \
'expression spanning multiple lines.'])
expect(cop.highlights).to eq(['.c'])
end
it 'accepts indented methods in if condition' do
inspect_source(cop,
['if a.',
' b',
' something',
'end'])
expect(cop.messages).to be_empty
end
it 'registers an offense for aligned methods in if condition' do
inspect_source(cop,
['if a.',
' b',
' something',
'end'])
expect(cop.messages).to eq(['Use 4 (not 3) spaces for indenting a ' \
'condition in an `if` statement spanning ' \
'multiple lines.'])
expect(cop.highlights).to eq(['b'])
expect(cop.config_to_allow_offenses).to eq('Enabled' => false)
end
it 'accepts normal indentation of method parameters' do
inspect_source(cop,
['Parser::Source::Range.new(expr.source_buffer,',
' begin_pos,',
' begin_pos + line.length)'])
expect(cop.messages).to be_empty
end
it 'accepts any indentation of method parameters' do
inspect_source(cop,
['a(b.',
' c',
'.d)'])
expect(cop.messages).to be_empty
end
it 'accepts normal indentation inside grouped expression' do
inspect_source(cop,
['arg_array.size == a.size && (',
' arg_array == a ||',
' arg_array.map(&:children) == a.map(&:children)',
')'])
expect(cop.messages).to be_empty
end
[
%w(an if),
%w(an unless),
%w(a while),
%w(an until)
].each do |article, keyword|
it "accepts double indentation of #{keyword} condition" do
inspect_source(cop,
["#{keyword} receiver.",
' nil? &&',
' !args.empty?',
'end'])
expect(cop.messages).to be_empty
end
it "registers an offense for a 2 space indentation of #{keyword} " \
'condition' do
inspect_source(cop,
["#{keyword} receiver",
' .nil? &&',
' !args.empty?',
'end'])
expect(cop.highlights).to eq(['.nil?'])
expect(cop.messages).to eq(['Use 4 (not 2) spaces for indenting a ' \
"condition in #{article} `#{keyword}` " \
'statement spanning multiple lines.'])
end
it "accepts indented methods in #{keyword} body" do
inspect_source(cop,
["#{keyword} a",
' something.',
' something_else',
'end'])
expect(cop.highlights).to be_empty
end
end
%w(unless if).each do |keyword|
it "accepts special indentation of return #{keyword} condition" do
inspect_source(cop,
["return #{keyword} receiver.nil? &&",
' !args.empty? &&',
' BLACKLIST.include?(method_name)'])
expect(cop.messages).to be_empty
end
end
it 'registers an offense for wrong indentation of for expression' do
inspect_source(cop,
['for n in a.',
' b',
'end'])
expect(cop.messages).to eq(['Use 4 (not 2) spaces for indenting a ' \
'collection in a `for` statement spanning ' \
'multiple lines.'])
expect(cop.highlights).to eq(['b'])
end
it 'accepts special indentation of for expression' do
inspect_source(cop,
['for n in a.',
' b',
'end'])
expect(cop.messages).to be_empty
end
it 'accepts indentation of assignment' do
inspect_source(cop,
['formatted_int = int_part',
' .abs',
' .to_s',
' .reverse',
" .gsub(/...(?=.)/, '\&_')",
' .reverse'])
expect(cop.messages).to be_empty
end
it 'registers an offense for correct + unrecognized style' do
inspect_source(cop,
['a.',
' b',
'c.',
' d'])
expect(cop.messages).to eq(['Use 2 (not 4) spaces for indenting an ' \
'expression spanning multiple lines.'])
expect(cop.highlights).to eq(%w(d))
expect(cop.config_to_allow_offenses).to eq('Enabled' => false)
end
it 'registers an offense for aligned operators in assignment' do
inspect_source(cop,
['formatted_int = int_part',
' .abs',
' .reverse'])
expect(cop.messages).to eq(['Use 2 (not 16) spaces for indenting an ' \
'expression in an assignment spanning ' \
'multiple lines.'] * 2)
end
it 'auto-corrects' do
new_source = autocorrect_source(cop, ['until a.',
' b',
' something',
'end'])
expect(new_source).to eq(['until a.',
' b',
' something',
'end'].join("\n"))
end
context 'when indentation width is overridden for this cop' do
let(:cop_indent) { 7 }
it 'accepts indented methods' do
inspect_source(cop,
['User.a',
' .c',
' .b'])
expect(cop.offenses).to be_empty
end
it 'accepts correctly indented methods in operation' do
inspect_source(cop, [' 1 + a',
' .b',
' .c'])
expect(cop.offenses).to be_empty
expect(cop.highlights).to be_empty
end
it 'accepts indented methods in if condition' do
inspect_source(cop,
['if a.',
' b',
' something',
'end'])
expect(cop.messages).to be_empty
end
it 'accepts indentation of assignment' do
inspect_source(cop,
['formatted_int = int_part',
' .abs',
' .to_s',
' .reverse'])
expect(cop.messages).to be_empty
end
[
%w(an if),
%w(an unless),
%w(a while),
%w(an until)
].each do |article, keyword|
it "accepts indentation of #{keyword} condition which is offset " \
'by a single normal indentation step' do
# normal code indentation is 2 spaces, and we have configured
# multiline method indentation to 7 spaces
# so in this case, 9 spaces are required
inspect_source(cop,
["#{keyword} receiver.",
' nil? &&',
' !args.empty?',
'end'])
expect(cop.messages).to be_empty
end
it "registers an offense for a 4 space indentation of #{keyword} " \
'condition' do
inspect_source(cop,
["#{keyword} receiver",
' .nil? &&',
' !args.empty?',
'end'])
expect(cop.highlights).to eq(['.nil?'])
expect(cop.messages).to eq(['Use 9 (not 4) spaces for indenting a ' \
"condition in #{article} `#{keyword}` " \
'statement spanning multiple lines.'])
end
it "accepts indented methods in #{keyword} body" do
inspect_source(cop,
["#{keyword} a",
' something.',
' something_else',
'end'])
expect(cop.highlights).to be_empty
end
end
end
end
end
| 35.927485 | 80 | 0.45348 |
d52eba407aaeda05e574025a1c0999f3988ace32 | 104 | require "damerau-levenshtein"
# Damerau-Levenshtein edit distance between two strings.
# Thin wrapper over the damerau-levenshtein gem's native implementation.
def damlev(first, second)
  DamerauLevenshtein.distance(first, second)
end
| 17.333333 | 43 | 0.798077 |
1a8a07951afe55571075380ad52d161435c60342 | 682 | class Nwmls::ListingHistory
  include Nwmls::Model

  # Attribute symbols exposed by this model. When the mixin's
  # expand_attributes? is enabled, NWMLS-style names (ML_Number) are
  # converted to snake_case before being symbolized.
  def self.attribute_names
    attrs = %w[ML_Number ListPrice ChangeDate]
    if expand_attributes?
      attrs = attrs.collect { |attr| attr.underscore }
    end
    attrs.collect { |attr| attr.to_sym }
  end

  attr_accessor(*attribute_names)

  # Fetches listing-history records via Evernet::Query. A non-Hash argument
  # is treated as a bare listing number.
  def self.find(conditions = {})
    unless conditions.is_a?(Hash)
      conditions = { :listing_number => conditions.to_i }
    end
    build_collection(Evernet::Query.retrieve_listing_history_data(conditions))
  end

  # The full listing this history row belongs to (memoized per instance).
  def listing
    @listing ||= Nwmls::Listing.find ml_number
  end

  private

  # Without expanded attributes the accessor keeps its raw NWMLS name, so
  # alias it to the snake_case reader that #listing expects.
  unless expand_attributes?
    def ml_number
      self.ML_Number
    end
  end
end
| 20.058824 | 78 | 0.697947 |
627d83032ab48a081bf4b84f87c7c8a0f4e07cdf | 967 | class Fselect < Formula
  desc "Find files with SQL-like queries"
  homepage "https://github.com/jhspetersson/fselect"
  url "https://github.com/jhspetersson/fselect/archive/0.7.2.tar.gz"
  sha256 "8b2cbf8aff709ffcab49ed59330655669ab185a524e89a101141d80cc025063b"
  license "Apache-2.0"

  # Pre-built binaries per macOS release (old-style bottle DSL).
  bottle do
    cellar :any_skip_relocation
    sha256 "4126d38a8952a7e51d3c0f3c4481518ca5718611137f9c864955786c39651a12" => :big_sur
    sha256 "384f3df72b382a2c105461dc0f800e8f7afa84b37a0046f521a47519cf1f5dba" => :arm64_big_sur
    sha256 "368e4013f2a28775244c7758aea027e0809d6f9e676701ee386138cdefae9c94" => :catalina
    sha256 "b869be256a65037aafe9e474f57ab1310f60630bdf6a22004359035e8ec73868" => :mojave
  end

  depends_on "rust" => :build

  # Standard cargo build/install into the Homebrew prefix.
  def install
    system "cargo", "install", *std_cargo_args
  end

  # Smoke test: the binary can find a file it is pointed at.
  test do
    touch testpath/"test.txt"
    cmd = "#{bin}/fselect name from . where name = '*.txt'"
    assert_match "test.txt", shell_output(cmd).chomp
  end
end
| 34.535714 | 95 | 0.770424 |
7af262d466cc517b8cf3dfff8051a83bb93c3948 | 352 | module Adminos::Controllers::Filters
extend ActiveSupport::Concern
included do
helper_method :filters
end
class_methods do
attr_reader :filters
def add_filter(attribute, *args)
options = args.extract_options!
(@filters ||= {})[attribute.to_sym] = options
end
end
def filters
self.class.filters
end
end
| 16.761905 | 51 | 0.6875 |
393415af7d8d3a2c326316b7d062a1cdd6bbef31 | 77 | Rails.application.routes.draw do
  # Read-only greetings endpoint: only GET /greetings is exposed.
  resources :greetings, only: [:index]
end
| 19.25 | 39 | 0.753247 |
ac53804b4548479a8897fccbf97c70df6fc9ed09 | 178 | class CreateLists < ActiveRecord::Migration[6.1]
  # Creates the lists table: a named list with a favorite flag.
  def change
    create_table :lists do |t|
      t.string :name
      t.boolean :favorite

      t.timestamps
    end
  end
end
| 16.181818 | 48 | 0.646067 |
5d1ec37f7f247f60ab8b6e29d2663bae43780ec4 | 2,939 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Murfin # rubocop:disable Style/ClassAndModuleChildren
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 6.0

    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set up logging to be the same in all environments but control the level
    # through an environment variable.
    config.log_level = ENV['LOG_LEVEL']

    # Log to STDOUT because Docker expects all processes to log here. You could
    # then redirect logs to a third party service on your own such as systemd,
    # or a third party host such as Loggly, etc..
    logger = ActiveSupport::Logger.new($stdout)
    logger.formatter = config.log_formatter
    config.log_tags = %i[subdomain uuid]
    config.logger = ActiveSupport::TaggedLogging.new(logger)

    # Make lib/ autoloadable in addition to app/.
    config.autoload_paths << Rails.root.join('lib')

    # Action mailer settings. All SMTP credentials come from the environment.
    config.action_mailer.delivery_method = :smtp
    config.action_mailer.smtp_settings = {
      address: ENV['SMTP_ADDRESS'],
      port: ENV['SMTP_PORT'].to_i,
      domain: ENV['SMTP_DOMAIN'],
      user_name: ENV['SMTP_USERNAME'],
      password: ENV['SMTP_PASSWORD'],
      authentication: ENV['SMTP_AUTH'],
      enable_starttls_auto: ENV['SMTP_ENABLE_STARTTLS_AUTO'] == 'true'
    }
    config.action_mailer.default_url_options = {
      host: ENV['ACTION_MAILER_HOST']
    }
    config.action_mailer.default_options = {
      from: ENV['ACTION_MAILER_DEFAULT_FROM']
    }

    # Set Redis as the back-end for the cache.
    # REDIS_URL wins over the host/port pair when both are present.
    config.cache_store = :redis_cache_store, {
      url: "#{ENV['REDIS_URL'] || "redis://#{ENV['REDIS_HOST']}:#{ENV['REDIS_PORT']}"}/0",
      password: ENV['REDIS_PASSWORD'],
      namespace: ENV['REDIS_CACHE_NAMESPACE']
    }

    # Set Sidekiq as the back-end for Active Job.
    config.active_job.queue_adapter = :sidekiq
    config.active_job.queue_name_prefix =
      "#{ENV['ACTIVE_JOB_QUEUE_PREFIX']}_#{Rails.env}"

    # Use RSpec as the test framework when generating code.
    config.generators do |g|
      g.test_framework :rspec, fixture: true
      g.fixture_replacement :factory_bot, dir: 'spec/factories'
      g.view_specs false
    end

    # Redirect legacy Internet Explorer users to a static upgrade page.
    config.middleware.use Browser::Middleware do
      redirect_to '/upgrade' if browser.ie?('<= 11')
    end

    # Middleware that allows users to get a PDF, PNG or JPEG view of any page on your site by appending .pdf, .png or .jpeg/.jpg to the URL.
    require 'grover'
    config.middleware.use Grover::Middleware

    # Load app-specific core extensions before other initializers run.
    initializer 'murfin.extensions', before: :load_config_initializers do |_app|
      require 'extensions'
    end
  end
end
914ce1b2c48f865cec1752d3412a80b0955901d9 | 2,341 | module Celluloid
module Notifications
def self.notifier
Actor[:notifications_fanout] || fail(DeadActorError, "notifications fanout actor not running")
end
def publish(pattern, *args)
Celluloid::Notifications.notifier.publish(pattern, *args)
rescue DeadActorError
# Bad shutdown logic. Oh well....
# TODO: needs a tests
end
module_function :publish
def subscribe(pattern, method)
Celluloid::Notifications.notifier.subscribe(Actor.current, pattern, method)
end
def unsubscribe(*args)
Celluloid::Notifications.notifier.unsubscribe(*args)
end
class Fanout
include Celluloid
trap_exit :prune
def initialize
@subscribers = []
@listeners_for = {}
end
def subscribe(actor, pattern, method)
subscriber = Subscriber.new(actor, pattern, method).tap do |s|
@subscribers << s
end
link actor
@listeners_for.clear
subscriber
end
def unsubscribe(subscriber)
@subscribers.reject! { |s| s.matches?(subscriber) }
@listeners_for.clear
end
def publish(pattern, *args)
listeners_for(pattern).each { |s| s.publish(pattern, *args) }
end
def listeners_for(pattern)
@listeners_for[pattern] ||= @subscribers.select { |s| s.subscribed_to?(pattern) }
end
def listening?(pattern)
listeners_for(pattern).any?
end
def prune(actor, _reason=nil)
@subscribers.reject! { |s| s.actor == actor }
@listeners_for.clear
end
end
class Subscriber
attr_accessor :actor, :pattern, :method
def initialize(actor, pattern, method)
@actor = actor
@pattern = pattern
@method = method
end
def publish(pattern, *args)
actor.async method, pattern, *args
rescue DeadActorError
# TODO: needs a tests
# Bad shutdown logic. Oh well....
end
def subscribed_to?(pattern)
!pattern || @pattern === pattern.to_s || @pattern === pattern
end
def matches?(subscriber_or_pattern)
self === subscriber_or_pattern ||
@pattern && @pattern === subscriber_or_pattern
end
end
end
def self.publish(*args)
Notifications.publish(*args)
end
end
| 24.385417 | 100 | 0.616403 |
916fcf6d91b727e42343d570a6362917cc99c509 | 2,095 | #!/usr/bin/env ruby
# download dictionaries
# (edit script for when updates needed)
# also see obtain_dicts.sh
require 'net/http'
require 'open-uri'
require 'fileutils'
# af_ZA
# curl https://raw.githubusercontent.com/LibreOffice/dictionaries/master/af_ZA/af_ZA.aff > af_ZA.aff
# curl https://raw.githubusercontent.com/LibreOffice/dictionaries/master/af_ZA/af_ZA.dic > af_ZA.dic
langs = %w(
af_ZA
an_ES
ar
be_BY
bg_BG
bn_BD
bo
br_FR
bs_BA
cs_CZ
da_DK
el_GR
et_EE
gd_GB
gu_IN
gug
he_IL
hi_IN
hr_HR
hu_HU
is
it_IT
kmr_Latn
ko_KR
lo_LA
lv_LV
ne_NP
nl_NL
oc_FR
pl_PL
pt_BR
pt_PT
ru_RU
si_LK
sk_SK
sl_SI
sq_AL
sw_TZ
te_IN
th_TH
tr_TR
uk_UA
)
# Fetches +url+ from +uri+'s host/port (HTTPS when the scheme says so) and
# writes the response body to +lfile+. Non-2xx/3xx responses are skipped.
def downloadfile(uri, url, lfile)
  # puts "hostname #{uri.hostname} port #{uri.port}"
  Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
    puts "GET #{url}"
    resp = http.get(url)
    puts resp.code
    # Net::HTTPResponse#code is a String; compare numerically. The original
    # String comparison ('199' < code < '400') only worked by accident for
    # three-digit codes.
    next unless resp.code.to_i.between?(200, 399)
    puts "#{url} -> #{lfile}"
    # File.write avoids Kernel#open's "|command" injection footgun and
    # closes the handle automatically.
    File.write(lfile, resp.body)
  end
end
# Moves +source_path+ to +target_path+ (file or directory destination),
# overwriting an existing file. Delegates to FileUtils.mv.
def movefile(source_path, target_path)
  FileUtils.mv(source_path, target_path)
end
# Downloads the .aff/.dic pair for every language code in +langs+ from the
# LibreOffice dictionaries repository into the matching local directory,
# skipping languages whose .aff file already exists, and sleeping between
# requests to be polite to the host.
def process_std(langs)
  uri = URI('https://raw.githubusercontent.com')
  raw_url = "#{uri}/LibreOffice/dictionaries/master"
  langs.each do |lang|
    afffile = "#{lang}.aff"
    dicfile = "#{lang}.dic"
    # File.exist? — the File.exists? alias is deprecated and was removed
    # in Ruby 3.2.
    if File.exist? "#{lang}/#{afffile}"
      puts "#{lang}/#{afffile} exists, skipping download"
      next
    end
    # curl https://raw.githubusercontent.com/LibreOffice/dictionaries/master/af_ZA/af_ZA.aff > af_ZA.aff
    aff = "#{raw_url}/#{lang}/#{afffile}"
    # curl https://raw.githubusercontent.com/LibreOffice/dictionaries/master/af_ZA/af_ZA.dic > af_ZA.dic
    dic = "#{raw_url}/#{lang}/#{dicfile}"
    puts "download #{aff} to #{afffile}"
    downloadfile(uri, aff, afffile)
    puts "move #{afffile} to #{lang}"
    movefile(afffile, lang)
    puts "sleep 2 seconds"
    sleep 2
    puts "download #{dic} to #{dicfile}"
    downloadfile(uri, dic, dicfile)
    puts "move #{dicfile} to #{lang}"
    movefile(dicfile, lang)
    puts "sleep 2 seconds"
    sleep 2
  end
end
process_std(langs)
| 18.705357 | 104 | 0.673031 |
210edad96f797851f29fa730c71f889252762a93 | 77 | # frozen_string_literal: true
module VendingAutomat
  # Library version string (semantic versioning).
  VERSION = '0.1.0'
end
| 12.833333 | 29 | 0.753247 |
384a0db0c80107b08c38969270b1532442829557 | 13,024 | require 'base64'
require 'active_record'
require 'arel_sqlserver'
require 'active_record/connection_adapters/abstract_adapter'
require 'active_record/connection_adapters/sqlserver/core_ext/active_record'
require 'active_record/connection_adapters/sqlserver/core_ext/explain'
require 'active_record/connection_adapters/sqlserver/core_ext/explain_subscriber'
require 'active_record/connection_adapters/sqlserver/core_ext/attribute_methods'
require 'active_record/connection_adapters/sqlserver/version'
require 'active_record/connection_adapters/sqlserver/type'
require 'active_record/connection_adapters/sqlserver/database_limits'
require 'active_record/connection_adapters/sqlserver/database_statements'
require 'active_record/connection_adapters/sqlserver/transaction'
require 'active_record/connection_adapters/sqlserver/errors'
require 'active_record/connection_adapters/sqlserver/schema_cache'
require 'active_record/connection_adapters/sqlserver/schema_creation'
require 'active_record/connection_adapters/sqlserver/schema_statements'
require 'active_record/connection_adapters/sqlserver/showplan'
require 'active_record/connection_adapters/sqlserver/table_definition'
require 'active_record/connection_adapters/sqlserver/quoting'
require 'active_record/connection_adapters/sqlserver/utils'
require 'active_record/sqlserver_base'
require 'active_record/connection_adapters/sqlserver_column'
module ActiveRecord
module ConnectionAdapters
class SQLServerAdapter < AbstractAdapter
include SQLServer::Version,
SQLServer::Quoting,
SQLServer::DatabaseStatements,
SQLServer::Showplan,
SQLServer::SchemaStatements,
SQLServer::DatabaseLimits
ADAPTER_NAME = 'SQLServer'.freeze
attr_reader :spid
cattr_accessor :cs_equality_operator, instance_accessor: false
cattr_accessor :lowercase_schema_reflection, :showplan_option
self.cs_equality_operator = 'COLLATE Latin1_General_CS_AS_WS'
def initialize(connection, logger, pool, config)
super(connection, logger, pool)
# AbstractAdapter Responsibility
@schema_cache = SQLServer::SchemaCache.new self
@visitor = Arel::Visitors::SQLServer.new self
@prepared_statements = true
# Our Responsibility
@config = config
@connection_options = config
connect
@sqlserver_azure = !!(select_value('SELECT @@version', 'SCHEMA') =~ /Azure/i)
initialize_dateformatter
use_database
end
# === Abstract Adapter ========================================== #
def valid_type?(type)
!native_database_types[type].nil?
end
def schema_creation
SQLServer::SchemaCreation.new self
end
def adapter_name
ADAPTER_NAME
end
def supports_migrations?
true
end
def supports_primary_key?
true
end
def supports_count_distinct?
true
end
def supports_ddl_transactions?
true
end
def supports_bulk_alter?
true
end
def supports_index_sort_order?
true
end
def supports_partial_index?
true
end
def supports_explain?
true
end
def supports_transaction_isolation?
true
end
def supports_views?
true
end
def disable_referential_integrity
do_execute "EXEC sp_MSforeachtable 'ALTER TABLE ? NOCHECK CONSTRAINT ALL'"
yield
ensure
do_execute "EXEC sp_MSforeachtable 'ALTER TABLE ? CHECK CONSTRAINT ALL'"
end
# === Abstract Adapter (Connection Management) ================== #
def active?
return false unless @connection
raw_connection_do 'SELECT 1'
true
rescue TinyTds::Error, ODBC::Error
false
end
def reconnect!
super
disconnect!
connect
end
def disconnect!
super
@spid = nil
case @connection_options[:mode]
when :dblib
@connection.close rescue nil
when :odbc
@connection.disconnect rescue nil
end
@connection = nil
end
def reset!
reset_transaction
do_execute 'IF @@TRANCOUNT > 0 ROLLBACK TRANSACTION'
end
# === Abstract Adapter (Misc Support) =========================== #
def pk_and_sequence_for(table_name)
pk = primary_key(table_name)
pk ? [pk, nil] : nil
end
def primary_key(table_name)
identity_column(table_name).try(:name) || schema_cache.columns(table_name).find(&:is_primary?).try(:name)
end
# === SQLServer Specific (DB Reflection) ======================== #
def sqlserver?
true
end
def sqlserver_azure?
@sqlserver_azure
end
def version
self.class::VERSION
end
def inspect
"#<#{self.class} version: #{version}, mode: #{@connection_options[:mode]}, azure: #{sqlserver_azure?.inspect}>"
end
protected
# === Abstract Adapter (Misc Support) =========================== #
def initialize_type_map(m)
m.register_type %r{.*}, SQLServer::Type::UnicodeString.new
# Exact Numerics
register_class_with_limit m, 'bigint(8)', SQLServer::Type::BigInteger
m.alias_type 'bigint', 'bigint(8)'
register_class_with_limit m, 'int(4)', SQLServer::Type::Integer
m.alias_type 'integer', 'int(4)'
m.alias_type 'int', 'int(4)'
register_class_with_limit m, 'smallint(2)', SQLServer::Type::SmallInteger
m.alias_type 'smallint', 'smallint(2)'
register_class_with_limit m, 'tinyint(1)', SQLServer::Type::TinyInteger
m.alias_type 'tinyint', 'tinyint(1)'
m.register_type 'bit', SQLServer::Type::Boolean.new
m.register_type %r{\Adecimal}i do |sql_type|
scale = extract_scale(sql_type)
precision = extract_precision(sql_type)
SQLServer::Type::Decimal.new precision: precision, scale: scale
end
m.alias_type %r{\Anumeric}i, 'decimal'
m.register_type 'money', SQLServer::Type::Money.new
m.register_type 'smallmoney', SQLServer::Type::SmallMoney.new
# Approximate Numerics
m.register_type 'float', SQLServer::Type::Float.new
m.register_type 'real', SQLServer::Type::Real.new
# Date and Time
m.register_type 'date', SQLServer::Type::Date.new
m.register_type 'datetime', SQLServer::Type::DateTime.new
m.register_type 'smalldatetime', SQLServer::Type::SmallDateTime.new
m.register_type %r{\Atime}i do |sql_type|
scale = extract_scale(sql_type)
precision = extract_precision(sql_type)
SQLServer::Type::Time.new precision: precision
end
# Character Strings
register_class_with_limit m, %r{\Achar}i, SQLServer::Type::Char
register_class_with_limit m, %r{\Avarchar}i, SQLServer::Type::Varchar
m.register_type 'varchar(max)', SQLServer::Type::VarcharMax.new
m.register_type 'text', SQLServer::Type::Text.new
# Unicode Character Strings
register_class_with_limit m, %r{\Anchar}i, SQLServer::Type::UnicodeChar
register_class_with_limit m, %r{\Anvarchar}i, SQLServer::Type::UnicodeVarchar
m.alias_type 'string', 'nvarchar(4000)'
m.register_type 'nvarchar(max)', SQLServer::Type::UnicodeVarcharMax.new
m.register_type 'ntext', SQLServer::Type::UnicodeText.new
# Binary Strings
register_class_with_limit m, %r{\Abinary}i, SQLServer::Type::Binary
register_class_with_limit m, %r{\Avarbinary}i, SQLServer::Type::Varbinary
m.register_type 'varbinary(max)', SQLServer::Type::VarbinaryMax.new
# Other Data Types
m.register_type 'uniqueidentifier', SQLServer::Type::Uuid.new
m.register_type 'timestamp', SQLServer::Type::Timestamp.new
end
def translate_exception(e, message)
case message
when /(cannot insert duplicate key .* with unique index) | (violation of unique key constraint)/i
RecordNotUnique.new(message, e)
when /conflicted with the foreign key constraint/i
InvalidForeignKey.new(message, e)
when /has been chosen as the deadlock victim/i
DeadlockVictim.new(message, e)
else
super
end
end
# === SQLServer Specific (Connection Management) ================ #
def connect
config = @connection_options
@connection = case config[:mode]
when :dblib
dblib_connect(config)
when :odbc
odbc_connect(config)
end
@spid = _raw_select('SELECT @@SPID', fetch: :rows).first.first
configure_connection
end
def dblib_connect(config)
TinyTds::Client.new(
dataserver: config[:dataserver],
host: config[:host],
port: config[:port],
username: config[:username],
password: config[:password],
database: config[:database],
tds_version: config[:tds_version],
appname: config_appname(config),
login_timeout: config_login_timeout(config),
timeout: config_timeout(config),
encoding: config_encoding(config),
azure: config[:azure]
).tap do |client|
if config[:azure]
client.execute('SET ANSI_NULLS ON').do
client.execute('SET CURSOR_CLOSE_ON_COMMIT OFF').do
client.execute('SET ANSI_NULL_DFLT_ON ON').do
client.execute('SET IMPLICIT_TRANSACTIONS OFF').do
client.execute('SET ANSI_PADDING ON').do
client.execute('SET QUOTED_IDENTIFIER ON')
client.execute('SET ANSI_WARNINGS ON').do
else
client.execute('SET ANSI_DEFAULTS ON').do
client.execute('SET CURSOR_CLOSE_ON_COMMIT OFF').do
client.execute('SET IMPLICIT_TRANSACTIONS OFF').do
end
client.execute('SET TEXTSIZE 2147483647').do
client.execute('SET CONCAT_NULL_YIELDS_NULL ON').do
end
end
def odbc_connect(config)
if config[:dsn].include?(';')
driver = ODBC::Driver.new.tap do |d|
d.name = config[:dsn_name] || 'Driver1'
d.attrs = config[:dsn].split(';').map { |atr| atr.split('=') }.reject { |kv| kv.size != 2 }.reduce({}) { |a, e| k, v = e ; a[k] = v ; a }
end
ODBC::Database.new.drvconnect(driver)
else
ODBC.connect config[:dsn], config[:username], config[:password]
end.tap do |c|
begin
c.use_time = true
c.use_utc = ActiveRecord::Base.default_timezone == :utc
rescue Exception
warn 'Ruby ODBC v0.99992 or higher is required.'
end
end
end
def config_appname(config)
config[:appname] || configure_application_name || Rails.application.class.name.split('::').first rescue nil
end
def config_login_timeout(config)
config[:login_timeout].present? ? config[:login_timeout].to_i : nil
end
def config_timeout(config)
config[:timeout].present? ? config[:timeout].to_i / 1000 : nil
end
def config_encoding(config)
config[:encoding].present? ? config[:encoding] : nil
end
def configure_connection ; end
def configure_application_name ; end
def initialize_dateformatter
@database_dateformat = user_options_dateformat
a, b, c = @database_dateformat.each_char.to_a
[a, b, c].each { |f| f.upcase! if f == 'y' }
dateformat = "%#{a}-%#{b}-%#{c}"
::Date::DATE_FORMATS[:_sqlserver_dateformat] = dateformat
::Time::DATE_FORMATS[:_sqlserver_dateformat] = dateformat
end
      # Forces +database+ (default: the current one) into SINGLE_USER mode,
      # rolling back every other session's transaction, yields to the caller,
      # and always restores MULTI_USER afterwards.
      def remove_database_connections_and_rollback(database = nil)
        name = SQLServer::Utils.extract_identifiers(database || current_database)
        do_execute "ALTER DATABASE #{name} SET SINGLE_USER WITH ROLLBACK IMMEDIATE"
        # The trailing `if block_given?` modifier applies to the whole
        # begin/ensure block: MULTI_USER is only restored when a block was
        # given, otherwise the database is deliberately left in SINGLE_USER.
        begin
          yield
        ensure
          do_execute "ALTER DATABASE #{name} SET MULTI_USER"
        end if block_given?
      end
end
end
| 35.977901 | 149 | 0.60688 |
5deee397f65df873fe5d6ff2d21dfc4d1711c9ff | 1,546 | # frozen_string_literal: true
module RuboCop
module Cop
module Layout
# This cops checks for indentation of the first non-blank non-comment
# line in a file.
#
# @example
# # bad
# class A
# def foo; end
# end
#
# # good
# class A
# def foo; end
# end
#
class InitialIndentation < Cop
include RangeHelp
MSG = 'Indentation of first line in file detected.'.freeze
def investigate(_processed_source)
space_before(first_token) do |space|
add_offense(space, location: first_token.pos)
end
end
def autocorrect(range)
->(corrector) { corrector.remove(range) }
end
private
def first_token
processed_source.find_token { |t| !t.text.start_with?('#') }
end
def space_before(token)
return unless token
return if token.column.zero?
space_range =
range_with_surrounding_space(range: token.pos,
side: :left,
newlines: false)
# If the file starts with a byte order mark (BOM), the column can be
# non-zero, but then we find out here if there's no space to the left
# of the first token.
return if space_range == token.pos
yield range_between(space_range.begin_pos, token.begin_pos)
end
end
end
end
end
| 25.766667 | 79 | 0.535576 |
bbb5c1945abb1620eca6fb95100567e21f562536 | 170 | FactoryBot.define do
factory :customer
factory :order do
customer
end
factory :mongoid_customer
factory :mongoid_order do
mongoid_customer
end
end
| 12.142857 | 27 | 0.735294 |
e8b3304079584b1f5b0500c0885b8b41c319b133 | 2,037 | module Cms::Addon::EditLock
extend ActiveSupport::Concern
extend SS::Addon
LOCK_INTERVAL = 10.minutes.freeze
included do
field :lock_until, type: DateTime
belongs_to :lock_owner, class_name: "Cms::User"
validates :lock_until, datetime: true
validate :validate_lock
before_destroy :validate_lock
after_save :release_lock
end
def acquire_lock(user: @cur_user, force: false)
return if user.blank?
lock_until = LOCK_INTERVAL.from_now.utc
criteria = self.class.where(id: id)
unless force
criteria = criteria.where("$or" => [
# unlocked
{ lock_owner_id: nil, lock_until: nil },
# lock by myself
{ lock_owner_id: user.id },
# lock is expired
{ :lock_until.lt => Time.zone.now.utc },
])
end
x = criteria.find_one_and_update({ '$set' => { lock_owner_id: user.id, lock_until: lock_until }}, return_document: :after)
if x
self.lock_owner_id = x.lock_owner_id
self.lock_until = x.lock_until
end
x.present?
end
def release_lock(user: @cur_user, force: false)
return if user.blank?
criteria = self.class.where(id: id)
unless force
criteria = criteria.where("$or" => [
# lock by myself
{ lock_owner_id: user.id },
# lock is expired
{ :lock_until.lt => Time.zone.now.utc }
])
end
x = criteria.find_one_and_update({ '$unset' => { lock_owner_id: nil, lock_until: nil }}, return_document: :after)
if x
remove_attribute(:lock_owner_id) if has_attribute?(:lock_owner_id)
remove_attribute(:lock_until) if has_attribute?(:lock_until)
end
x.present?
end
def locked?
lock_owner_id.present? && lock_until >= Time.zone.now
end
def lock_owned?(user = @cur_user)
return false unless locked?
return false if user.blank?
lock_owner_id == user.id
end
private
def validate_lock
errors.add :base, :locked, user: lock_owner.long_name if locked? && !lock_owned?
errors.blank?
end
end
| 25.78481 | 126 | 0.651448 |
212140b4117bc7b9f9f824c9903ed6d9a128e281 | 833 | class ResourceEachSerializer < ActiveModel::Serializer
attributes :uri, :unit, :timezone, :span_in_days, :consumable, :notifiable
include Swagger::Blocks
swagger_schema :ResourceEachResponse do
allOf do
schema do
key :'$ref', :AggregatedResourceEachResponse
end
schema do
property :unit do
key :type, :string
key :description, "'singualr' or 'daily' or 'hourly'"
end
property :consumable do
key :type, :boolean
key :description, 'True if this resource should be consumed'
end
property :notifiable do
key :type, :boolean
key :description, 'True if a job notifies its end of task to triglav for this resource, that is, monitoring in agent is not necessary'
end
end
end
end
end
| 29.75 | 144 | 0.635054 |
28ed1bc38b6042b8f6580563703327b8ede41268 | 989 | Calagator::Application.configure do
# Settings specified here will take precedence over those in config/application.rb
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
config.cache_classes = false
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.consider_all_requests_local = true
config.action_view.debug_rjs = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = false
# Print deprecation notices to the Rails logger
config.active_support.deprecation = :log
# Only use best-standards-support built into browsers
config.action_dispatch.best_standards_support = :builtin
end
| 36.62963 | 84 | 0.773509 |
612f6e8fae4bc97ab4956a7385035f2e5e1eaed0 | 6,035 | require_relative 'xcpretty_reporter_options_generator'
module Scan
# Responsible for building the fully working xcodebuild command
class TestCommandGenerator
def generate
parts = prefix
parts << Scan.config[:xcodebuild_command]
parts += options
parts += actions
parts += suffix
parts += pipe
parts
end
def prefix
["set -o pipefail &&"]
end
# Path to the project or workspace as parameter
# This will also include the scheme (if given)
# @return [Array] The array with all the components to join
def project_path_array
proj = Scan.project.xcodebuild_parameters
return proj if proj.count > 0
UI.user_error!("No project/workspace found")
end
def options # rubocop:disable Metrics/PerceivedComplexity
config = Scan.config
options = []
options += project_path_array unless config[:xctestrun]
options << "-sdk '#{config[:sdk]}'" if config[:sdk]
options << destination # generated in `detect_values`
options << "-toolchain '#{config[:toolchain]}'" if config[:toolchain]
options << "-derivedDataPath '#{config[:derived_data_path]}'" if config[:derived_data_path]
options << "-resultBundlePath '#{result_bundle_path}'" if config[:result_bundle]
options << "-parallel-testing-worker-count #{config[:concurrent_workers]}" if config[:concurrent_workers]
options << "-maximum-concurrent-test-simulator-destinations #{config[:max_concurrent_simulators]}" if config[:max_concurrent_simulators]
options << "-disable-concurrent-testing" if config[:disable_concurrent_testing]
options << "-enableCodeCoverage #{config[:code_coverage] ? 'YES' : 'NO'}" unless config[:code_coverage].nil?
options << "-enableAddressSanitizer #{config[:address_sanitizer] ? 'YES' : 'NO'}" unless config[:address_sanitizer].nil?
options << "-enableThreadSanitizer #{config[:thread_sanitizer] ? 'YES' : 'NO'}" unless config[:thread_sanitizer].nil?
options << "-xctestrun '#{config[:xctestrun]}'" if config[:xctestrun]
options << config[:xcargs] if config[:xcargs]
# detect_values will ensure that these values are present as Arrays if
# they are present at all
options += config[:only_testing].map { |test_id| "-only-testing:#{test_id.shellescape}" } if config[:only_testing]
options += config[:skip_testing].map { |test_id| "-skip-testing:#{test_id.shellescape}" } if config[:skip_testing]
options
end
def actions
config = Scan.config
actions = []
actions << :clean if config[:clean]
if config[:build_for_testing]
actions << "build-for-testing"
elsif config[:test_without_building] || config[:xctestrun]
actions << "test-without-building"
else
actions << :build unless config[:skip_build]
actions << :test
end
actions
end
def suffix
suffix = []
suffix
end
def pipe
pipe = ["| tee '#{xcodebuild_log_path}'"]
if Scan.config[:output_style] == 'raw'
return pipe
end
formatter = []
if (custom_formatter = Scan.config[:formatter])
if custom_formatter.end_with?(".rb")
formatter << "-f '#{custom_formatter}'"
else
formatter << "-f `#{custom_formatter}`"
end
elsif FastlaneCore::Env.truthy?("TRAVIS")
formatter << "-f `xcpretty-travis-formatter`"
UI.success("Automatically switched to Travis formatter")
end
if Helper.colors_disabled?
formatter << "--no-color"
end
if Scan.config[:output_style] == 'basic'
formatter << "--no-utf"
end
if Scan.config[:output_style] == 'rspec'
formatter << "--test"
end
@reporter_options_generator = XCPrettyReporterOptionsGenerator.new(Scan.config[:open_report],
Scan.config[:output_types],
Scan.config[:output_files] || Scan.config[:custom_report_file_name],
Scan.config[:output_directory],
Scan.config[:use_clang_report_name],
Scan.config[:xcpretty_args])
reporter_options = @reporter_options_generator.generate_reporter_options
reporter_xcpretty_args = @reporter_options_generator.generate_xcpretty_args_options
return pipe << "| xcpretty #{formatter.join(' ')} #{reporter_options.join(' ')} #{reporter_xcpretty_args}"
end
# Store the raw file
def xcodebuild_log_path
file_name = "#{Scan.config[:app_name] || Scan.project.app_name}-#{Scan.config[:scheme]}.log"
containing = File.expand_path(Scan.config[:buildlog_path])
FileUtils.mkdir_p(containing)
return File.join(containing, file_name)
end
# Generate destination parameters
def destination
unless Scan.cache[:destination]
Scan.cache[:destination] = [*Scan.config[:destination]].map { |dst| "-destination '#{dst}'" }.join(' ')
end
Scan.cache[:destination]
end
# The path to set the Derived Data to
def build_path
unless Scan.cache[:build_path]
day = Time.now.strftime("%F") # e.g. 2015-08-07
Scan.cache[:build_path] = File.expand_path("~/Library/Developer/Xcode/Archives/#{day}/")
FileUtils.mkdir_p(Scan.cache[:build_path])
end
Scan.cache[:build_path]
end
def result_bundle_path
unless Scan.cache[:result_bundle_path]
path = File.join(Scan.config[:output_directory], Scan.config[:scheme]) + ".test_result"
if File.directory?(path)
FileUtils.remove_dir(path)
end
Scan.cache[:result_bundle_path] = path
end
return Scan.cache[:result_bundle_path]
end
end
end
| 37.253086 | 142 | 0.619221 |
e9dd7956a28a9ddc614ad2b3951117ff247fa516 | 775 | # -*- encoding: utf-8 -*-
# stub: httpclient 2.8.3 ruby lib
Gem::Specification.new do |s|
s.name = "httpclient".freeze
s.version = "2.8.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Hiroshi Nakamura".freeze]
s.date = "2016-12-09"
s.email = "[email protected]".freeze
s.executables = ["httpclient".freeze]
s.files = ["bin/httpclient".freeze]
s.homepage = "https://github.com/nahi/httpclient".freeze
s.licenses = ["ruby".freeze]
s.rubygems_version = "3.0.1".freeze
s.summary = "gives something like the functionality of libwww-perl (LWP) in Ruby".freeze
s.installed_by_version = "3.0.1" if s.respond_to? :installed_by_version
end
| 35.227273 | 112 | 0.696774 |
61c12e2b72f0d18f528d602a519fba55ba512634 | 1,693 | require 'discordrb'
require 'json'
# json読み込み
File.open("setting.json") do |file|
$jsonSetting = JSON.load(file)
end
bot = Discordrb::Commands::CommandBot.new token:$jsonSetting["token"], client_id: $jsonSetting["client_id"], prefix: '?'
# ?rand — randomly split the current voice-channel members into groups.
# Optional args are the group sizes; with no args, groups of 4 are made.
bot.command :rand do |event, *code|
  # Collect the user_ids of everyone currently in a voice channel
  voiceHash = event.server.voice_states
  # All members of the server
  userList = event.server.members
  # Resolve each voice state's user_id to a member display name
  userNames = []
  voiceHash.each{|key,value|
    userData = userList.find{|k,val| k == key}
    userNames.push(userData.name)
  }

  # # Add dummy users for testing
  # for index in 1..20 do
  #   userNames.push('Bot-' + index.to_s)
  # end

  memberNum = code
  if memberNum.empty?
    # Auto-split: as many groups of up to 4 as possible
    for index in 1..(userNames.count.div(4)) do
      memberNum.push(4)
    end
  end

  # Shuffle so group assignment is random
  userNames.shuffle!
  # Output buffer (sent as a single message at the end)
  exportStr = []
  # Pop the requested number of names off the front for each group
  groupNum = 1
  memberNum.each{|arg|
    exportStr.push('----- グループ' + groupNum.to_s + ' -----')
    retVal = userNames.shift(arg.to_i)
    retVal.each{|name|
      exportStr.push(name)
    }
    groupNum = groupNum + 1
  }
  # Anyone left over goes into one final group
  if !userNames.empty?
    exportStr.push('----- グループ' + groupNum.to_s + ' -----')
    userNames.each{|name|
      exportStr.push(name)
    }
  end
  # Send the result (user-facing strings intentionally stay Japanese)
  if exportStr.empty?
    event.send_message('対象となる人がいません...')
  else
    event.send_message(exportStr.join("\n"))
  end
end
bot.run | 22.878378 | 121 | 0.561725 |
f70e055c741e3824dadee8ea72a891b934c78b5c | 1,376 | require "spec_helper"
# Specs for Tantot::Agent::Block: a watcher registered with a bare block
# should fire once per Tantot.manager.run cycle with all changes batched.
describe Tantot::Agent::Block do
  context "normal usage" do
    # Mutable state captured by the watch block so examples can observe
    # how many times it fired and with which change payload.
    let(:value) { {changes: 0} }
    let(:changes) { {obj: nil} }

    before do
      # Capture the lets in locals; the watch block runs in a different
      # context and would otherwise shadow `changes` with its own param.
      v = value
      c = changes
      stub_model(:city) do
        watch {|changes| v[:changes] += 1; c[:obj] = changes}
      end
    end

    it "should call the block" do
      city = nil
      Tantot.manager.run do
        city = City.create!
      end
      expect(value[:changes]).to eq(1)
      expect(changes[:obj]).to eq(Tantot::Changes::ById.new({city.id => {"id" => [nil, 1]}}))
    end

    it "should call a single time if multiple changes occur" do
      Tantot.manager.run do
        3.times { City.create! }
      end
      # Still exactly one invocation, with all three creations keyed by id.
      expect(value[:changes]).to eq(1)
      expect(changes[:obj]).to eq(Tantot::Changes::ById.new({1=>{"id"=>[nil, 1]}, 2=>{"id"=>[nil, 2]}, 3=>{"id"=>[nil, 3]}}))
    end
  end

  context "validations" do
    it "should prevent registering twice with the same options" do
      expect do
        stub_model(:city) do
          watch {}
          watch {}
        end
      end.to raise_error(Tantot::MultipleWatchesProhibited, /Can't have multiple/)
    end

    it "should allow registering twice with different options" do
      expect do
        stub_model(:city) do
          watch {}
          watch(:name) {}
        end
      end.not_to raise_error
    end
  end
end
| 25.962264 | 125 | 0.569041 |
ff323112770810cf02c7c2fc226dab8d1d81be9d | 1,435 | # frozen_string_literal: true
# The `__Schema` introspection type, built with graphql-ruby's legacy
# `.define` DSL. Most resolvers delegate to the query context's warden so
# only types/roots visible under the current visibility mask are exposed.
GraphQL::Introspection::SchemaType = GraphQL::ObjectType.define do
  name "__Schema"
  description "A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all "\
              "available types and directives on the server, as well as the entry points for "\
              "query, mutation, and subscription operations."

  field :types, !types[!GraphQL::Introspection::TypeType], "A list of all types supported by this server." do
    resolve ->(obj, arg, ctx) { ctx.warden.types }
  end

  field :queryType, !GraphQL::Introspection::TypeType, "The type that query operations will be rooted at." do
    resolve ->(obj, arg, ctx) { ctx.warden.root_type_for_operation("query") }
  end

  field :mutationType, GraphQL::Introspection::TypeType, "If this server supports mutation, the type that mutation operations will be rooted at." do
    resolve ->(obj, arg, ctx) { ctx.warden.root_type_for_operation("mutation") }
  end

  field :subscriptionType, GraphQL::Introspection::TypeType, "If this server support subscription, the type that subscription operations will be rooted at." do
    resolve ->(obj, arg, ctx) { ctx.warden.root_type_for_operation("subscription") }
  end

  # NOTE(review): unlike the fields above, this reads the schema object
  # (`obj`) directly rather than going through the warden.
  field :directives, !types[!GraphQL::Introspection::DirectiveType], "A list of all directives supported by this server." do
    resolve ->(obj, arg, ctx) { obj.directives.values }
  end

  # Marks this type as part of the introspection system.
  introspection true
end
| 47.833333 | 159 | 0.728223 |
21e0162c8c58e1ef372e44522ada546762b775f7 | 491 | $:.unshift File.expand_path('../lib', __FILE__)
require "rubygems"
# Gem specification for minute — a natural-language date/time parser.
Gem::Specification.new do |spec|
  spec.name     = "minute"
  spec.version  = "0.2"
  spec.summary  = "Natural Language Date/Time parsing library for Ruby"
  spec.author   = "Bryan Goines"
  spec.email    = "[email protected]"
  spec.homepage = "https://github.com/bry4n/minute"
  # Ship the docs, license and all Ruby sources under lib/.
  spec.files = Dir['README.md', 'LICENSE', 'lib/**/*.rb']
  spec.add_dependency "small", "0.4"
end
| 32.733333 | 77 | 0.588595 |
7a476a00a7642294e737e77397f0e2ef6dd1c41c | 375 | class Building
attr_reader :x, :y, :width, :height
def initialize(window, x: 0, y: 0, width: 0, height: 0, color: Gosu::Color::GRAY, z: 0, mode: :default)
@window = window
@x = x
@y = y
@width = width
@height = height
@color = color
@z = z
@mode = mode
end
def update
end
def draw
Gosu.draw_rect(@x, @y, @width, @height, @color, @z, @mode)
end
end | 17.857143 | 104 | 0.602667 |
ac8512d2df9a94f6b6ba8f0a8815f3a19ccf6ccd | 19,470 | # frozen_string_literal: true
require_relative './data_absent_reason_checker'
require_relative './profile_definitions/us_core_goal_definitions'
module Inferno
module Sequence
class USCore311GoalSequence < SequenceBase
include Inferno::DataAbsentReasonChecker
include Inferno::USCore311ProfileDefinitions
title 'Goal Tests'
description 'Verify support for the server capabilities required by the US Core Goal Profile.'
details %(
# Background
The US Core #{title} sequence verifies that the system under test is able to provide correct responses
for Goal queries. These queries must contain resources conforming to US Core Goal Profile as specified
in the US Core v3.1.1 Implementation Guide.
# Testing Methodology
## Searching
This test sequence will first perform each required search associated with this resource. This sequence will perform searches
with the following parameters:
* patient
### Search Parameters
The first search uses the selected patient(s) from the prior launch sequence. Any subsequent searches will look for its
parameter values from the results of the first search. For example, the `identifier` search in the patient sequence is
performed by looking for an existing `Patient.identifier` from any of the resources returned in the `_id` search. If a
value cannot be found this way, the search is skipped.
### Search Validation
Inferno will retrieve up to the first 20 bundle pages of the reply for Goal resources and save them
for subsequent tests.
Each of these resources is then checked to see if it matches the searched parameters in accordance
with [FHIR search guidelines](https://www.hl7.org/fhir/search.html). The test will fail, for example, if a patient search
for gender=male returns a female patient.
## Must Support
Each profile has a list of elements marked as "must support". This test sequence expects to see each of these elements
at least once. If at least one cannot be found, the test will fail. The test will look through the Goal
resources found for these elements.
## Profile Validation
Each resource returned from the first search is expected to conform to the [US Core Goal Profile](http://hl7.org/fhir/us/core/StructureDefinition/us-core-goal).
Each element is checked against teminology binding and cardinality requirements.
Elements with a required binding is validated against its bound valueset. If the code/system in the element is not part
of the valueset, then the test will fail.
## Reference Validation
Each reference within the resources found from the first search must resolve. The test will attempt to read each reference found
and will fail if any attempted read fails.
)
test_id_prefix 'USCG'
requires :token, :patient_ids
conformance_supports :Goal
def validate_resource_item(resource, property, value)
case property
when 'lifecycle-status'
values_found = resolve_path(resource, 'lifecycleStatus')
values = value.split(/(?<!\\),/).each { |str| str.gsub!('\,', ',') }
match_found = values_found.any? { |value_in_resource| values.include? value_in_resource }
assert match_found, "lifecycle-status in Goal/#{resource.id} (#{values_found}) does not match lifecycle-status requested (#{value})"
when 'patient'
values_found = resolve_path(resource, 'subject.reference')
value = value.split('Patient/').last
match_found = values_found.any? { |reference| [value, 'Patient/' + value, "#{@instance.url}/Patient/#{value}"].include? reference }
assert match_found, "patient in Goal/#{resource.id} (#{values_found}) does not match patient requested (#{value})"
when 'target-date'
values_found = resolve_path(resource, 'target.dueDate')
match_found = values_found.any? { |date| validate_date_search(value, date) }
assert match_found, "target-date in Goal/#{resource.id} (#{values_found}) does not match target-date requested (#{value})"
end
end
def perform_search_with_status(reply, search_param)
begin
parsed_reply = JSON.parse(reply.body)
assert parsed_reply['resourceType'] == 'OperationOutcome', 'Server returned a status of 400 without an OperationOutcome.'
rescue JSON::ParserError
assert false, 'Server returned a status of 400 without an OperationOutcome.'
end
warning do
assert @instance.server_capabilities&.search_documented?('Goal'),
%(Server returned a status of 400 with an OperationOutcome, but the
search interaction for this resource is not documented in the
CapabilityStatement. If this response was due to the server
requiring a status parameter, the server must document this
requirement in its CapabilityStatement.)
end
['proposed', 'planned', 'accepted', 'active', 'on-hold', 'completed', 'cancelled', 'entered-in-error', 'rejected'].each do |status_value|
params_with_status = search_param.merge('lifecycle-status': status_value)
reply = get_resource_by_params(versioned_resource_class('Goal'), params_with_status)
assert_response_ok(reply)
assert_bundle_response(reply)
entries = reply.resource.entry.select { |entry| entry.resource.resourceType == 'Goal' }
next if entries.blank?
search_param.merge!('lifecycle-status': status_value)
break
end
reply
end
def patient_ids
@instance.patient_ids.split(',').map(&:strip)
end
@resources_found = false
test :search_by_patient do
metadata do
id '01'
name 'Server returns valid results for Goal search by patient.'
link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html'
description %(
A server SHALL support searching by patient on the Goal resource.
This test will pass if resources are returned and match the search criteria. If none are returned, the test is skipped.
Because this is the first search of the sequence, resources in the response will be used for subsequent tests.
)
versions :r4
end
skip_if_known_search_not_supported('Goal', ['patient'])
@goal_ary = {}
patient_ids.each do |patient|
search_params = {
'patient': patient
}
reply = get_resource_by_params(versioned_resource_class('Goal'), search_params)
reply = perform_search_with_status(reply, search_params) if reply.code == 400
assert_response_ok(reply)
assert_bundle_response(reply)
any_resources = reply&.resource&.entry&.any? { |entry| entry&.resource&.resourceType == 'Goal' }
next unless any_resources
@goal_ary[patient] = fetch_all_bundled_resources(reply, check_for_data_absent_reasons)
@goal = @goal_ary[patient]
.find { |resource| resource.resourceType == 'Goal' }
@resources_found = @goal.present?
save_resource_references(versioned_resource_class('Goal'), @goal_ary[patient])
save_delayed_sequence_references(@goal_ary[patient], USCore311GoalSequenceDefinitions::DELAYED_REFERENCES)
validate_reply_entries(@goal_ary[patient], search_params)
search_params = search_params.merge('patient': "Patient/#{patient}")
reply = get_resource_by_params(versioned_resource_class('Goal'), search_params)
assert_response_ok(reply)
assert_bundle_response(reply)
search_with_type = fetch_all_bundled_resources(reply, check_for_data_absent_reasons)
assert search_with_type.length == @goal_ary[patient].length, 'Expected search by Patient/ID to have the same results as search by ID'
end
skip_if_not_found(resource_type: 'Goal', delayed: false)
end
test :search_by_patient_lifecycle_status do
metadata do
id '02'
name 'Server returns valid results for Goal search by patient+lifecycle-status.'
link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html'
optional
description %(
A server SHOULD support searching by patient+lifecycle-status on the Goal resource.
This test will pass if resources are returned and match the search criteria. If none are returned, the test is skipped.
)
versions :r4
end
skip_if_known_search_not_supported('Goal', ['patient', 'lifecycle-status'])
skip_if_not_found(resource_type: 'Goal', delayed: false)
resolved_one = false
patient_ids.each do |patient|
search_params = {
'patient': patient,
'lifecycle-status': get_value_for_search_param(resolve_element_from_path(@goal_ary[patient], 'lifecycleStatus') { |el| get_value_for_search_param(el).present? })
}
next if search_params.any? { |_param, value| value.nil? }
resolved_one = true
reply = get_resource_by_params(versioned_resource_class('Goal'), search_params)
validate_search_reply(versioned_resource_class('Goal'), reply, search_params)
end
skip 'Could not resolve all parameters (patient, lifecycle-status) in any resource.' unless resolved_one
end
test :search_by_patient_target_date do
metadata do
id '03'
name 'Server returns valid results for Goal search by patient+target-date.'
link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html'
optional
description %(
A server SHOULD support searching by patient+target-date on the Goal resource.
This test will pass if resources are returned and match the search criteria. If none are returned, the test is skipped.
This will also test support for these target-date comparators: gt, ge, lt, le. Comparator values are created by taking
a target-date value from a resource returned in the first search of this sequence and adding/subtracting a day. For example, a date
of 05/05/2020 will create comparator values of lt2020-05-06 and gt2020-05-04
)
versions :r4
end
skip_if_known_search_not_supported('Goal', ['patient', 'target-date'])
skip_if_not_found(resource_type: 'Goal', delayed: false)
resolved_one = false
patient_ids.each do |patient|
search_params = {
'patient': patient,
'target-date': get_value_for_search_param(resolve_element_from_path(@goal_ary[patient], 'target.dueDate') { |el| get_value_for_search_param(el).present? })
}
next if search_params.any? { |_param, value| value.nil? }
resolved_one = true
reply = get_resource_by_params(versioned_resource_class('Goal'), search_params)
reply = perform_search_with_status(reply, search_params) if reply.code == 400
validate_search_reply(versioned_resource_class('Goal'), reply, search_params)
['gt', 'ge', 'lt', 'le'].each do |comparator|
comparator_val = date_comparator_value(comparator, resolve_element_from_path(@goal_ary[patient], 'target.dueDate') { |el| get_value_for_search_param(el).present? })
comparator_search_params = search_params.merge('target-date': comparator_val)
reply = get_resource_by_params(versioned_resource_class('Goal'), comparator_search_params)
validate_search_reply(versioned_resource_class('Goal'), reply, comparator_search_params)
end
end
skip 'Could not resolve all parameters (patient, target-date) in any resource.' unless resolved_one
end
test :read_interaction do
metadata do
id '04'
name 'Server returns correct Goal resource from Goal read interaction'
link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html'
description %(
A server SHALL support the Goal read interaction.
)
versions :r4
end
skip_if_known_not_supported(:Goal, [:read])
skip_if_not_found(resource_type: 'Goal', delayed: false)
validate_read_reply(@goal, versioned_resource_class('Goal'), check_for_data_absent_reasons)
end
test :vread_interaction do
metadata do
id '05'
name 'Server returns correct Goal resource from Goal vread interaction'
link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html'
optional
description %(
A server SHOULD support the Goal vread interaction.
)
versions :r4
end
skip_if_known_not_supported(:Goal, [:vread])
skip_if_not_found(resource_type: 'Goal', delayed: false)
validate_vread_reply(@goal, versioned_resource_class('Goal'))
end
test :history_interaction do
metadata do
id '06'
name 'Server returns correct Goal resource from Goal history interaction'
link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html'
optional
description %(
A server SHOULD support the Goal history interaction.
)
versions :r4
end
skip_if_known_not_supported(:Goal, [:history])
skip_if_not_found(resource_type: 'Goal', delayed: false)
validate_history_reply(@goal, versioned_resource_class('Goal'))
end
test 'Server returns Provenance resources from Goal search by patient + _revIncludes: Provenance:target' do
metadata do
id '07'
link 'https://www.hl7.org/fhir/search.html#revinclude'
description %(
A Server SHALL be capable of supporting the following _revincludes: Provenance:target.
This test will perform a search for patient + _revIncludes: Provenance:target and will pass
if a Provenance resource is found in the reponse.
)
versions :r4
end
skip_if_known_revinclude_not_supported('Goal', 'Provenance:target')
skip_if_not_found(resource_type: 'Goal', delayed: false)
provenance_results = []
patient_ids.each do |patient|
search_params = {
'patient': patient
}
search_params['_revinclude'] = 'Provenance:target'
reply = get_resource_by_params(versioned_resource_class('Goal'), search_params)
reply = perform_search_with_status(reply, search_params) if reply.code == 400
assert_response_ok(reply)
assert_bundle_response(reply)
provenance_results += fetch_all_bundled_resources(reply, check_for_data_absent_reasons)
.select { |resource| resource.resourceType == 'Provenance' }
end
save_resource_references(versioned_resource_class('Provenance'), provenance_results)
save_delayed_sequence_references(provenance_results, USCore311GoalSequenceDefinitions::DELAYED_REFERENCES)
skip 'No Provenance resources were returned from this search' unless provenance_results.present?
end
test :validate_resources do
metadata do
id '08'
name 'Goal resources returned from previous search conform to the US Core Goal Profile.'
link 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-goal'
description %(
This test verifies resources returned from the first search conform to the [US Core Goal Profile](http://hl7.org/fhir/us/core/StructureDefinition/us-core-goal).
It verifies the presence of mandatory elements and that elements with required bindings contain appropriate values.
CodeableConcept element bindings will fail if none of its codings have a code/system that is part of the bound ValueSet.
Quantity, Coding, and code element bindings will fail if its code/system is not found in the valueset.
)
versions :r4
end
skip_if_not_found(resource_type: 'Goal', delayed: false)
test_resources_against_profile('Goal')
end
test 'All must support elements are provided in the Goal resources returned.' do
metadata do
id '09'
link 'http://www.hl7.org/fhir/us/core/general-guidance.html#must-support'
description %(
US Core Responders SHALL be capable of populating all data elements as part of the query results as specified by the US Core Server Capability Statement.
This will look through the Goal resources found previously for the following must support elements:
* lifecycleStatus
* description
* subject
* target
* Goal.target.due[x]:dueDate
)
versions :r4
end
skip_if_not_found(resource_type: 'Goal', delayed: false)
must_supports = USCore311GoalSequenceDefinitions::MUST_SUPPORTS
missing_slices = must_supports[:slices].reject do |slice|
@goal_ary&.values&.flatten&.any? do |resource|
slice_found = find_slice(resource, slice[:path], slice[:discriminator])
slice_found.present?
end
end
missing_must_support_elements = must_supports[:elements].reject do |element|
@goal_ary&.values&.flatten&.any? do |resource|
value_found = resolve_element_from_path(resource, element[:path]) { |value| element[:fixed_value].blank? || value == element[:fixed_value] }
value_found.present?
end
end
missing_must_support_elements.map! { |must_support| "#{must_support[:path]}#{': ' + must_support[:fixed_value] if must_support[:fixed_value].present?}" }
missing_must_support_elements += missing_slices.map { |slice| slice[:name] }
skip_if missing_must_support_elements.present?,
"Could not find #{missing_must_support_elements.join(', ')} in the #{@goal_ary&.values&.flatten&.length} provided Goal resource(s)"
@instance.save!
end
test 'Every reference within Goal resources can be read.' do
metadata do
id '10'
link 'http://hl7.org/fhir/references.html'
description %(
This test will attempt to read the first 50 reference found in the resources from the first search.
The test will fail if Inferno fails to read any of those references.
)
versions :r4
end
skip_if_known_not_supported(:Goal, [:search, :read])
skip_if_not_found(resource_type: 'Goal', delayed: false)
validated_resources = Set.new
max_resolutions = 50
@goal_ary&.values&.flatten&.each do |resource|
validate_reference_resolutions(resource, validated_resources, max_resolutions) if validated_resources.length < max_resolutions
end
end
end
end
end
| 42.791209 | 176 | 0.668567 |
0342af7779ed147fe0a59e69918019bb803e5dbb | 3,743 | class CloningsController < ApplicationController
def index
@clonings = current_user.clonings.order("id DESC")
end
def show
@cloning = Cloning.find(params[:id])
end
def new
@transformations = current_user.transformations
end
def create
transformation_ids = params[:transformation_ids]
transformations = params[:transformations]
num_colonies = count_colonies(transformation_ids, transformations)
#checking if just one clone has been picked
if num_colonies == 0
redirect_to new_cloning_path, :flash => {:error => "You have to pick at least 1 clone"}
return
end
if num_colonies > 95
redirect_to new_cloning_path, :flash => {:error => "You cannot pick more than 95 clones"}
return
end
begin
#creating list of wells
wells = create_wells()
ActiveRecord::Base.transaction do
#creating the growth plate
plate = GrowthPlate.create(user: current_user)
plate.name = "growth_plate_"+plate.id.to_s
plate.save
#creating cloning object
@cloning = Cloning.create(user: current_user, growth_plate: plate, :protocol => Protocol.find_by_process(Cloning.to_s))
#creating clones depending on the number of colonies picked
transformation_ids.each do |tid|
transformation = Transformation.find(tid)
#creating a vector with right number of clones of each color to grow
colonies = []
transformations[tid].keys.each do |key|
transformations[tid][key].to_i.times.each { colonies << key}
end
#creating clones
colonies.each do |colony|
name = transformation.ligation_product.pcr_product.part.name + ".c"+ (Clone.count(:conditions => {:transformation_id => tid})+1).to_s
clone = Clone.create(name: name, user: current_user, cloning: @cloning, transformation_id: tid, color: colony, status: Status.find_by_process_and_default(Clone.to_s,true))
#associating clones to growth plate wells
GrowthPlateWell.create(clone: clone, growth_plate: plate, well: wells.pop())
end
end
end
redirect_to cloning_path(@cloning)
rescue => ex
redirect_to :controller => :clonings, :action => :new, :flash => {:error => "Error while performing cloning (#{ex.message})" }
end
end
def edit
@cloning = Cloning.find(params[:id])
@statuses = Status.find_all_by_process(Clone.to_s)
end
def update
@cloning = Cloning.find(params[:id])
if @cloning.update_attributes(params[:cloning])
redirect_to cloning_path(@cloning), :notice => "Clones status updated"
else
flash[:error] = "Cannot update clones status."
redirect_to edit_cloning_path(@cloning)
end
end
def update_picked_all
Clone.update_all({:status_id => Status.find_by_process_and_name(Clone.to_s,:picked).id}, {:cloning_id => params[:id]})
redirect_to edit_cloning_path(params[:id]), :notice => "All clones marked as picked."
end
private
def count_colonies(transformation_ids, transformations)
num_clones = 0
if transformation_ids.nil? or transformations.nil?
return 0
end
transformation_ids.each do |tid|
num_clones += transformations[tid][:white].to_i+transformations[tid][:blue].to_i+transformations[tid][:light_blue].to_i
end
return num_clones
end
def create_wells
wells = []
('A'..'H').each do |row|
('01'..'12').each do |col|
wells.push(row+col)
end
end
return wells.reverse!
end
end
| 31.991453 | 183 | 0.641464 |
181422c82c12fe875650ca5e3a17c3e886ebac6e | 605 | require 'test_helper'
# Placeholder unit test for the Event model (schema annotated below);
# contains no real assertions yet.
class EventTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
# == Schema Information
#
# Table name: events
#
# id :integer not null, primary key
# name :string
# description :text
# seat_quantity :integer
# created_at :datetime not null
# updated_at :datetime not null
# is_displayed :boolean default(FALSE)
# token :string
# location_name :string
# location_address :string
# started_at :datetime
# end_at :datetime
#
| 23.269231 | 59 | 0.580165 |
87e2dff7e09717729760d3f77979d7e430359c97 | 195 | require File.dirname(__FILE__) + '/../test_helper'
# Auto-generated functional-test scaffold; the trivial test only keeps
# the suite green until real controller assertions are written.
class WeeklydailyControllerTest < ActionController::TestCase
  # Replace this with your real tests.
  def test_truth
    assert true
  end
end
| 21.666667 | 60 | 0.758974 |
79ec3f21aa75b4cab20e70ea23694b541bf4c812 | 40 | json.partial! "terms/term", term: @term
| 20 | 39 | 0.7 |
79a971064b6b901bfcfdb19692a148416cfe328c | 132 | class ApplicationController < ActionController::Base
protect_from_forgery
before_filter :authenticate_user!
helper :all
end
| 16.5 | 52 | 0.818182 |
bfb33e4ad30dd7c4db800ee965af38b5540dd280 | 29,592 | # frozen-string-literal: true
require 'java'
Sequel.require 'adapters/utils/stored_procedures'
module Sequel
# Houses Sequel's JDBC support when running on JRuby.
module JDBC
# Make it accesing the java.lang hierarchy more ruby friendly.
module JavaLang
include_package 'java.lang'
end
# Make it accesing the java.sql hierarchy more ruby friendly.
module JavaSQL
include_package 'java.sql'
end
# Make it accesing the javax.naming hierarchy more ruby friendly.
module JavaxNaming
include_package 'javax.naming'
end
# Used to identify a jndi connection and to extract the jndi
# resource name.
JNDI_URI_REGEXP = /\Ajdbc:jndi:(.+)/
# The types to check for 0 scale to transform :decimal types
# to :integer.
DECIMAL_TYPE_RE = /number|numeric|decimal/io
Sequel::Deprecation.deprecate_constant(self, :DECIMAL_TYPE_RE)
# Contains procs keyed on subadapter type that extend the
# given database object so it supports the correct database type.
DATABASE_SETUP = {}
# Allow loading the necessary JDBC support via a gem.
def self.load_gem(name)
begin
require "jdbc/#{name.to_s.downcase}"
rescue LoadError
# jdbc gem not used, hopefully the user has the .jar in their CLASSPATH
else
if defined?(::Jdbc) && ( ::Jdbc.const_defined?(name) rescue nil )
jdbc_module = ::Jdbc.const_get(name) # e.g. Jdbc::SQLite3
jdbc_module.load_driver if jdbc_module.respond_to?(:load_driver)
end
end
end
# Attempt to load the JDBC driver class, which should be specified as a string
# containing the driver class name (which JRuby should autoload).
# Note that the string is evaled, so this method is not safe to call with
# untrusted input.
# Raise a Sequel::AdapterNotFound if evaluating the class name raises a NameError.
def self.load_driver(drv, gem=nil)
load_gem(gem) if gem
eval drv
rescue NameError
raise Sequel::AdapterNotFound, "#{drv} not loaded#{", try installing jdbc-#{gem.to_s.downcase} gem" if gem}"
end
class TypeConvertor
%w'Boolean Float Double Int Long Short'.each do |meth|
class_eval("def #{meth}(r, i) v = r.get#{meth}(i); v unless r.wasNull end", __FILE__, __LINE__)
end
%w'Object Array String Time Date Timestamp BigDecimal Blob Bytes Clob'.each do |meth|
class_eval("def #{meth}(r, i) r.get#{meth}(i) end", __FILE__, __LINE__)
end
def RubyTime(r, i)
if v = r.getTime(i)
Sequel.string_to_time("#{v.to_string}.#{sprintf('%03i', v.getTime.divmod(1000).last)}")
end
end
def RubyDate(r, i)
if v = r.getDate(i)
Date.civil(v.getYear + 1900, v.getMonth + 1, v.getDate)
end
end
def RubyTimestamp(r, i)
if v = r.getTimestamp(i)
Sequel.database_to_application_timestamp([v.getYear + 1900, v.getMonth + 1, v.getDate, v.getHours, v.getMinutes, v.getSeconds, v.getNanos])
end
end
def RubyBigDecimal(r, i)
if v = r.getBigDecimal(i)
BigDecimal.new(v.to_string)
end
end
def RubyBlob(r, i)
if v = r.getBytes(i)
Sequel::SQL::Blob.new(String.from_java_bytes(v))
end
end
def RubyClob(r, i)
if v = r.getClob(i)
v.getSubString(1, v.length)
end
end
INSTANCE = new
o = INSTANCE
MAP = Hash.new(o.method(:Object))
types = Java::JavaSQL::Types
{
:ARRAY => :Array,
:BOOLEAN => :Boolean,
:CHAR => :String,
:DOUBLE => :Double,
:FLOAT => :Double,
:INTEGER => :Int,
:LONGNVARCHAR => :String,
:LONGVARCHAR => :String,
:NCHAR => :String,
:REAL => :Float,
:SMALLINT => :Short,
:TINYINT => :Short,
:VARCHAR => :String,
}.each do |type, meth|
MAP[types.const_get(type)] = o.method(meth)
end
BASIC_MAP = MAP.dup
{
:BINARY => :Blob,
:BLOB => :Blob,
:CLOB => :Clob,
:DATE => :Date,
:DECIMAL => :BigDecimal,
:LONGVARBINARY => :Blob,
:NCLOB => :Clob,
:NUMERIC => :BigDecimal,
:TIME => :Time,
:TIMESTAMP => :Timestamp,
:VARBINARY => :Blob,
}.each do |type, meth|
BASIC_MAP[types.const_get(type)] = o.method(meth)
MAP[types.const_get(type)] = o.method(:"Ruby#{meth}")
end
MAP.freeze
BASIC_MAP.freeze
INSTANCE.freeze
end
# JDBC Databases offer a fairly uniform interface that does not change
# much based on the sub adapter.
class Database < Sequel::Database
set_adapter_scheme :jdbc
# The type of database we are connecting to
attr_reader :database_type
# The Java database driver we are using (should be a Java class)
attr_reader :driver
# Whether to convert some Java types to ruby types when retrieving rows.
# True by default, can be set to false to roughly double performance when
# fetching rows.
attr_accessor :convert_types
# The fetch size to use for JDBC Statement objects created by this database.
# By default, this is nil so a fetch size is not set explicitly.
attr_accessor :fetch_size
# Map of JDBC type ids to callable objects that return appropriate ruby values.
attr_reader :type_convertor_map
# Map of JDBC type ids to callable objects that return appropriate ruby or java values.
attr_reader :basic_type_convertor_map
# Execute the given stored procedure with the give name. If a block is
# given, the stored procedure should return rows.
def call_sproc(name, opts = OPTS)
args = opts[:args] || []
sql = "{call #{name}(#{args.map{'?'}.join(',')})}"
synchronize(opts[:server]) do |conn|
cps = conn.prepareCall(sql)
i = 0
args.each{|arg| set_ps_arg(cps, arg, i+=1)}
begin
if block_given?
yield log_connection_yield(sql, conn){cps.executeQuery}
else
case opts[:type]
when :insert
log_connection_yield(sql, conn){cps.executeUpdate}
last_insert_id(conn, opts)
else
log_connection_yield(sql, conn){cps.executeUpdate}
end
end
rescue NativeException, JavaSQL::SQLException => e
raise_error(e)
ensure
cps.close
end
end
end
# Connect to the database using JavaSQL::DriverManager.getConnection.
def connect(server)
opts = server_opts(server)
conn = if jndi?
get_connection_from_jndi
else
args = [uri(opts)]
args.concat([opts[:user], opts[:password]]) if opts[:user] && opts[:password]
begin
JavaSQL::DriverManager.setLoginTimeout(opts[:login_timeout]) if opts[:login_timeout]
raise StandardError, "skipping regular connection" if opts[:jdbc_properties]
JavaSQL::DriverManager.getConnection(*args)
rescue JavaSQL::SQLException, NativeException, StandardError => e
raise e unless driver
# If the DriverManager can't get the connection - use the connect
# method of the driver. (This happens under Tomcat for instance)
props = java.util.Properties.new
if opts && opts[:user] && opts[:password]
props.setProperty("user", opts[:user])
props.setProperty("password", opts[:password])
end
opts[:jdbc_properties].each{|k,v| props.setProperty(k.to_s, v)} if opts[:jdbc_properties]
begin
c = driver.new.connect(args[0], props)
raise(Sequel::DatabaseError, 'driver.new.connect returned nil: probably bad JDBC connection string') unless c
c
rescue JavaSQL::SQLException, NativeException, StandardError => e2
if e2.respond_to?(:message=) && e2.message != e.message
e2.message = "#{e2.message}\n#{e.class.name}: #{e.message}"
end
raise e2
end
end
end
setup_connection(conn)
end
# Close given adapter connections, and delete any related prepared statements.
def disconnect_connection(c)
@connection_prepared_statements_mutex.synchronize{@connection_prepared_statements.delete(c)}
c.close
end
# Execute the given SQL. If a block is given, if should be a SELECT
# statement or something else that returns rows.
def execute(sql, opts=OPTS, &block)
return call_sproc(sql, opts, &block) if opts[:sproc]
return execute_prepared_statement(sql, opts, &block) if [Symbol, Dataset].any?{|c| sql.is_a?(c)}
synchronize(opts[:server]) do |conn|
statement(conn) do |stmt|
if block
if size = fetch_size
stmt.setFetchSize(size)
end
yield log_connection_yield(sql, conn){stmt.executeQuery(sql)}
else
case opts[:type]
when :ddl
log_connection_yield(sql, conn){stmt.execute(sql)}
when :insert
log_connection_yield(sql, conn){execute_statement_insert(stmt, sql)}
last_insert_id(conn, Hash[opts].merge!(:stmt=>stmt))
else
log_connection_yield(sql, conn){stmt.executeUpdate(sql)}
end
end
end
end
end
alias execute_dui execute
# Execute the given DDL SQL, which should not return any
# values or rows.  Delegates to #execute with :type set to :ddl,
# using a copy of the options so the caller's hash is not mutated.
def execute_ddl(sql, opts=OPTS)
  ddl_opts = Hash[opts]
  ddl_opts[:type] = :ddl
  execute(sql, ddl_opts)
end
# Execute the given INSERT SQL, returning the last inserted
# row id.  Delegates to #execute with :type set to :insert,
# using a copy of the options so the caller's hash is not mutated.
def execute_insert(sql, opts=OPTS)
  insert_opts = Hash[opts]
  insert_opts[:type] = :insert
  execute(sql, insert_opts)
end
# Freeze the type convertor maps (built in setup_type_convertor_map_early)
# in addition to the default database freezing behavior.
def freeze
  @type_convertor_map.freeze
  @basic_type_convertor_map.freeze
  super
end
# Use the JDBC metadata to get a list of foreign keys for the table.
# Returns an array of hashes with :name, :columns, :table, and :key
# entries, with the column lists ordered by the key sequence number.
def foreign_key_list(table, opts=OPTS)
  m = output_identifier_meth
  schema, table = metadata_schema_and_table(table, opts)
  fks_by_name = {}
  metadata(:getImportedKeys, nil, schema, table) do |row|
    # Rows without a foreign key name are ignored, matching the
    # original grouping behavior.
    next unless row[:fk_name]
    fk = fks_by_name[row[:fk_name]] ||= {:name=>m.call(row[:fk_name]), :columns=>[], :table=>m.call(row[:pktable_name]), :key=>[]}
    # Collect [sequence, column] pairs so multi-column keys can be
    # sorted into key order below.
    fk[:columns] << [row[:key_seq], m.call(row[:fkcolumn_name])]
    fk[:key] << [row[:key_seq], m.call(row[:pkcolumn_name])]
  end
  # Sort each column list by key sequence, then drop the sequence numbers.
  fks_by_name.values.each do |fk|
    [:columns, :key].each do |k|
      fk[k] = fk[k].sort.map{|_, col| col}
    end
  end
end
# Use the JDBC metadata to get the index information for the table.
# Returns a hash mapping index name to a hash with :columns and
# :unique entries.
def indexes(table, opts=OPTS)
  m = output_identifier_meth
  schema, table = metadata_schema_and_table(table, opts)
  indexes = {}
  metadata(:getIndexInfo, nil, schema, table, false, true) do |row|
    column = row[:column_name]
    # Statistics rows have no column name; skip them.
    next unless column
    # Skip the implicit primary key index if the subadapter defines a
    # pattern for recognizing it.
    next if respond_to?(:primary_key_index_re, true) && row[:index_name] =~ primary_key_index_re
    index = indexes[m.call(row[:index_name])] ||= {:columns=>[], :unique=>[false, 0].include?(row[:non_unique])}
    index[:columns] << m.call(column)
  end
  indexes
end
# Whether or not JNDI is being used for this connection, based on
# whether the connection uri matches the JNDI uri pattern.
def jndi?
  JNDI_URI_REGEXP.match?(uri)
end
# All tables in this database, as an array of identifiers.
# opts is passed through to get_tables (which uses :schema).
def tables(opts=OPTS)
  get_tables('TABLE', opts)
end
# The uri for this connection.  You can specify the uri
# using the :uri, :url, or :database options.  You don't
# need to worry about this if you use Sequel.connect
# with the JDBC connection strings.
#
# A "jdbc:" prefix is added if the configured value does not
# already start with one.
def uri(opts=OPTS)
  opts = @opts.merge(opts)
  ur = opts[:uri] || opts[:url] || opts[:database]
  # \A anchors at the start of the string.  The previous /^\Ajdbc:/
  # pattern had a redundant line anchor before \A with no effect.
  ur =~ /\Ajdbc:/ ? ur : "jdbc:#{ur}"
end
# All views in this database, as an array of identifiers.
# opts is passed through to get_tables (which uses :schema).
def views(opts=OPTS)
  get_tables('VIEW', opts)
end
private
# Call the DATABASE_SETUP proc directly after initialization,
# so the object always uses sub adapter specific code.  Also,
# raise an error immediately if the connection doesn't have a
# uri, since JDBC requires one.
def adapter_initialize
  @connection_prepared_statements = {}
  @connection_prepared_statements_mutex = Mutex.new
  # The :fetch_size option overrides the subadapter's default fetch size.
  @fetch_size = @opts[:fetch_size] ? typecast_value_integer(@opts[:fetch_size]) : default_fetch_size
  @convert_types = typecast_value_boolean(@opts.fetch(:convert_types, true))
  raise(Error, "No connection string specified") unless uri
  # JNDI uris must be resolved to the real JDBC uri before picking a subadapter.
  resolved_uri = jndi? ? get_uri_from_jndi : uri
  setup_type_convertor_map_early
  # Load the subadapter matching the jdbc:<subprotocol> prefix if one
  # exists, otherwise fall back to an explicitly provided :driver option.
  @driver = if (match = /\Ajdbc:([^:]+)/.match(resolved_uri)) && (prok = Sequel::Database.load_adapter(match[1].to_sym, :map=>DATABASE_SETUP, :subdir=>'jdbc'))
    prok.call(self)
  else
    @opts[:driver]
  end
  setup_type_convertor_map
end
# Yield the native prepared statements hash for the given connection
# to the block in a thread-safe manner.  The hash is created on first
# access for the connection.
#
# The block is invoked via yield, so the previous explicit &block
# parameter was redundant and allocated a Proc on every call; it has
# been removed (block-passing callers are unaffected).
def cps_sync(conn)
  @connection_prepared_statements_mutex.synchronize{yield(@connection_prepared_statements[conn] ||= {})}
end
# The exception classes that indicate a database error.
# NativeException is JRuby's wrapper for Java exceptions raised by
# the JDBC driver.
def database_error_classes
  [NativeException]
end
# Return the SQL state for the given exception by walking the chain
# of exception causes until one responds to getSQLState.  Returns nil
# if SQL states are disabled or no cause provides one.
def database_exception_sqlstate(exception, opts)
  return nil unless database_exception_use_sqlstates?
  current = exception
  while current.respond_to?(:cause)
    current = current.cause
    return current.getSQLState if current.respond_to?(:getSQLState)
  end
  nil
end
# Whether the JDBC subadapter should use SQL states for exception handling, true by default.
# Consulted by database_exception_sqlstate before walking exception causes.
def database_exception_use_sqlstates?
  true
end
# The default dataset class used for this database: the JDBC Dataset.
def dataset_class_default
  Dataset
end
# Raise a disconnect error if the SQL state of the cause of the exception indicates so.
def disconnect_error?(exception, opts)
  # Prefer the wrapped driver-level exception when a cause is available.
  cause = exception.respond_to?(:cause) ? exception.cause : exception
  # SQLSTATE class 08 indicates a connection exception.
  super || (cause.respond_to?(:getSQLState) && cause.getSQLState =~ /^08/)
end
# Execute the prepared statement.  If the provided name is a
# dataset, use that as the prepared statement, otherwise use
# it as a key to look it up in the prepared_statements hash.
# If the connection we are using has already prepared an identical
# statement, use that statement instead of creating another.
# Otherwise, prepare a new statement for the connection, bind the
# variables, and execute it.
def execute_prepared_statement(name, opts=OPTS)
  args = opts[:arguments]
  if name.is_a?(Dataset)
    ps = name
    name = ps.prepared_statement_name
  else
    ps = prepared_statement(name)
  end
  sql = ps.prepared_sql
  synchronize(opts[:server]) do |conn|
    if name and cps = cps_sync(conn){|cpsh| cpsh[name]} and cps[0] == sql
      # Cached statement for this connection still matches the SQL, reuse it.
      cps = cps[1]
    else
      # SQL changed for this name: close the stale statement before replacing it.
      log_connection_yield("CLOSE #{name}", conn){cps[1].close} if cps
      cps = log_connection_yield("PREPARE#{" #{name}:" if name} #{sql}", conn){prepare_jdbc_statement(conn, sql, opts)}
      if size = fetch_size
        cps.setFetchSize(size)
      end
      # Only named statements are cached for reuse.
      cps_sync(conn){|cpsh| cpsh[name] = [sql, cps]} if name
    end
    # JDBC bind parameter indexes are 1-based.
    i = 0
    args.each{|arg| set_ps_arg(cps, arg, i+=1)}
    msg = "EXECUTE#{" #{name}" if name}"
    if ps.log_sql
      msg += " ("
      msg << sql
      msg << ")"
    end
    begin
      if block_given?
        yield log_connection_yield(msg, conn, args){cps.executeQuery}
      else
        case opts[:type]
        when :ddl
          log_connection_yield(msg, conn, args){cps.execute}
        when :insert
          log_connection_yield(msg, conn, args){execute_prepared_statement_insert(cps)}
          # Hash[opts] copies so the mutating merge! doesn't alter the caller's hash.
          last_insert_id(conn, Hash[opts].merge!(:prepared=>true, :stmt=>cps))
        else
          log_connection_yield(msg, conn, args){cps.executeUpdate}
        end
      end
    rescue NativeException, JavaSQL::SQLException => e
      raise_error(e)
    ensure
      # Unnamed statements are one-shot, so close them immediately.
      cps.close unless name
    end
  end
end
# Execute the prepared insert statement via JDBC's executeUpdate.
def execute_prepared_statement_insert(stmt)
  stmt.executeUpdate
end
# Execute the insert SQL using the statement via executeUpdate.
def execute_statement_insert(stmt, sql)
  stmt.executeUpdate(sql)
end
# The default fetch size to use for statements. Nil by default, so that the
# default for the JDBC driver is used.  Can be overridden per-database via
# the :fetch_size option (see adapter_initialize).
def default_fetch_size
  nil
end
# Gets the connection from JNDI, by looking up the name embedded in
# the jdbc:jndi: uri.
# NOTE(review): the looked-up object is expected to respond to
# #connection (presumably a DataSource) — confirm against the JNDI
# resources in use.
def get_connection_from_jndi
  jndi_name = JNDI_URI_REGEXP.match(uri)[1]
  JavaxNaming::InitialContext.new.lookup(jndi_name).connection
end
# Gets the JDBC connection uri from the JNDI resource, by opening a
# temporary connection and reading its metadata url.  The temporary
# connection is always closed.
def get_uri_from_jndi
  conn = get_connection_from_jndi
  conn.meta_data.url
ensure
  conn.close if conn
end
# Backbone of the tables and views support: query the JDBC metadata
# for entries of the given type ('TABLE' or 'VIEW'), optionally
# restricted to opts[:schema], and return their identifiers.
def get_tables(type, opts)
  m = output_identifier_meth
  schema = opts[:schema]
  schema = schema.to_s if schema
  tables = []
  metadata(:getTables, nil, schema, nil, [type].to_java(:string)) do |h|
    tables << m.call(h[:table_name])
  end
  tables
end
# Support Date objects used in bound variables, by converting them to
# java.sql.Date instances (constructed from milliseconds since the
# epoch, in the local time zone).
def java_sql_date(date)
  java.sql.Date.new(Time.local(date.year, date.month, date.day).to_i * 1000)
end
# Support DateTime objects used in bound variables, by converting them
# to java.sql.Timestamp instances with sub-second precision.
def java_sql_datetime(datetime)
  ts = java.sql.Timestamp.new(Time.local(datetime.year, datetime.month, datetime.day, datetime.hour, datetime.min, datetime.sec).to_i * 1000)
  # DateTime#sec_fraction is a fraction of a second on ruby 1.9+, but a
  # fraction of a day on 1.8, hence the different nanosecond multipliers
  # (86400000000000 = nanoseconds per day).
  ts.setNanos((datetime.sec_fraction * (RUBY_VERSION >= '1.9.0' ? 1000000000 : 86400000000000)).to_i)
  ts
end
# Support fractional seconds for Time objects used in bound variables,
# by converting them to java.sql.Timestamp instances.
def java_sql_timestamp(time)
  ts = java.sql.Timestamp.new(time.to_i * 1000)
  # Work around jruby 1.6 ruby 1.9 mode bug: only trust nsec when it is
  # non-zero, otherwise fall back to usec.
  ts.setNanos((RUBY_VERSION >= '1.9.0' && time.nsec != 0) ? time.nsec : time.usec * 1000)
  ts
end
# Log the given SQL and then execute it on the connection, used by
# the transaction code.  A throwaway Statement is used and closed
# by #statement.
def log_connection_execute(conn, sql)
  statement(conn){|s| log_connection_yield(sql, conn){s.execute(sql)}}
end
# By default, there is no support for determining the last inserted
# id, so return nil. This method should be overridden in
# sub adapters.  opts may include :stmt (the statement used) and
# :prepared (set when a prepared statement was executed).
def last_insert_id(conn, opts)
  nil
end
# Yield the metadata for this database.  The first argument is the
# DatabaseMetaData method to call (e.g. :getTables); the remaining
# arguments are passed through to it.  Each row of the resulting
# result set is yielded to the block as a hash (via
# process_result_set), and the result set is always closed.
def metadata(*args, &block)
  synchronize do |c|
    result = c.getMetaData.send(*args)
    begin
      metadata_dataset.send(:process_result_set, result, &block)
    ensure
      result.close
    end
  end
end
# Return the schema and table suitable for use with metadata queries,
# applying the input identifier conversion to both parts.  The schema
# falls back to opts[:schema] when the table name is unqualified.
def metadata_schema_and_table(table, opts)
  identifier = input_identifier_meth(opts[:dataset])
  schema, table = schema_and_table(table)
  schema ||= opts[:schema]
  schema &&= identifier.call(schema)
  table = identifier.call(table)
  [schema, table]
end
# Create a JDBC prepared statement on the connection with the given SQL.
# opts is not used by the default implementation, but is passed in from
# execute_prepared_statement.
def prepare_jdbc_statement(conn, sql, opts)
  conn.prepareStatement(sql)
end
# Java being java, you need to specify the type of each argument
# for the prepared statement, and bind it individually. This
# guesses which JDBC method to use, and hopefully JRuby will convert
# things properly for us.
#
# The order of the when clauses matters: Sequel::SQL::Blob must come
# before String (Blob is a String subclass) and DateTime before Date
# (DateTime is a Date subclass).  java.sql values are passed through
# unmodified.
def set_ps_arg(cps, arg, i)
  case arg
  when Integer
    cps.setLong(i, arg)
  when Sequel::SQL::Blob
    cps.setBytes(i, arg.to_java_bytes)
  when String
    cps.setString(i, arg)
  when Float
    cps.setDouble(i, arg)
  when TrueClass, FalseClass
    cps.setBoolean(i, arg)
  when NilClass
    set_ps_arg_nil(cps, i)
  when DateTime
    cps.setTimestamp(i, java_sql_datetime(arg))
  when Date
    cps.setDate(i, java_sql_date(arg))
  when Time
    cps.setTimestamp(i, java_sql_timestamp(arg))
  when Java::JavaSql::Timestamp
    cps.setTimestamp(i, arg)
  when Java::JavaSql::Date
    cps.setDate(i, arg)
  else
    # Fall back to setObject and let the JDBC driver figure out the type.
    cps.setObject(i, arg)
  end
end
# Use setString with a nil value by default, but this doesn't work on all subadapters.
# i is the 1-based bind parameter index.
def set_ps_arg_nil(cps, i)
  cps.setString(i, nil)
end
# Return the connection. Used to do configuration on the
# connection object before adding it to the connection pool.
# The default implementation returns the connection unmodified.
def setup_connection(conn)
  conn
end
# Append a size qualifier ("(size)" for strings, "(size, scale)" for
# decimals) to the schema hash's :db_type entry for the base types
# that support one.  Mutates the given hash in place.
def schema_column_set_db_type(schema)
  type = schema[:type]
  if type == :string
    if schema[:db_type] =~ /\A(character( varying)?|n?(var)?char2?)\z/io && schema[:column_size] > 0
      schema[:db_type] = "#{schema[:db_type]}(#{schema[:column_size]})"
    end
  elsif type == :decimal
    if schema[:db_type] =~ /\A(decimal|numeric)\z/io && schema[:column_size] > 0 && schema[:scale] >= 0
      schema[:db_type] = "#{schema[:db_type]}(#{schema[:column_size]}, #{schema[:scale]})"
    end
  end
end
# Parse the table schema for the given table, returning an array of
# [column_name, info_hash] pairs built from the JDBC metadata.
def schema_parse_table(table, opts=OPTS)
  m = output_identifier_meth(opts[:dataset])
  schema, table = metadata_schema_and_table(table, opts)
  pks, ts = [], []
  # Collect the primary key column names first, so each column hash
  # below can be flagged with :primary_key.
  metadata(:getPrimaryKeys, nil, schema, table) do |h|
    next if schema_parse_table_skip?(h, schema)
    pks << h[:column_name]
  end
  schemas = []
  metadata(:getColumns, nil, schema, table, nil) do |h|
    next if schema_parse_table_skip?(h, schema)
    s = {
      :type=>schema_column_type(h[:type_name]),
      :db_type=>h[:type_name],
      :default=>(h[:column_def] == '' ? nil : h[:column_def]),
      :allow_null=>(h[:nullable] != 0),
      :primary_key=>pks.include?(h[:column_name]),
      :column_size=>h[:column_size],
      :scale=>h[:decimal_digits],
      :remarks=>h[:remarks]
    }
    if s[:primary_key]
      s[:auto_increment] = h[:is_autoincrement] == "YES"
    end
    s[:max_length] = s[:column_size] if s[:type] == :string
    # Treat scale-0 numeric types as integers.
    if s[:db_type] =~ /number|numeric|decimal/i && s[:scale] == 0
      s[:type] = :integer
    end
    schema_column_set_db_type(s)
    # Track the distinct schemas seen, to detect ambiguous table names.
    schemas << h[:table_schem] unless schemas.include?(h[:table_schem])
    ts << [m.call(h[:column_name]), s]
  end
  if schemas.length > 1
    raise Error, 'Schema parsing in the jdbc adapter resulted in columns being returned for a table with the same name in multiple schemas. Please explicitly qualify your table with a schema.'
  end
  ts
end
# Whether schema_parse_table should skip the given metadata row when
# parsing the schema.  By default, rows from the INFORMATION_SCHEMA
# are skipped.
def schema_parse_table_skip?(h, schema)
  'INFORMATION_SCHEMA' == h[:table_schem]
end
# Called after loading subadapter-specific code, overridable by subadapters.
# The default implementation is a no-op.
def setup_type_convertor_map
end
# Called before loading subadapter-specific code, necessary so that subadapter initialization code
# that runs queries works correctly. This cannot be overridden in subadapters.
# Installs the timezone-aware timestamp convertor on top of the default map.
def setup_type_convertor_map_early
  @type_convertor_map = TypeConvertor::MAP.merge(Java::JavaSQL::Types::TIMESTAMP=>timestamp_convertor)
  @basic_type_convertor_map = TypeConvertor::BASIC_MAP.dup
end
# Yield a new JDBC Statement object for the given connection, and ensure
# that it is closed before returning.  Driver exceptions are translated
# via raise_error.
def statement(conn)
  stmt = conn.createStatement
  yield stmt
rescue NativeException, JavaSQL::SQLException => e
  raise_error(e)
ensure
  stmt.close if stmt
end
# A conversion proc for timestamp columns. This is used to make sure timestamps are converted using the
# correct timezone.
def timestamp_convertor
  lambda do |r, i|
    if v = r.getTimestamp(i)
      # java.sql.Timestamp uses 1900-based years and 0-based months,
      # hence the offsets before handing off to to_application_timestamp.
      to_application_timestamp([v.getYear + 1900, v.getMonth + 1, v.getDate, v.getHours, v.getMinutes, v.getSeconds, v.getNanos])
    end
  end
end
end
# The JDBC adapter dataset, which fetches rows from JDBC ResultSets.
class Dataset < Sequel::Dataset
  include StoredProcedures

  Database::DatasetClass = self
  Sequel::Deprecation.deprecate_constant(Database, :DatasetClass)

  # Module included in prepared statements: uses the prepared statement
  # itself as the SQL and passes the bound arguments via opts[:arguments].
  PreparedStatementMethods = prepared_statements_module(
    "sql = self; opts = Hash[opts]; opts[:arguments] = bind_arguments",
    Sequel::Dataset::UnnumberedArgumentMapper,
    %w"execute execute_dui") do
    private

    # Same as execute, explicit due to intricacies of alias and super.
    def execute_insert(sql, opts=OPTS)
      sql = self
      opts = Hash[opts]
      opts[:arguments] = bind_arguments
      opts[:type] = :insert
      super
    end
  end

  # Module included in stored procedure datasets: passes the sproc name
  # and arguments via the opts, with :sproc set so Database#execute
  # dispatches to call_sproc.
  StoredProcedureMethods = prepared_statements_module(
    "sql = @opts[:sproc_name]; opts = Hash[opts]; opts[:args] = @opts[:sproc_args]; opts[:sproc] = true",
    Sequel::Dataset::StoredProcedureMethods,
    %w"execute execute_dui") do
    private

    # Same as execute, explicit due to intricacies of alias and super.
    def execute_insert(sql, opts=OPTS)
      sql = @opts[:sproc_name]
      opts = Hash[opts]
      opts[:args] = @opts[:sproc_args]
      opts[:sproc] = true
      opts[:type] = :insert
      super
    end
  end

  # Whether to convert some Java types to ruby types when retrieving rows.
  # Uses the database's setting by default, can be set to false to roughly
  # double performance when fetching rows.
  attr_accessor :convert_types

  # Correctly return rows from the database and return them as hashes,
  # yielding each row to the block.
  def fetch_rows(sql, &block)
    execute(sql){|result| process_result_set(result, &block)}
    self
  end

  # Set the fetch size on JDBC ResultSets created from this dataset.
  def with_fetch_size(size)
    clone(:fetch_size=>size)
  end

  private

  # Whether we should convert Java types to ruby types for this dataset,
  # falling back to the database setting when not set on the dataset.
  def convert_types?
    ct = @convert_types
    ct.nil? ? db.convert_types : ct
  end

  # Extend the dataset with the JDBC stored procedure methods.
  def prepare_extend_sproc(ds)
    ds.with_extend(StoredProcedureMethods)
  end

  # The type conversion proc to use for the given column number i,
  # given the type conversion map and the ResultSetMetaData.
  def type_convertor(map, meta, type, i)
    map[type]
  end

  # The basic type conversion proc to use for the given column number i,
  # given the type conversion map and the ResultSetMetaData.
  #
  # This is implemented as a separate method so that subclasses can
  # override the methods separately.
  def basic_type_convertor(map, meta, type, i)
    map[type]
  end

  # The modules to extend prepared statements created from this dataset with.
  def prepared_statement_modules
    [PreparedStatementMethods]
  end

  # Split out from fetch_rows to allow processing of JDBC result sets
  # that don't come from issuing an SQL string.  Yields each row as a
  # hash of column identifier to (possibly converted) value, and always
  # closes the result set.
  def process_result_set(result)
    meta = result.getMetaData
    if fetch_size = opts[:fetch_size]
      result.setFetchSize(fetch_size)
    end
    cols = []
    i = 0
    convert = convert_types?
    map = convert ? db.type_convertor_map : db.basic_type_convertor_map
    # Precompute [name, index, convertor] triples; JDBC column indexes
    # are 1-based.
    meta.getColumnCount.times do
      i += 1
      cols << [output_identifier(meta.getColumnLabel(i)), i, convert ? type_convertor(map, meta, meta.getColumnType(i), i) : basic_type_convertor(map, meta, meta.getColumnType(i), i)]
    end
    self.columns = cols.map{|c| c[0]}
    while result.next
      row = {}
      cols.each do |n, j, pr|
        row[n] = pr.call(result, j)
      end
      yield row
    end
  ensure
    result.close
  end
end
end
end
| 35.825666 | 206 | 0.601717 |
e8547251ee8691ec2a6c0ddd2b3ed7dd333ec929 | 2,484 | # frozen_string_literal: true
Rails.application.configure do
  # Settings specified here will take precedence over those
  # in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports.
  config.consider_all_requests_local = true

  # Enable/disable caching. By default caching is disabled.
  # Run rails dev:cache to toggle caching (it creates/removes the marker file).
  if Rails.root.join('tmp/caching-dev.txt').exist?
    config.action_controller.perform_caching = true

    config.cache_store = :memory_store
    config.public_file_server.headers = {
      'Cache-Control' => "public, max-age=#{2.days.to_i}",
    }
  else
    config.action_controller.perform_caching = false

    config.cache_store = :null_store
  end

  # Store uploaded files on the local file system
  # (see config/storage.yml for options).
  config.active_storage.service = :local

  # When mail is sent from your application,
  # Letter Opener will open a preview in the browser instead of sending.
  config.action_mailer.delivery_method = :letter_opener
  config.action_mailer.perform_deliveries = true
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.perform_caching = false
  # Used to build absolute URLs in mailer views.
  config.action_mailer.default_url_options = {
    host: 'localhost',
    port: 3000,
  }

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Highlight code that triggered database queries in logs.
  config.active_record.verbose_query_logs = true

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Suppress logger output for asset requests.
  config.assets.quiet = true

  # Raises error for missing translations.
  # config.action_view.raise_on_missing_translations = true

  # Use an evented file watcher to asynchronously detect changes in source code,
  # routes, locales, etc. This feature depends on the listen gem.
  config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
| 34.027397 | 80 | 0.76087 |
62e869c98ed9a7a570c5cdd0a725c010a87cca48 | 26,137 | #!/usr/bin/env ruby
require 'spec_helper'
require 'puppet/util/windows'
describe Puppet::Util::Windows::ADSI, :if => Puppet::Util::Platform.windows? do
let(:connection) { stub 'connection' }
let(:builtin_localized) { Puppet::Util::Windows::SID.sid_to_name('S-1-5-32') }
# SYSTEM is special as English can retrieve it via Windows API
# but will return localized names
let(:ntauthority_localized) { Puppet::Util::Windows::SID::Principal.lookup_account_name('SYSTEM').domain }
before(:each) do
Puppet::Util::Windows::ADSI.instance_variable_set(:@computer_name, 'testcomputername')
Puppet::Util::Windows::ADSI.stubs(:connect).returns connection
end
after(:each) do
Puppet::Util::Windows::ADSI.instance_variable_set(:@computer_name, nil)
end
it "should generate the correct URI for a resource" do
expect(Puppet::Util::Windows::ADSI.uri('test', 'user')).to eq("WinNT://./test,user")
end
it "should be able to get the name of the computer" do
expect(Puppet::Util::Windows::ADSI.computer_name).to eq('testcomputername')
end
it "should be able to provide the correct WinNT base URI for the computer" do
expect(Puppet::Util::Windows::ADSI.computer_uri).to eq("WinNT://.")
end
it "should generate a fully qualified WinNT URI" do
expect(Puppet::Util::Windows::ADSI.computer_uri('testcomputername')).to eq("WinNT://testcomputername")
end
describe ".computer_name" do
it "should return a non-empty ComputerName string" do
Puppet::Util::Windows::ADSI.instance_variable_set(:@computer_name, nil)
expect(Puppet::Util::Windows::ADSI.computer_name).not_to be_empty
end
end
describe ".sid_uri" do
it "should raise an error when the input is not a SID Principal" do
[Object.new, {}, 1, :symbol, '', nil].each do |input|
expect {
Puppet::Util::Windows::ADSI.sid_uri(input)
}.to raise_error(Puppet::Error, /Must use a valid SID::Principal/)
end
end
it "should return a SID uri for a well-known SID (SYSTEM)" do
sid = Puppet::Util::Windows::SID::Principal.lookup_account_name('SYSTEM')
expect(Puppet::Util::Windows::ADSI.sid_uri(sid)).to eq('WinNT://S-1-5-18')
end
end
describe Puppet::Util::Windows::ADSI::User do
let(:username) { 'testuser' }
let(:domain) { 'DOMAIN' }
let(:domain_username) { "#{domain}\\#{username}"}
it "should generate the correct URI" do
expect(Puppet::Util::Windows::ADSI::User.uri(username)).to eq("WinNT://./#{username},user")
end
it "should generate the correct URI for a user with a domain" do
expect(Puppet::Util::Windows::ADSI::User.uri(username, domain)).to eq("WinNT://#{domain}/#{username},user")
end
it "should generate the correct URI for a BUILTIN user" do
expect(Puppet::Util::Windows::ADSI::User.uri(username, builtin_localized)).to eq("WinNT://./#{username},user")
end
it "should generate the correct URI for a NT AUTHORITY user" do
expect(Puppet::Util::Windows::ADSI::User.uri(username, ntauthority_localized)).to eq("WinNT://./#{username},user")
end
it "should be able to parse a username without a domain" do
expect(Puppet::Util::Windows::ADSI::User.parse_name(username)).to eq([username, '.'])
end
it "should be able to parse a username with a domain" do
expect(Puppet::Util::Windows::ADSI::User.parse_name(domain_username)).to eq([username, domain])
end
it "should raise an error with a username that contains a /" do
expect {
Puppet::Util::Windows::ADSI::User.parse_name("#{domain}/#{username}")
}.to raise_error(Puppet::Error, /Value must be in DOMAIN\\user style syntax/)
end
it "should be able to create a user" do
adsi_user = stub('adsi')
connection.expects(:Create).with('user', username).returns(adsi_user)
Puppet::Util::Windows::ADSI::Group.expects(:exists?).with(username).returns(false)
user = Puppet::Util::Windows::ADSI::User.create(username)
expect(user).to be_a(Puppet::Util::Windows::ADSI::User)
expect(user.native_object).to eq(adsi_user)
end
it "should be able to check the existence of a user" do
Puppet::Util::Windows::SID.expects(:name_to_principal).with(username).returns nil
Puppet::Util::Windows::ADSI.expects(:connect).with("WinNT://./#{username},user").returns connection
connection.expects(:Class).returns('User')
expect(Puppet::Util::Windows::ADSI::User.exists?(username)).to be_truthy
end
it "should be able to check the existence of a domain user" do
Puppet::Util::Windows::SID.expects(:name_to_principal).with("#{domain}\\#{username}").returns nil
Puppet::Util::Windows::ADSI.expects(:connect).with("WinNT://#{domain}/#{username},user").returns connection
connection.expects(:Class).returns('User')
expect(Puppet::Util::Windows::ADSI::User.exists?(domain_username)).to be_truthy
end
it "should be able to confirm the existence of a user with a well-known SID" do
system_user = Puppet::Util::Windows::SID::LocalSystem
# ensure that the underlying OS is queried here
Puppet::Util::Windows::ADSI.unstub(:connect)
expect(Puppet::Util::Windows::ADSI::User.exists?(system_user)).to be_truthy
end
it "should return false with a well-known Group SID" do
group = Puppet::Util::Windows::SID::BuiltinAdministrators
# ensure that the underlying OS is queried here
Puppet::Util::Windows::ADSI.unstub(:connect)
expect(Puppet::Util::Windows::ADSI::User.exists?(group)).to be_falsey
end
it "should return nil with an unknown SID" do
bogus_sid = 'S-1-2-3-4'
# ensure that the underlying OS is queried here
Puppet::Util::Windows::ADSI.unstub(:connect)
expect(Puppet::Util::Windows::ADSI::User.exists?(bogus_sid)).to be_falsey
end
it "should be able to delete a user" do
connection.expects(:Delete).with('user', username)
Puppet::Util::Windows::ADSI::User.delete(username)
end
it "should return an enumeration of IADsUser wrapped objects" do
name = 'Administrator'
wmi_users = [stub('WMI', :name => name)]
Puppet::Util::Windows::ADSI.expects(:execquery).with('select name from win32_useraccount where localaccount = "TRUE"').returns(wmi_users)
native_object = stub('IADsUser')
homedir = "C:\\Users\\#{name}"
native_object.expects(:Get).with('HomeDirectory').returns(homedir)
Puppet::Util::Windows::ADSI.expects(:connect).with("WinNT://./#{name},user").returns(native_object)
users = Puppet::Util::Windows::ADSI::User.to_a
expect(users.length).to eq(1)
expect(users[0].name).to eq(name)
expect(users[0]['HomeDirectory']).to eq(homedir)
end
describe "an instance" do
let(:adsi_user) { stub('user', :objectSID => []) }
let(:sid) { stub(:account => username, :domain => 'testcomputername') }
let(:user) { Puppet::Util::Windows::ADSI::User.new(username, adsi_user) }
it "should provide its groups as a list of names" do
names = ["group1", "group2"]
groups = names.map { |name| stub('group', :Name => name) }
adsi_user.expects(:Groups).returns(groups)
expect(user.groups).to match(names)
end
it "should be able to test whether a given password is correct" do
Puppet::Util::Windows::ADSI::User.expects(:logon).with(username, 'pwdwrong').returns(false)
Puppet::Util::Windows::ADSI::User.expects(:logon).with(username, 'pwdright').returns(true)
expect(user.password_is?('pwdwrong')).to be_falsey
expect(user.password_is?('pwdright')).to be_truthy
end
it "should be able to set a password" do
adsi_user.expects(:SetPassword).with('pwd')
adsi_user.expects(:SetInfo).at_least_once
flagname = "UserFlags"
fADS_UF_DONT_EXPIRE_PASSWD = 0x10000
adsi_user.expects(:Get).with(flagname).returns(0)
adsi_user.expects(:Put).with(flagname, fADS_UF_DONT_EXPIRE_PASSWD)
user.password = 'pwd'
end
it "should be able manage a user without a password" do
adsi_user.expects(:SetPassword).with('pwd').never
adsi_user.expects(:SetInfo).at_least_once
flagname = "UserFlags"
fADS_UF_DONT_EXPIRE_PASSWD = 0x10000
adsi_user.expects(:Get).with(flagname).returns(0)
adsi_user.expects(:Put).with(flagname, fADS_UF_DONT_EXPIRE_PASSWD)
user.password = nil
end
it "should generate the correct URI" do
Puppet::Util::Windows::SID.stubs(:octet_string_to_principal).returns(sid)
expect(user.uri).to eq("WinNT://testcomputername/#{username},user")
end
describe "when given a set of groups to which to add the user" do
let(:existing_groups) { ['group2','group3'] }
let(:group_sids) { existing_groups.each_with_index.map{|n,i| stub(:Name => n, :objectSID => stub(:sid => i))} }
let(:groups_to_set) { 'group1,group2' }
let(:desired_sids) { groups_to_set.split(',').each_with_index.map{|n,i| stub(:Name => n, :objectSID => stub(:sid => i-1))} }
before(:each) do
user.expects(:group_sids).returns(group_sids.map {|s| s.objectSID })
end
describe "if membership is specified as inclusive" do
it "should add the user to those groups, and remove it from groups not in the list" do
Puppet::Util::Windows::ADSI::User.expects(:name_sid_hash).returns(Hash[ desired_sids.map { |s| [s.objectSID.sid, s.objectSID] }])
user.expects(:add_group_sids).with { |value| value.sid == -1 }
user.expects(:remove_group_sids).with { |value| value.sid == 1 }
user.set_groups(groups_to_set, false)
end
it "should remove all users from a group if desired is empty" do
Puppet::Util::Windows::ADSI::User.expects(:name_sid_hash).returns({})
user.expects(:add_group_sids).never
user.expects(:remove_group_sids).with { |user1, user2| user1.sid == 0 && user2.sid == 1 }
user.set_groups('', false)
end
end
describe "if membership is specified as minimum" do
it "should add the user to the specified groups without affecting its other memberships" do
Puppet::Util::Windows::ADSI::User.expects(:name_sid_hash).returns(Hash[ desired_sids.map { |s| [s.objectSID.sid, s.objectSID] }])
user.expects(:add_group_sids).with { |value| value.sid == -1 }
user.expects(:remove_group_sids).never
user.set_groups(groups_to_set, true)
end
it "should do nothing if desired is empty" do
Puppet::Util::Windows::ADSI::User.expects(:name_sid_hash).returns({})
user.expects(:remove_group_sids).never
user.expects(:add_group_sids).never
user.set_groups('', true)
end
end
end
describe 'userflags' do
# Avoid having to type out the constant everytime we want to
# retrieve a userflag's value.
def ads_userflags(flag)
Puppet::Util::Windows::ADSI::User::ADS_USERFLAGS[flag]
end
before(:each) do
userflags = [
:ADS_UF_SCRIPT,
:ADS_UF_ACCOUNTDISABLE,
:ADS_UF_HOMEDIR_REQUIRED,
:ADS_UF_LOCKOUT
].inject(0) do |flags, flag|
flags | ads_userflags(flag)
end
user.stubs(:[]).with('UserFlags').returns(userflags)
end
describe '#userflag_set?' do
it 'returns true if the specified userflag is set' do
expect(user.userflag_set?(:ADS_UF_SCRIPT)).to be true
end
it 'returns false if the specified userflag is not set' do
expect(user.userflag_set?(:ADS_UF_PASSWD_NOTREQD)).to be false
end
it 'returns false if the specified userflag is an unrecognized userflag' do
expect(user.userflag_set?(:ADS_UF_UNRECOGNIZED_FLAG)).to be false
end
end
shared_examples 'set/unset common tests' do |method|
it 'raises an ArgumentError for any unrecognized userflags' do
unrecognized_flags = [
:ADS_UF_UNRECOGNIZED_FLAG_ONE,
:ADS_UF_UNRECOGNIZED_FLAG_TWO
]
input_flags = unrecognized_flags + [
:ADS_UF_PASSWORD_EXPIRED,
:ADS_UF_DONT_EXPIRE_PASSWD
]
expect { user.send(method, *input_flags) }.to raise_error(
ArgumentError, /#{unrecognized_flags.join(', ')}/
)
end
it 'noops if no userflags are passed-in' do
user.expects(:[]=).never
user.expects(:commit).never
user.send(method)
end
end
describe '#set_userflags' do
include_examples 'set/unset common tests', :set_userflags
it 'should add the passed-in flags to the current set of userflags' do
input_flags = [
:ADS_UF_PASSWORD_EXPIRED,
:ADS_UF_DONT_EXPIRE_PASSWD
]
userflags = user['UserFlags']
expected_userflags = userflags | ads_userflags(input_flags[0]) | ads_userflags(input_flags[1])
user.expects(:[]=).with('UserFlags', expected_userflags)
user.set_userflags(*input_flags)
end
end
describe '#unset_userflags' do
include_examples 'set/unset common tests', :unset_userflags
it 'should remove the passed-in flags from the current set of userflags' do
input_flags = [
:ADS_UF_SCRIPT,
:ADS_UF_ACCOUNTDISABLE
]
# ADS_UF_HOMEDIR_REQUIRED and ADS_UF_LOCKOUT should be the only flags set.
expected_userflags = 0 | ads_userflags(:ADS_UF_HOMEDIR_REQUIRED) | ads_userflags(:ADS_UF_LOCKOUT)
user.expects(:[]=).with('UserFlags', expected_userflags)
user.unset_userflags(*input_flags)
end
end
end
end
end
describe Puppet::Util::Windows::ADSI::Group do
let(:groupname) { 'testgroup' }
describe "an instance" do
let(:adsi_group) { stub 'group' }
let(:group) { Puppet::Util::Windows::ADSI::Group.new(groupname, adsi_group) }
let(:someone_sid){ stub(:account => 'someone', :domain => 'testcomputername')}
describe "should be able to use SID objects" do
let(:system) { Puppet::Util::Windows::SID.name_to_principal('SYSTEM') }
let(:invalid) { Puppet::Util::Windows::SID.name_to_principal('foobar') }
it "to add a member" do
adsi_group.expects(:Add).with("WinNT://S-1-5-18")
group.add_member_sids(system)
end
it "and raise when passed a non-SID object to add" do
expect{ group.add_member_sids(invalid)}.to raise_error(Puppet::Error, /Must use a valid SID::Principal/)
end
it "to remove a member" do
adsi_group.expects(:Remove).with("WinNT://S-1-5-18")
group.remove_member_sids(system)
end
it "and raise when passed a non-SID object to remove" do
expect{ group.remove_member_sids(invalid)}.to raise_error(Puppet::Error, /Must use a valid SID::Principal/)
end
end
it "should provide its groups as a list of names" do
names = ['user1', 'user2']
users = names.map { |name| stub('user', :Name => name, :objectSID => name, :ole_respond_to? => true) }
adsi_group.expects(:Members).returns(users)
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with('user1').returns(stub(:domain_account => 'HOSTNAME\user1'))
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with('user2').returns(stub(:domain_account => 'HOSTNAME\user2'))
expect(group.members.map(&:domain_account)).to match(['HOSTNAME\user1', 'HOSTNAME\user2'])
end
context "calling .set_members" do
it "should set the members of a group to only desired_members when inclusive" do
names = ['DOMAIN\user1', 'user2']
sids = [
stub(:account => 'user1', :domain => 'DOMAIN', :sid => 1),
stub(:account => 'user2', :domain => 'testcomputername', :sid => 2),
stub(:account => 'user3', :domain => 'DOMAIN2', :sid => 3),
]
# use stubbed objectSid on member to return stubbed SID
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with([0]).returns(sids[0])
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with([1]).returns(sids[1])
Puppet::Util::Windows::SID.expects(:name_to_principal).with('user2').returns(sids[1])
Puppet::Util::Windows::SID.expects(:name_to_principal).with('DOMAIN2\user3').returns(sids[2])
Puppet::Util::Windows::ADSI.expects(:sid_uri).with(sids[0]).returns("WinNT://DOMAIN/user1,user")
Puppet::Util::Windows::ADSI.expects(:sid_uri).with(sids[2]).returns("WinNT://DOMAIN2/user3,user")
members = names.each_with_index.map{|n,i| stub(:Name => n, :objectSID => [i], :ole_respond_to? => true)}
adsi_group.expects(:Members).returns members
adsi_group.expects(:Remove).with('WinNT://DOMAIN/user1,user')
adsi_group.expects(:Add).with('WinNT://DOMAIN2/user3,user')
group.set_members(['user2', 'DOMAIN2\user3'])
end
it "should add the desired_members to an existing group when not inclusive" do
names = ['DOMAIN\user1', 'user2']
sids = [
stub(:account => 'user1', :domain => 'DOMAIN', :sid => 1),
stub(:account => 'user2', :domain => 'testcomputername', :sid => 2),
stub(:account => 'user3', :domain => 'DOMAIN2', :sid => 3),
]
# use stubbed objectSid on member to return stubbed SID
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with([0]).returns(sids[0])
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with([1]).returns(sids[1])
Puppet::Util::Windows::SID.expects(:name_to_principal).with('user2').returns(sids[1])
Puppet::Util::Windows::SID.expects(:name_to_principal).with('DOMAIN2\user3').returns(sids[2])
Puppet::Util::Windows::ADSI.expects(:sid_uri).with(sids[2]).returns("WinNT://DOMAIN2/user3,user")
members = names.each_with_index.map{|n,i| stub(:Name => n, :objectSID => [i], :ole_respond_to? => true)}
adsi_group.expects(:Members).returns members
adsi_group.expects(:Remove).with('WinNT://DOMAIN/user1,user').never
adsi_group.expects(:Add).with('WinNT://DOMAIN2/user3,user')
group.set_members(['user2', 'DOMAIN2\user3'],false)
end
it "should return immediately when desired_members is nil" do
adsi_group.expects(:Members).never
adsi_group.expects(:Remove).never
adsi_group.expects(:Add).never
group.set_members(nil)
end
it "should remove all members when desired_members is empty and inclusive" do
names = ['DOMAIN\user1', 'user2']
sids = [
stub(:account => 'user1', :domain => 'DOMAIN', :sid => 1 ),
stub(:account => 'user2', :domain => 'testcomputername', :sid => 2 ),
]
# use stubbed objectSid on member to return stubbed SID
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with([0]).returns(sids[0])
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with([1]).returns(sids[1])
Puppet::Util::Windows::ADSI.expects(:sid_uri).with(sids[0]).returns("WinNT://DOMAIN/user1,user")
Puppet::Util::Windows::ADSI.expects(:sid_uri).with(sids[1]).returns("WinNT://testcomputername/user2,user")
members = names.each_with_index.map{|n,i| stub(:Name => n, :objectSID => [i], :ole_respond_to? => true)}
adsi_group.expects(:Members).returns members
adsi_group.expects(:Remove).with('WinNT://DOMAIN/user1,user')
adsi_group.expects(:Remove).with('WinNT://testcomputername/user2,user')
group.set_members([])
end
it "should do nothing when desired_members is empty and not inclusive" do
names = ['DOMAIN\user1', 'user2']
sids = [
stub(:account => 'user1', :domain => 'DOMAIN', :sid => 1 ),
stub(:account => 'user2', :domain => 'testcomputername', :sid => 2 ),
]
# use stubbed objectSid on member to return stubbed SID
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with([0]).returns(sids[0])
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with([1]).returns(sids[1])
members = names.each_with_index.map{|n,i| stub(:Name => n, :objectSID => [i], :ole_respond_to? => true)}
adsi_group.expects(:Members).returns members
adsi_group.expects(:Remove).never
adsi_group.expects(:Add).never
group.set_members([],false)
end
it "should raise an error when a username does not resolve to a SID" do
expect {
adsi_group.expects(:Members).returns []
group.set_members(['foobar'])
}.to raise_error(Puppet::Error, /Could not resolve name: foobar/)
end
end
it "should generate the correct URI" do
adsi_group.expects(:objectSID).returns([0])
Socket.expects(:gethostname).returns('TESTcomputerNAME')
computer_sid = stub(:account => groupname,:domain => 'testcomputername')
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with([0]).returns(computer_sid)
expect(group.uri).to eq("WinNT://./#{groupname},group")
end
end
it "should generate the correct URI" do
expect(Puppet::Util::Windows::ADSI::Group.uri("people")).to eq("WinNT://./people,group")
end
it "should generate the correct URI for a BUILTIN group" do
expect(Puppet::Util::Windows::ADSI::Group.uri(groupname, builtin_localized)).to eq("WinNT://./#{groupname},group")
end
it "should generate the correct URI for a NT AUTHORITY group" do
expect(Puppet::Util::Windows::ADSI::Group.uri(groupname, ntauthority_localized)).to eq("WinNT://./#{groupname},group")
end
it "should be able to create a group" do
adsi_group = stub("adsi")
connection.expects(:Create).with('group', groupname).returns(adsi_group)
Puppet::Util::Windows::ADSI::User.expects(:exists?).with(groupname).returns(false)
group = Puppet::Util::Windows::ADSI::Group.create(groupname)
expect(group).to be_a(Puppet::Util::Windows::ADSI::Group)
expect(group.native_object).to eq(adsi_group)
end
it "should be able to confirm the existence of a group" do
Puppet::Util::Windows::SID.expects(:name_to_principal).with(groupname).returns nil
Puppet::Util::Windows::ADSI.expects(:connect).with("WinNT://./#{groupname},group").returns connection
connection.expects(:Class).returns('Group')
expect(Puppet::Util::Windows::ADSI::Group.exists?(groupname)).to be_truthy
end
it "should be able to confirm the existence of a group with a well-known SID" do
service_group = Puppet::Util::Windows::SID::Service
# ensure that the underlying OS is queried here
Puppet::Util::Windows::ADSI.unstub(:connect)
expect(Puppet::Util::Windows::ADSI::Group.exists?(service_group)).to be_truthy
end
it "will return true with a well-known User SID, as there is no way to resolve it with a WinNT:// style moniker" do
user = Puppet::Util::Windows::SID::NtLocal
# ensure that the underlying OS is queried here
Puppet::Util::Windows::ADSI.unstub(:connect)
expect(Puppet::Util::Windows::ADSI::Group.exists?(user)).to be_truthy
end
it "should return nil with an unknown SID" do
bogus_sid = 'S-1-2-3-4'
# ensure that the underlying OS is queried here
Puppet::Util::Windows::ADSI.unstub(:connect)
expect(Puppet::Util::Windows::ADSI::Group.exists?(bogus_sid)).to be_falsey
end
it "should be able to delete a group" do
connection.expects(:Delete).with('group', groupname)
Puppet::Util::Windows::ADSI::Group.delete(groupname)
end
it "should return an enumeration of IADsGroup wrapped objects" do
name = 'Administrators'
wmi_groups = [stub('WMI', :name => name)]
Puppet::Util::Windows::ADSI.expects(:execquery).with('select name from win32_group where localaccount = "TRUE"').returns(wmi_groups)
native_object = stub('IADsGroup')
Puppet::Util::Windows::SID.expects(:octet_string_to_principal).with([]).returns(stub(:domain_account => '.\Administrator'))
native_object.expects(:Members).returns([stub(:Name => 'Administrator', :objectSID => [], :ole_respond_to? => true)])
Puppet::Util::Windows::ADSI.expects(:connect).with("WinNT://./#{name},group").returns(native_object)
groups = Puppet::Util::Windows::ADSI::Group.to_a
expect(groups.length).to eq(1)
expect(groups[0].name).to eq(name)
expect(groups[0].members.map(&:domain_account)).to eq(['.\Administrator'])
end
end
describe Puppet::Util::Windows::ADSI::UserProfile do
it "should be able to delete a user profile" do
connection.expects(:Delete).with("Win32_UserProfile.SID='S-A-B-C'")
Puppet::Util::Windows::ADSI::UserProfile.delete('S-A-B-C')
end
it "should warn on 2003" do
connection.expects(:Delete).raises(WIN32OLERuntimeError,
"Delete (WIN32OLERuntimeError)
OLE error code:80041010 in SWbemServicesEx
Invalid class
HRESULT error code:0x80020009
Exception occurred.")
Puppet.expects(:warning).with("Cannot delete user profile for 'S-A-B-C' prior to Vista SP1")
Puppet::Util::Windows::ADSI::UserProfile.delete('S-A-B-C')
end
end
end
| 41.421553 | 143 | 0.641657 |
ac50d8f1d39843825f57c8bec3c9c13c3e072f92 | 308 | # frozen_string_literal: true
module Resolvers
class CurrentUser < Resolvers::Base
type Types::User, null: true
description 'Find the info for the current user'
argument :id, String, required: false, default_value: '', as: :uuid
def resolve(_params)
current_user
end
end
end
| 20.533333 | 71 | 0.698052 |
6af6ed0bed73383e2de1453c3b7c012f1397a254 | 6,488 | # frozen_string_literal: true
class ESign::Eg013AddDocToTemplateService
attr_reader :args
include ApiCreator
def initialize(args)
@args = args
end
def worker
envelope_args = args[:envelope_args]
# 1. Create the envelope request object
envelope_definition = make_envelope(envelope_args)
# 2. call Envelopes::create API method
# Exceptions will be caught by the calling function
envelope_api = create_envelope_api(args)
results = envelope_api.create_envelope(args[:account_id], envelope_definition)
envelope_id = results.envelope_id
# 3. Create the Recipient View request object
authentication_method = 'None' # How is this application authenticating
# the signer? See the `authenticationMethod' definition
# https://developers.docusign.com/docs/esign-rest-api/reference/envelopes/envelopeviews/createrecipient/
recipient_view_request = DocuSign_eSign::RecipientViewRequest.new(
authenticationMethod: authentication_method,
returnUrl: envelope_args[:ds_return_url],
userName: envelope_args[:signer_name],
email: envelope_args[:signer_email],
clientUserId: envelope_args[:signer_client_id]
)
# 4. Obtain the recipient_view_url for the embedded signing
# Exceptions will be caught by the calling function
results = envelope_api.create_recipient_view(args[:account_id],
envelope_id, recipient_view_request)
{ envelope_id: envelope_id, redirect_url: results.url }
end
private
def make_envelope(args)
# 1. Create recipients for server template. Note that the Recipients object
# is used, not TemplateRole
#
# Create a signer recipient for the signer role of the server template
signer1 = DocuSign_eSign::Signer.new(
email: args[:signer_email], name: args[:signer_name],
roleName: 'signer', recipientId: '1',
# Adding clientUserId transforms the template recipient into an embedded recipient
clientUserId: args[:signer_client_id]
)
cc1 = DocuSign_eSign::CarbonCopy.new(
email: args[:cc_email], name: args[:cc_name],
roleName: 'cc', recipientId: '2'
)
# Recipients object
recipients_server_template = DocuSign_eSign::Recipients.new(
'carbonCopies' => [cc1], 'signers' => [signer1]
)
# 2. Create a composite template for the Server template + roles
comp_template1 = DocuSign_eSign::CompositeTemplate.new(
compositeTemplate_id: '1',
serverTemplates: [DocuSign_eSign::ServerTemplate.new(
sequence: '1', templateId: args[:template_id]
)],
# Add the roles via an inlineTemplate
inlineTemplates: [
DocuSign_eSign::InlineTemplate.new(
'sequence' => '2',
'recipients' => recipients_server_template
)
]
)
# Next, create the second composite template that will include the new document
#
# 3. Create the signer recipient for the added document
# starting with the tab definition:
sign_here1 = DocuSign_eSign::SignHere.new(
anchorString: '**signature_1**',
anchorYOffset: '10', anchorUnits: 'pixels',
anchorXOffset: '20'
)
signer1_tabs = DocuSign_eSign::Tabs.new('signHereTabs' => [sign_here1])
# 4. Create Signer definition for the added document
signer1AddedDoc = DocuSign_eSign::Signer.new(
email: args[:signer_email],
name: args[:signer_name],
roleName: 'signer', recipientId: '1',
clientUserId: args[:signer_client_id],
tabs: signer1_tabs
)
# 5. The Recipients object for the added document using cc1 definition from above
recipients_added_doc = DocuSign_eSign::Recipients.new(
carbonCopies: [cc1], signers: [signer1AddedDoc]
)
# 6. Create the HTML document that will be added to the envelope
doc1_b64 = Base64.encode64(create_document1(args))
doc1 = DocuSign_eSign::Document.new(
documentBase64: doc1_b64,
name: 'Appendix 1--Sales order', # Can be different from actual file name
fileExtension: 'html', documentId: '1'
)
# 7. Create a composite template for the added document
comp_template2 = DocuSign_eSign::CompositeTemplate.new(
compositeTemplateId: '2',
# Add the recipients via an inlineTemplate
inlineTemplates: [
DocuSign_eSign::InlineTemplate.new(
sequence: '1', recipients: recipients_added_doc
)
],
document: doc1
)
# 8. Create the envelope definition with the composited templates
envelope_definition = DocuSign_eSign::EnvelopeDefinition.new(
status: 'sent',
compositeTemplates: [comp_template1, comp_template2]
)
envelope_definition
end
def create_document1(args)
<<~HEREDOC
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
</head>
<body style="font-family:sans-serif;margin-left:2em;">
<h1 style="font-family: 'Trebuchet MS', Helvetica, sans-serif;
color: darkblue;margin-bottom: 0;">World Wide Corp</h1>
<h2 style="font-family: 'Trebuchet MS', Helvetica, sans-serif;
margin-top: 0px;margin-bottom: 3.5em;font-size: 1em;
color: darkblue;">Order Processing Division</h2>
<h4>Ordered by #{args[:signer_name]}</h4>
<p style="margin-top:0em; margin-bottom:0em;">Email: #{args[:signer_email]}</p>
<p style="margin-top:0em; margin-bottom:0em;">Copy to: #{args[:cc_name]}, #{args[:cc_email]}</p>
<p style="margin-top:3em; margin-bottom:0em;">Item: <b>#{args[:item]}</b>, quantity: <b>#{args[:quantity]}</b> at market price.</p>
<p style="margin-top:3em;">
Candy bonbon pastry jujubes lollipop wafer biscuit biscuit. Topping brownie sesame snaps sweet roll pie. Croissant danish biscuit soufflé caramels jujubes jelly. Dragée danish caramels lemon drops dragée. Gummi bears cupcake biscuit tiramisu sugar plum pastry. Dragée gummies applicake pudding liquorice. Donut jujubes oat cake jelly-o. Dessert bear claw chocolate cake gummies lollipop sugar plum ice cream gummies cheesecake.
</p>
<!-- Note the anchor tag for the signature field is in white. -->
<h3 style="margin-top:3em;">Agreed: <span style="color:white;">**signature_1**/</span></h3>
</body>
</html>
HEREDOC
end
# ***DS.snippet.0.start
end | 41.589744 | 435 | 0.672626 |
ab5cbde41edfcc809270481e06c7d55c160b6999 | 5,188 | =begin
#SCORM Cloud Rest API
#REST API used for SCORM Cloud integrations.
OpenAPI spec version: 2.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.12
=end
require 'date'
module RusticiSoftwareCloudV2
class EnabledSchema
attr_accessor :enabled
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'enabled' => :'enabled'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'enabled' => :'BOOLEAN'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'enabled')
self.enabled = attributes[:'enabled']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
enabled == o.enabled
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[enabled].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = RusticiSoftwareCloudV2.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 28.195652 | 107 | 0.617772 |
08546483741bfcb7511afad15873eaf1592b3b8b | 558 | require 'rubygems'
require 'bundler/setup'
require 'combustion'
# require 'capybara/rspec'
Combustion.initialize! :all
require 'rspec/rails'
ActiveRecord::Migration.maintain_test_schema!
RSpec.configure do |config|
config.use_transactional_fixtures = true
config.infer_spec_type_from_file_location!
# config.include Capybara::DSL
config.expect_with :rspec do |expectations|
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
config.mock_with :rspec do |mocks|
mocks.verify_partial_doubles = true
end
end
| 22.32 | 76 | 0.797491 |
abaf6e3efb951be6972d1d8b1baab6ddc26d1636 | 7,225 | require 'rest_client'
require 'oauth'
require 'json'
require 'thread'
module Runcible
class Base
attr_accessor :logs
def initialize(config = {})
@mutex = Mutex.new
@config = config
@logs = []
end
def lazy_config=(a_block)
@mutex.synchronize { @lazy_config = a_block }
end
def config
@mutex.synchronize do
@config = @lazy_config.call if defined?(@lazy_config)
fail Runcible::ConfigurationUndefinedError, Runcible::ConfigurationUndefinedError.message unless @config
@config
end
end
def path(*args)
self.class.path(*args)
end
# rubocop:disable Metrics/AbcSize:
def call(method, path, options = {})
self.logs = []
clone_config = self.config.clone
#on occation path will already have prefix (sync cancel)
path = clone_config[:api_path] + path unless path.start_with?(clone_config[:api_path])
headers = clone_config[:headers].clone
get_params = options[:params] if options[:params]
path = combine_get_params(path, get_params) if get_params
client_options = {}
client_options[:timeout] = clone_config[:timeout] if clone_config[:timeout]
client_options[:open_timeout] = clone_config[:open_timeout] if clone_config[:open_timeout]
client_options[:verify_ssl] = clone_config[:verify_ssl] unless clone_config[:verify_ssl].nil?
if clone_config[:oauth]
self.logger.warn('[DEPRECATION] Pulp oauth is deprecated. Please use cert_auth instead.')
headers = add_oauth_header(method, path, headers)
headers['pulp-user'] = clone_config[:user]
elsif clone_config[:cert_auth]
if !clone_config[:cert_auth][:ssl_client_cert] || !clone_config[:cert_auth][:ssl_client_key]
fail Runcible::ConfigurationUndefinedError, "Missing SSL certificate or key configuration."
end
client_options[:ssl_client_cert] = clone_config[:cert_auth][:ssl_client_cert]
client_options[:ssl_client_key] = clone_config[:cert_auth][:ssl_client_key]
else
client_options[:user] = clone_config[:user]
client_options[:password] = config[:http_auth][:password]
end
client_options[:ssl_ca_file] = config[:ca_cert_file] unless config[:ca_cert_file].nil?
client = RestClient::Resource.new(clone_config[:url], client_options)
args = [method]
args << generate_payload(options) if [:post, :put].include?(method)
args << headers
self.logs << ([method.upcase, URI.join(client.url, path)] + args[1..-1]).join(': ')
response = get_response(client, path, *args)
processed = process_response(response)
self.logs << "Response: #{response.code}: #{response.body}"
log_info
processed
rescue RestClient::ResourceNotFound => e
self.logs << exception_to_log(e)
log_info
raise e
rescue => e
self.logs << exception_to_log(e)
log_exception
raise e
end
def exception_to_log(exception, body = exception.try(:response).try(:body))
"#{exception.message}: #{body}"
end
def get_response(client, path, *args)
client[path].send(*args) do |response, _request, _result, &_block|
resp = response.return!
return resp
end
end
def combine_get_params(path, params)
query_string = params.map do |k, v|
if v.is_a? Array
v.map { |y| "#{k}=#{y}" }.join('&')
else
"#{k}=#{v}"
end
end
query_string = query_string.flatten.join('&')
path + "?#{query_string}"
end
def generate_payload(options)
if options[:payload].is_a?(String)
return options[:payload]
elsif options[:payload].is_a?(Hash)
format_payload_json(options[:payload])
end
end
def format_payload_json(payload_hash)
if payload_hash
if payload_hash[:optional]
payload = if payload_hash[:required]
payload_hash[:required].merge(payload_hash[:optional])
else
payload_hash[:optional]
end
elsif payload_hash[:delta]
payload = payload_hash
else
payload = payload_hash[:required]
end
else
payload = {}
end
return payload.to_json
end
def process_response(response)
begin
body = response.body == "null" ? nil : JSON.parse(response.body)
if body.respond_to? :with_indifferent_access
body = body.with_indifferent_access
elsif body.is_a? Array
body = body.map do |i|
i.respond_to?(:with_indifferent_access) ? i.with_indifferent_access : i
end
end
response = Runcible::Response.new(body, response)
rescue JSON::ParserError => e
self.logs << "Unable to parse JSON: #{e.message}"
end
return response
end
def required_params(local_names, binding, keys_to_remove = [])
local_names = local_names.each_with_object({}) do |v, acc|
value = binding.eval(v.to_s) unless v == :_
acc[v] = value unless value.nil?
acc
end
#The double delete is to support 1.8.7 and 1.9.3
local_names.delete(:payload)
local_names.delete(:optional)
local_names.delete('payload')
local_names.delete('optional')
keys_to_remove.each do |key|
local_names.delete(key)
local_names.delete(key.to_sym)
end
return local_names
end
def add_http_auth_header
return {:user => config[:user], :password => config[:http_auth][:password]}
end
def add_oauth_header(method, path, headers)
default_options = { :site => config[:url],
:http_method => method,
:request_token_path => '',
:authorize_path => '',
:access_token_path => '' }
consumer = OAuth::Consumer.new(config[:oauth][:oauth_key], config[:oauth][:oauth_secret], default_options)
method_to_http_request = { :get => Net::HTTP::Get,
:post => Net::HTTP::Post,
:put => Net::HTTP::Put,
:delete => Net::HTTP::Delete }
http_request = method_to_http_request[method].new(path)
consumer.sign!(http_request)
headers['Authorization'] = http_request['Authorization']
return headers
end
def log_debug
self.config[:logging][:logger].debug(self.logs.join("\n")) if self.config[:logging][:debug]
end
def log_exception
self.config[:logging][:logger].error(self.logs.join("\n")) if self.config[:logging][:exception]
end
def log_info
self.config[:logging][:logger].info(self.logs.join("\n")) if self.config[:logging][:info]
end
def logger
self.config[:logging][:logger]
end
end
class ConfigurationUndefinedError < StandardError
def self.message
# override me to change the error message
'Configuration not set. Runcible::Base.config= must be called before Runcible::Base.config.'
end
end
end
| 32.111111 | 112 | 0.616886 |
f7f52fcfcda3f9a62d314b1b9ebedba054c782d8 | 428 | require 'spec_helper'
module Pact
describe SomethingLike do
describe 'json_create' do
let(:json) do
'
{
"json_class": "Pact::SomethingLike",
"contents" : { "thing" : "blah" }
}
'
end
subject { SomethingLike.json_create(JSON.parse(json)) }
it "creates a SomethingLike object from json" do
expect(subject).to eq(SomethingLike.new({"thing" => "blah"}))
end
end
end
end
| 17.833333 | 69 | 0.619159 |
399276fd2bdeb2941ff29155c0fb7bd948e60005 | 1,796 | class MysqlClient < Formula
desc "Open source relational database management system"
homepage "https://dev.mysql.com/doc/refman/5.7/en/"
# Pinned at `5.7.*`
url "https://cdn.mysql.com/Downloads/MySQL-5.7/mysql-boost-5.7.23.tar.gz"
mirror "https://cdn.mysql.com/archives/mysql-5.7/mysql-boost-5.7.23.tar.gz"
sha256 "d05700ec5c1c6dae9311059dc1713206c29597f09dbd237bf0679b3c6438e87a"
revision 1
bottle do
sha256 "43faa86e44607a1a67189016b0f7d2ff15a484f9f80fc8e40e3c13a8eb662f9c" => :catalina
sha256 "dc94d17faeea3a03f85299a8e93cd359dfff5fdff3576e50992506485f3029e2" => :mojave
sha256 "cf37146a2e2144eef78e38f5893a6fdfddab2c95dd398666e0150a2621779645" => :high_sierra
sha256 "663331c48538a961d42ea69a11555bc3f37f1f5b3e9a9e8f305ecbe490528b73" => :sierra
end
keg_only "conflicts with mysql"
depends_on "cmake" => :build
depends_on "[email protected]"
def install
# https://bugs.mysql.com/bug.php?id=87348
# Fixes: "ADD_SUBDIRECTORY given source
# 'storage/ndb' which is not an existing"
inreplace "CMakeLists.txt", "ADD_SUBDIRECTORY(storage/ndb)", ""
# -DINSTALL_* are relative to `CMAKE_INSTALL_PREFIX` (`prefix`)
args = %W[
-DCOMPILATION_COMMENT=Homebrew
-DDEFAULT_CHARSET=utf8
-DDEFAULT_COLLATION=utf8_general_ci
-DINSTALL_DOCDIR=share/doc/#{name}
-DINSTALL_INCLUDEDIR=include/mysql
-DINSTALL_INFODIR=share/info
-DINSTALL_MANDIR=share/man
-DINSTALL_MYSQLSHAREDIR=share/mysql
-DWITH_BOOST=boost
-DWITH_EDITLINE=system
-DWITH_SSL=yes
-DWITH_UNIT_TESTS=OFF
-DWITHOUT_SERVER=ON
]
system "cmake", ".", *std_cmake_args, *args
system "make", "install"
end
test do
assert_match version.to_s, shell_output("#{bin}/mysql --version")
end
end
| 33.259259 | 93 | 0.729955 |
1cc791f1032aeb105ca6b15ea200484a33d20543 | 526 | require "spec_helper"
require "hamster/vector"
describe Hamster::Vector do
  # #first and #head are aliases; run the identical fixtures against both.
  [:first, :head].each do |method_name|
    describe "##{method_name}" do
      # Each pair is [input values, expected result].
      cases = [
        [[], nil],
        [["A"], "A"],
        [%w[A B C], "A"],
      ]
      cases.each do |(values, expected)|
        describe "on #{values.inspect}" do
          before do
            @vector = Hamster.vector(*values)
          end
          it "returns #{expected.inspect}" do
            @vector.send(method_name).should == expected
          end
        end
      end
    end
  end
end
032dca53060219176c3443c74594ebc946e0ab18 | 189 | require_relative '../test_helper'
# Placeholder test case for the QualityType model.
class QualityTypeTest < Test::Unit::TestCase
  fixtures :quality_types

  # Smoke test that always passes; replace with real assertions.
  def test_truth
    assert true
  end
end
| 17.181818 | 44 | 0.746032 |
79c621ef48f94a494b9f3cb052e5b942d7dede2d | 1,788 | #
# Be sure to run `pod lib lint AMPFloatingTextField.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# Podspec for AMPFloatingTextField: a text field with a floating title
# label above it and an inline error label below it.
Pod::Spec.new do |s|
  s.name             = 'AMPFloatingTextField'
  s.version          = '0.1.0'
  # Typo fix: "lable" -> "label" in the published summary/description.
  s.summary          = 'AMPFloatingTextField is a beautiful implementation of the floating title and error label pattern'

  # This description is used to generate tags and improve search results.
  #   * Think: What does it do? Why did you write it? What is the focus?
  #   * Try to keep it short, snappy and to the point.
  #   * Write the description between the DESC delimiters below.
  #   * Finally, don't worry about the indent, CocoaPods strips it!
  s.description      = 'AMPFloatingTextField is a beautiful implementation of the floating title and error label pattern. This will display the title on top and error below the text field'

  s.homepage         = 'https://github.com/ansu/AMPFloatingTextField'
  # s.screenshots     = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'ansu' => '[email protected]' }
  s.source           = { :git => 'https://github.com/ansu/AMPFloatingTextField.git', :tag => s.version.to_s }
  s.social_media_url = 'https://twitter.com/ansujain'

  s.ios.deployment_target = '8.0'

  s.source_files = 'AMPFloatingTextField/Classes/**/*'

  # s.resource_bundles = {
  #   'AMPFloatingTextField' => ['AMPFloatingTextField/Assets/*.png']
  # }

  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'AFNetworking', '~> 2.3'
end
| 42.571429 | 186 | 0.680089 |
081980e45641ee855451e654cfe3a000ec70d98d | 317 | module TakedownsHelper
def pretty_status(takedown)
status = takedown.pretty_status
classes = {'inactive': 'sect_grey',
'denied': 'sect_red',
'partial': 'sect_green',
'approved': 'sect_green'}
tag.td(status, class: classes[takedown.status])
end
end | 31.7 | 52 | 0.59306 |
28b64b527f577d29d13ec72f4c5a6085ac59d024 | 2,560 | require 'rexml/document'
require 'time'
class GaroonCat::Request
  # Builds the SOAP 1.2 request document for a Garoon API call.
  #
  # @param params [Hash] nested request description:
  #   :header => { :action, :security => { :username_token => {...} },
  #                :timestamp => { :created, :expires }, :locale }
  #   :body   => { :parameters => Hash }
  def initialize(params)
    @params = params
  end

  # <Action> header element carrying the API action name.
  def header_action
    action = REXML::Element.new('Action')
    action.add_text(@params.dig(:header, :action).to_s)
    action
  end

  # <Security> header; a UsernameToken child is added only when both
  # username and password are present.
  def header_security
    security = REXML::Element.new('Security')
    username = @params.dig(:header, :security, :username_token, :username)
    password = @params.dig(:header, :security, :username_token, :password)
    if username && password
      username_token = REXML::Element.new('UsernameToken')
      username_token.add_element('Username').add_text(username)
      username_token.add_element('Password').add_text(password)
      security.add_element(username_token)
    end
    security
  end

  # <Timestamp> header with ISO-8601 Created/Expires values.
  # Assumes :created/:expires respond to #iso8601 (Time/DateTime/Date).
  def header_timestamp
    timestamp = REXML::Element.new('Timestamp')
    timestamp.add_element('Created').add_text(@params.dig(:header, :timestamp, :created).iso8601)
    timestamp.add_element('Expires').add_text(@params.dig(:header, :timestamp, :expires).iso8601)
    timestamp
  end

  # <locale> header element.
  def header_locale
    locale = REXML::Element.new('locale')
    locale.add_text(@params.dig(:header, :locale))
    locale
  end

  # Assembled <soap:Header> in document order:
  # Action, Security, Timestamp, locale.
  def header
    header = REXML::Element.new('soap:Header')
    header.add_element(header_action)
    header.add_element(header_security)
    header.add_element(header_timestamp)
    header.add_element(header_locale)
    header
  end

  # <parameters> element for the action body.
  # String values become one child element; Array values become one child
  # element per entry; any other non-nil scalar is serialised via #to_s.
  # (Resolves the old TODO: non-String scalars used to be silently dropped;
  # String/Array behaviour is unchanged.)
  def body_action_parameters
    parameters = REXML::Element.new('parameters')
    target = @params.dig(:body, :parameters)
    case target
    when Hash
      target.each do |key, v1|
        case v1
        when String
          parameters.add_element(key.to_s).add_text(v1.to_s)
        when Array
          v1.each do |v2|
            parameters.add_element(key.to_s).add_text(v2.to_s)
          end
        when nil
          # nil values are skipped entirely
        else
          # Generalisation of the String branch: Integer, Symbol, Time, ...
          parameters.add_element(key.to_s).add_text(v1.to_s)
        end
      end
    end
    parameters
  end

  # Body element named after the :action header, wrapping <parameters>.
  def body_action
    action = REXML::Element.new(@params.dig(:header, :action).to_s)
    action.add_element(body_action_parameters)
    action
  end

  # Assembled <soap:Body>.
  def body
    body = REXML::Element.new('soap:Body')
    body.add_element(body_action)
    body
  end

  # <soap:Envelope> with the SOAP 1.2 namespace, wrapping header and body.
  def envelope
    envelope = REXML::Element.new('soap:Envelope')
    envelope.add_namespace('soap', 'http://www.w3.org/2003/05/soap-envelope')
    envelope.add_element(header)
    envelope.add_element(body)
    envelope
  end

  # Complete XML document with an XML 1.0 / UTF-8 declaration.
  def doc
    doc = REXML::Document.new
    doc << REXML::XMLDecl.new('1.0', 'UTF-8')
    doc.add_element(envelope)
    doc
  end

  # @return [String] the serialised request XML
  def to_s
    doc.to_s
  end
end
| 24.854369 | 97 | 0.678516 |
26d057ccb28d88b0ef9d52c35a0064764e889f83 | 4,249 | #
# Terminus-Bot: An IRC bot to solve all of the problems with IRC bots.
#
# Copyright (C) 2010-2013 Kyle Johnson <[email protected]>, Alex Iadicicco
# (http://terminus-bot.net/)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# TODO: https://developers.google.com/custom-search/v1/overview
# http://googlecode.blogspot.com/2010/11/introducing-google-apis-console-and-our.html
require "uri"
require "net/http"
require 'multi_json'
# Plugin description shown in the bot's plugin list.
register 'Search the Internet with Google.'
# Each command takes exactly one argument (the search query, enforced by
# argc! 1) and replies with formatted results from the matching Google
# AJAX Search vertical via the get_result helper below.
command 'g', 'Search for web pages using Google.' do
  argc! 1
  get_result(@params[0], :web) {|r| reply r}
end
command 'gimage', 'Search for images using Google.' do
  argc! 1
  get_result(@params[0], :images) {|r| reply r}
end
command 'gvideo', 'Search for videos using Google.' do
  argc! 1
  get_result(@params[0], :video) {|r| reply r}
end
command 'gpatent', 'Search patents using Google.' do
  argc! 1
  get_result(@params[0], :patent) {|r| reply r}
end
command 'gbook', 'Search books using Google.' do
  argc! 1
  get_result(@params[0], :books) {|r| reply r}
end
command 'gnews', 'Search news using Google.' do
  argc! 1
  get_result(@params[0], :news) {|r| reply r}
end
command 'gblog', 'Search blogs using Google.' do
  argc! 1
  get_result(@params[0], :blogs) {|r| reply r}
end
helpers do
  # Queries the Google AJAX Search API for +query+ in the +type+ vertical
  # (:web, :images, :video, :patent, :books, :news, :blogs) and yields
  # either an Array of IRC-formatted result lines or the String
  # "No results.".
  # NOTE(review): ajax.googleapis.com search endpoints are deprecated by
  # Google — see the TODO at the top of the file about the Custom Search
  # API; confirm the endpoint still responds before relying on this.
  def get_result query, type
    $log.debug('google') { "Searching #{type} for #{query}" }
    uri = URI("https://ajax.googleapis.com/ajax/services/search/#{type}")
    query_hash = {:v => "1.0", :q => query}
    http_get(uri, query_hash) do |http|
      if http.response.empty?
        raise 'No response from google.com.'
      end
      response = MultiJson.load http.response
      results = []
      # The :resultlimit bot config caps how many hits are shown (default 3).
      limit = get_config(:resultlimit, 3).to_i
      response["responseData"]["results"].each_with_index do |result, num|
        break if num >= limit
        # \02 is the IRC bold toggle wrapped around each title.
        case type
        when :web
          results << "\02#{html_decode result["titleNoFormatting"]}\02 - #{URI.unescape(result["url"])}"
        when :images
          results << "\02#{html_decode result["titleNoFormatting"]}\02 - #{URI.unescape(result["url"])}"
        when :books
          results << "\02#{html_decode result["titleNoFormatting"]}\02 by #{html_decode result["authors"]} - #{URI.unescape(result["url"])} - #{result["bookId"]} - Published: #{result["publishedYear"]} - #{result["pageCount"]} Pages"
        when :news
          results << "\02#{html_decode result["titleNoFormatting"]}\02 - #{URI.unescape(result["url"])}"
        when :blogs
          results << "\02#{html_decode result["titleNoFormatting"]}\02 by #{html_decode result["author"]} - #{URI.unescape(result["postUrl"])} - Published #{result["publishedDate"]}"
        when :patent
          results << "\02#{html_decode result["titleNoFormatting"]}\02 - #{URI.unescape(result["url"])} - assigned to #{html_decode result["assignee"]} - #{result["patentNumber"]} (#{result["patentStatus"]}) - Applied for on: #{result["applicationDate"]}"
        when :video
          results << "\02#{html_decode result["titleNoFormatting"]}\02 - #{result["url"]}"
        end
      end
      yield (results.empty? ? "No results." : results)
    end
  end
end
# vim: set tabstop=2 expandtab:
| 31.708955 | 255 | 0.673335 |
08e1533f4dab45037c6132ec81a83b2c1eeddee2 | 246 | require 'opal'
# Print the numbers 1 through 4, one per line.
(1..4).each { |n| puts n }

# Demo class: a name accessor plus a catch-all for unknown method calls.
class Foo
  attr_accessor :name

  # Report (rather than raise on) any undefined method call.
  def method_missing(sym, *args, &block)
    puts "You tried to call: #{sym}"
  end
end

person = Foo.new
person.name = 'Adam Beynon'
puts person.name
person.do_task
ac4c2c1f28f438579251bbaabc1e4fbf923eb8cc | 5,718 | ##
# $Id$
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
class Metasploit3 < Msf::Auxiliary
# Exploit mixins should be called first
include Msf::Exploit::Remote::HttpClient
include Msf::Auxiliary::WMAPScanServer
# Scanner mixin should be near last
include Msf::Auxiliary::Scanner
include Msf::Auxiliary::Report
def initialize
super(
'Name' => 'HTTP Subversion Scanner',
'Version' => '$Revision$',
'Description' => 'Detect subversion directories and files and analize its content. Only SVN Version > 7 supported',
'Author' => ['et'],
'License' => MSF_LICENSE
)
register_options(
[
OptString.new('PATH', [ true, "The test path to .svn directory", '/']),
OptBool.new('GET_SOURCE', [ false, "Attempt to obtain file source code", true ]),
OptBool.new('SHOW_SOURCE', [ false, "Show source code", true ])
], self.class)
register_advanced_options(
[
OptInt.new('ErrorCode', [ true, "Error code for non existent directory", 404]),
OptPath.new('HTTP404Sigs', [ false, "Path of 404 signatures to use",
File.join(Msf::Config.install_root, "data", "wmap", "wmap_404s.txt")
]
),
OptBool.new('NoDetailMessages', [ false, "Do not display detailed test messages", true ])
], self.class)
end
def run_host(target_host)
conn = true
ecode = nil
emesg = nil
tpath = datastore['PATH']
if tpath[-1,1] != '/'
tpath += '/'
end
ecode = datastore['ErrorCode'].to_i
vhost = datastore['VHOST'] || wmap_target_host
#
# Detect error code
#
begin
randdir = Rex::Text.rand_text_alpha(5).chomp + '/'
res = send_request_cgi({
'uri' => tpath+randdir,
'method' => 'GET',
'ctype' => 'text/html'
}, 20)
return if not res
tcode = res.code.to_i
# Look for a string we can signature on as well
if(tcode >= 200 and tcode <= 299)
File.open(datastore['HTTP404Sigs'], 'rb').each do |str|
if(res.body.index(str))
emesg = str
break
end
end
if(not emesg)
print_status("Using first 256 bytes of the response as 404 string")
emesg = res.body[0,256]
else
print_status("Using custom 404 string of '#{emesg}'")
end
else
ecode = tcode
print_status("Using code '#{ecode}' as not found.")
end
rescue ::Rex::ConnectionRefused, ::Rex::HostUnreachable, ::Rex::ConnectionTimeout
conn = false
rescue ::Timeout::Error, ::Errno::EPIPE
end
return if not conn
dm = datastore['NoDetailMessages']
begin
turl = tpath+'.svn/entries'
res = send_request_cgi({
'uri' => turl,
'method' => 'GET',
'version' => '1.0',
}, 10)
if(not res or ((res.code.to_i == ecode) or (emesg and res.body.index(emesg))))
if dm == false
print_status("[#{target_host}] NOT Found. #{tpath} #{res.code}")
end
else
print_status("[#{target_host}] SVN Entries file found.")
report_note(
:host => target_host,
:proto => 'HTTP',
:port => rport,
:type => 'SVN_ENTRIES',
:data => "#{turl}"
)
vers = res.body[0..1].chomp.to_i
if vers <= 6
print_error("[#{target_host}] Version #{vers} not supported")
return
end
n = 0
res.body.split("\f\n").each do |record|
resarr = []
resarr = record.to_s.split("\n")
if n==0
#first record
version = resarr[0]
sname = "CURRENT"
skind = resarr[2]
srevision = resarr[3]
surl = resarr[4]
slastauthor = resarr[11]
else
sname = resarr[0]
skind = resarr[1]
srevision = resarr[2]
surl = resarr[3]
slastauthor = resarr[10]
end
print_status("[#{target_host}] #{skind} #{sname} [#{slastauthor}]")
if slastauthor and slastauthor.length > 0
report_note(
:host => target_host,
:proto => 'HTTP',
:port => rport,
:type => 'USERNAME',
:data => "#{slastauthor}"
)
end
if skind
if skind == 'dir'
report_note(
:host => target_host,
:proto => 'HTTP',
:port => rport,
:type => 'DIRECTORY',
:data => "#{sname}"
)
end
if skind == 'file'
report_note(
:host => target_host,
:proto => 'HTTP',
:port => rport,
:type => 'FILE',
:data => "#{sname}"
)
if datastore['GET_SOURCE']
print_status("- Trying to get file #{sname} source code.")
begin
turl = tpath+'.svn/text-base/'+sname+'.svn-base'
print_status("- Location: #{turl}")
srcres = send_request_cgi({
'uri' => turl,
'method' => 'GET',
'version' => '1.0',
}, 10)
if srcres and srcres.body.length > 0
if datastore['SHOW_SOURCE']
print_status("#{srcres.body}")
end
report_note(
:host => target_host,
:proto => 'HTTP',
:port => rport,
:type => 'SOURCE_CODE',
:data => "#{sname} Code: #{srcres.body}"
)
end
rescue ::Rex::ConnectionRefused, ::Rex::HostUnreachable, ::Rex::ConnectionTimeout
rescue ::Timeout::Error, ::Errno::EPIPE
end
end
end
end
n += 1
end
print_status("Done. #{n} records.")
end
rescue ::Rex::ConnectionRefused, ::Rex::HostUnreachable, ::Rex::ConnectionTimeout
rescue ::Timeout::Error, ::Errno::EPIPE
end
end
end
| 24.02521 | 118 | 0.56943 |
6204c53a1ad6e72b3cbde32b547900f968af303d | 19,025 | describe :process_spawn, :shared => true do
before :each do
@name = tmp("kernel_spawn.txt")
end
after :each do
rm_r @name
end
it "executes the given command" do
lambda { Process.wait @object.spawn("echo spawn") }.should output_to_fd("spawn\n")
end
it "returns the process ID of the new process as a Fixnum" do
pid = @object.spawn(ruby_cmd("exit"))
Process.wait pid
pid.should be_an_instance_of(Fixnum)
end
it "returns immediately" do
start = Time.now
pid = @object.spawn(ruby_cmd("sleep 10"))
(Time.now - start).should < 5
Process.kill :KILL, pid
Process.wait pid
end
# argv processing
describe "with a single argument" do
it "subjects the specified command to shell expansion" do
lambda { Process.wait @object.spawn("echo *") }.should_not output_to_fd("*\n")
end
it "creates an argument array with shell parsing semantics for whitespace" do
lambda { Process.wait @object.spawn("echo a b c d") }.should output_to_fd("a b c d\n")
end
it "calls #to_str to convert the argument to a String" do
o = mock("to_str")
o.should_receive(:to_str).and_return("echo foo")
lambda { Process.wait @object.spawn(o) }.should output_to_fd("foo\n")
end
it "raises an ArgumentError if the command includes a null byte" do
lambda { @object.spawn "\000" }.should raise_error(ArgumentError)
end
it "raises a TypeError if the argument does not respond to #to_str" do
lambda { @object.spawn :echo }.should raise_error(TypeError)
end
end
describe "with multiple arguments" do
it "does not subject the arguments to shell expansion" do
lambda { Process.wait @object.spawn("echo", "*") }.should output_to_fd("*\n")
end
it "preserves whitespace in passed arguments" do
lambda { Process.wait @object.spawn("echo", "a b c d") }.should output_to_fd("a b c d\n")
end
it "calls #to_str to convert the arguments to Strings" do
o = mock("to_str")
o.should_receive(:to_str).and_return("foo")
lambda { Process.wait @object.spawn("echo", o) }.should output_to_fd("foo\n")
end
it "raises an ArgumentError if an argument includes a null byte" do
lambda { @object.spawn "echo", "\000" }.should raise_error(ArgumentError)
end
it "raises a TypeError if an argument does not respond to #to_str" do
lambda { @object.spawn "echo", :foo }.should raise_error(TypeError)
end
end
describe "with a command array" do
it "uses the first element as the command name and the second as the argv[0] value" do
lambda { Process.wait @object.spawn(["/bin/sh", "argv_zero"], "-c", "echo $0") }.should output_to_fd("argv_zero\n")
end
it "does not subject the arguments to shell expansion" do
lambda { Process.wait @object.spawn(["echo", "echo"], "*") }.should output_to_fd("*\n")
end
it "preserves whitespace in passed arguments" do
lambda { Process.wait @object.spawn(["echo", "echo"], "a b c d") }.should output_to_fd("a b c d\n")
end
it "calls #to_ary to convert the argument to an Array" do
o = mock("to_ary")
o.should_receive(:to_ary).and_return(["/bin/sh", "argv_zero"])
lambda { Process.wait @object.spawn(o, "-c", "echo $0") }.should output_to_fd("argv_zero\n")
end
it "calls #to_str to convert the first element to a String" do
o = mock("to_str")
o.should_receive(:to_str).and_return("echo")
lambda { Process.wait @object.spawn([o, "echo"], "foo") }.should output_to_fd("foo\n")
end
it "calls #to_str to convert the second element to a String" do
o = mock("to_str")
o.should_receive(:to_str).and_return("echo")
lambda { Process.wait @object.spawn(["echo", o], "foo") }.should output_to_fd("foo\n")
end
it "raises an ArgumentError if the Array does not have exactly two elements" do
lambda { @object.spawn([]) }.should raise_error(ArgumentError)
lambda { @object.spawn([:a]) }.should raise_error(ArgumentError)
lambda { @object.spawn([:a, :b, :c]) }.should raise_error(ArgumentError)
end
it "raises an ArgumentError if the Strings in the Array include a null byte" do
lambda { @object.spawn ["\000", "echo"] }.should raise_error(ArgumentError)
lambda { @object.spawn ["echo", "\000"] }.should raise_error(ArgumentError)
end
it "raises a TypeError if an element in the Array does not respond to #to_str" do
lambda { @object.spawn ["echo", :echo] }.should raise_error(TypeError)
lambda { @object.spawn [:echo, "echo"] }.should raise_error(TypeError)
end
end
# env handling
after :each do
ENV.delete("FOO")
end
it "sets environment variables in the child environment" do
lambda do
Process.wait @object.spawn({"FOO" => "BAR"}, ruby_cmd('print ENV["FOO"]'))
end.should output_to_fd("BAR")
end
it "unsets environment variables whose value is nil" do
ENV["FOO"] = "BAR"
lambda do
Process.wait @object.spawn({"FOO" => nil}, ruby_cmd('print ENV["FOO"]'))
end.should output_to_fd("")
end
it "calls #to_hash to convert the environment" do
o = mock("to_hash")
o.should_receive(:to_hash).and_return({"FOO" => "BAR"})
lambda do
Process.wait @object.spawn(o, ruby_cmd('print ENV["FOO"]'))
end.should output_to_fd("BAR")
end
it "calls #to_str to convert the environment keys" do
o = mock("to_str")
o.should_receive(:to_str).and_return("FOO")
lambda do
Process.wait @object.spawn({o => "BAR"}, ruby_cmd('print ENV["FOO"]'))
end.should output_to_fd("BAR")
end
it "calls #to_str to convert the environment values" do
o = mock("to_str")
o.should_receive(:to_str).and_return("BAR")
lambda do
Process.wait @object.spawn({"FOO" => o}, ruby_cmd('print ENV["FOO"]'))
end.should output_to_fd("BAR")
end
it "raises an ArgumentError if an environment key includes an equals sign" do
lambda do
@object.spawn({"FOO=" => "BAR"}, ruby_cmd('print ENV["FOO"]'))
end.should raise_error(ArgumentError)
end
it "raises an ArgumentError if an environment key includes a null byte" do
lambda do
@object.spawn({"\000" => "BAR"}, ruby_cmd('print ENV["FOO"]'))
end.should raise_error(ArgumentError)
end
it "raises an ArgumentError if an environment value includes a null byte" do
lambda do
@object.spawn({"FOO" => "\000"}, ruby_cmd('print ENV["FOO"]'))
end.should raise_error(ArgumentError)
end
# :unsetenv_others
it "unsets other environment variables when given a true :unsetenv_others option" do
ENV["FOO"] = "BAR"
lambda do
Process.wait @object.spawn(ruby_cmd('print ENV["FOO"]'), :unsetenv_others => true)
end.should output_to_fd("")
end
it "unsets other environment variables when given a non-false :unsetenv_others option" do
ENV["FOO"] = "BAR"
lambda do
Process.wait @object.spawn(ruby_cmd('print ENV["FOO"]'), :unsetenv_others => :true)
end.should output_to_fd("")
end
it "does not unset other environment variables when given a false :unsetenv_others option" do
ENV["FOO"] = "BAR"
lambda do
Process.wait @object.spawn(ruby_cmd('print ENV["FOO"]'), :unsetenv_others => false)
end.should output_to_fd("BAR")
end
it "does not unset other environment variables when given a nil :unsetenv_others option" do
ENV["FOO"] = "BAR"
lambda do
Process.wait @object.spawn(ruby_cmd('print ENV["FOO"]'), :unsetenv_others => nil)
end.should output_to_fd("BAR")
end
it "does not unset environment variables included in the environment hash" do
lambda do
Process.wait @object.spawn({"FOO" => "BAR"}, ruby_cmd('print ENV["FOO"]'), :unsetenv_others => true)
end.should output_to_fd("BAR")
end
# :pgroup
platform_is_not :windows do
it "joins the current process group by default" do
lambda do
Process.wait @object.spawn(ruby_cmd("print Process.getpgid(Process.pid)"))
end.should output_to_fd(Process.getpgid(Process.pid).to_s)
end
it "joins the current process if :pgroup => false" do
lambda do
Process.wait @object.spawn(ruby_cmd("print Process.getpgid(Process.pid)"), :pgroup => false)
end.should output_to_fd(Process.getpgid(Process.pid).to_s)
end
it "joins the current process if :pgroup => nil" do
lambda do
Process.wait @object.spawn(ruby_cmd("print Process.getpgid(Process.pid)"), :pgroup => nil)
end.should output_to_fd(Process.getpgid(Process.pid).to_s)
end
it "joins a new process group if :pgroup => true" do
process = lambda do
Process.wait @object.spawn(ruby_cmd("print Process.getpgid(Process.pid)"), :pgroup => true)
end
process.should_not output_to_fd(Process.getpgid(Process.pid).to_s)
process.should output_to_fd(/\d+/)
end
it "joins a new process group if :pgroup => 0" do
process = lambda do
Process.wait @object.spawn(ruby_cmd("print Process.getpgid(Process.pid)"), :pgroup => 0)
end
process.should_not output_to_fd(Process.getpgid(Process.pid).to_s)
process.should output_to_fd(/\d+/)
end
it "joins the specified process group if :pgroup => pgid" do
lambda do
Process.wait @object.spawn(ruby_cmd("print Process.getpgid(Process.pid)"), :pgroup => 123)
end.should_not output_to_fd("123")
end
it "raises an ArgumentError if given a negative :pgroup option" do
lambda { @object.spawn("echo", :pgroup => -1) }.should raise_error(ArgumentError)
end
it "raises a TypeError if given a symbol as :pgroup option" do
lambda { @object.spawn("echo", :pgroup => :true) }.should raise_error(TypeError)
end
end
platform_is :windows do
it "raises an ArgumentError if given :pgroup option" do
lambda { @object.spawn("echo", :pgroup => false) }.should raise_error(ArgumentError)
end
end
# :rlimit_core
# :rlimit_cpu
# :rlimit_data
# :chdir
it "uses the current working directory as its working directory" do
lambda do
Process.wait @object.spawn(ruby_cmd("print Dir.pwd"))
end.should output_to_fd(Dir.pwd)
end
describe "when passed :chdir" do
before do
@dir = tmp("spawn_chdir", false)
Dir.mkdir @dir
end
after do
rm_r @dir
end
it "changes to the directory passed for :chdir" do
lambda do
Process.wait @object.spawn(ruby_cmd("print Dir.pwd"), :chdir => @dir)
end.should output_to_fd(@dir)
end
it "calls #to_path to convert the :chdir value" do
dir = mock("spawn_to_path")
dir.should_receive(:to_path).and_return(@dir)
lambda do
Process.wait @object.spawn(ruby_cmd("print Dir.pwd"), :chdir => dir)
end.should output_to_fd(@dir)
end
end
# :umask
it "uses the current umask by default" do
lambda do
Process.wait @object.spawn(ruby_cmd("print File.umask"))
end.should output_to_fd(File.umask.to_s)
end
it "sets the umask if given the :umask option" do
lambda do
Process.wait @object.spawn(ruby_cmd("print File.umask"), :umask => 146)
end.should output_to_fd("146")
end
# redirection
it "redirects STDOUT to the given file descriptior if :out => Fixnum" do
File.open(@name, 'w') do |file|
lambda do
Process.wait @object.spawn(ruby_cmd("print :glark"), :out => file.fileno)
end.should output_to_fd("glark", file)
end
end
it "redirects STDOUT to the given file if :out => IO" do
File.open(@name, 'w') do |file|
lambda do
Process.wait @object.spawn(ruby_cmd("print :glark"), :out => file)
end.should output_to_fd("glark", file)
end
end
it "redirects STDOUT to the given file if :out => String" do
Process.wait @object.spawn(ruby_cmd("print :glark"), :out => @name)
@name.should have_data("glark")
end
it "redirects STDOUT to the given file if :out => [String name, String mode]" do
Process.wait @object.spawn(ruby_cmd("print :glark"), :out => [@name, 'w'])
@name.should have_data("glark")
end
it "redirects STDERR to the given file descriptior if :err => Fixnum" do
File.open(@name, 'w') do |file|
lambda do
Process.wait @object.spawn(ruby_cmd("STDERR.print :glark"), :err => file.fileno)
end.should output_to_fd("glark", file)
end
end
it "redirects STDERR to the given file descriptor if :err => IO" do
File.open(@name, 'w') do |file|
lambda do
Process.wait @object.spawn(ruby_cmd("STDERR.print :glark"), :err => file)
end.should output_to_fd("glark", file)
end
end
it "redirects STDERR to the given file if :err => String" do
Process.wait @object.spawn(ruby_cmd("STDERR.print :glark"), :err => @name)
@name.should have_data("glark")
end
it "redirects both STDERR and STDOUT to the given file descriptior" do
File.open(@name, 'w') do |file|
lambda do
Process.wait @object.spawn(ruby_cmd("print(:glark); STDOUT.flush; STDERR.print(:bang)"),
[:out, :err] => file.fileno)
end.should output_to_fd("glarkbang", file)
end
end
it "redirects both STDERR and STDOUT to the given IO" do
File.open(@name, 'w') do |file|
lambda do
Process.wait @object.spawn(ruby_cmd("print(:glark); STDOUT.flush; STDERR.print(:bang)"),
[:out, :err] => file)
end.should output_to_fd("glarkbang", file)
end
end
it "does NOT redirect both STDERR and STDOUT at the time to the given name" do
# this behavior is not guaranteed; it may be changed after 1.9.3 or later. [ruby-dev:41433]
touch @name
Process.wait @object.spawn(ruby_cmd("print(:glark); STDOUT.flush; STDERR.print(:bang)"),
[:out, :err] => @name)
@name.should have_data("")
end
context "when passed :close_others => true" do
before :each do
@output = tmp("spawn_close_others_true")
@options = { :close_others => true }
@command = %[Process.wait spawn("#{RUBY_EXE}", "-e", "%s", #{@options.inspect})]
end
after :each do
rm_r @output
end
it "closes file descriptors >= 3 in the child process" do
IO.pipe do |r, w|
begin
pid = @object.spawn(ruby_cmd(""), @options)
w.close
lambda { r.read_nonblock(1) }.should raise_error(EOFError)
ensure
Process.kill(:TERM, pid)
Process.wait(pid)
end
end
end
it "does not close STDIN" do
cmd = @command % ["STDOUT.puts STDIN.read(0).inspect"]
ruby_exe(cmd, :args => "> #{@output}")
@output.should have_data(%[""\n])
end
it "does not close STDOUT" do
cmd = @command % ["STDOUT.puts 'hello'"]
ruby_exe(cmd, :args => "> #{@output}")
@output.should have_data("hello\n")
end
it "does not close STDERR" do
cmd = @command % ["STDERR.puts 'hello'"]
ruby_exe(cmd, :args => "2> #{@output}")
@output.should have_data("hello\n")
end
end
context "when passed :close_others => false" do
before :each do
@output = tmp("spawn_close_others_false")
@options = { :close_others => false }
@command = %[Process.wait spawn("#{RUBY_EXE}", "-e", "%s", #{@options.inspect})]
end
after :each do
rm_r @output
end
it "closes file descriptors >= 3 in the child process because they are set close_on_exec by default" do
IO.pipe do |r, w|
begin
pid = @object.spawn(ruby_cmd(""), @options)
w.close
lambda { r.read_nonblock(1) }.should raise_error(EOFError)
ensure
Process.kill(:TERM, pid)
Process.wait(pid)
end
end
end
it "does not close file descriptors >= 3 in the child process if fds are set close_on_exec=false" do
IO.pipe do |r, w|
r.close_on_exec = false
w.close_on_exec = false
begin
pid = @object.spawn(ruby_cmd(""), @options)
w.close
lambda { r.read_nonblock(1) }.should raise_error(Errno::EAGAIN)
ensure
Process.kill(:TERM, pid)
Process.wait(pid)
end
end
end
it "does not close STDIN" do
cmd = @command % ["STDOUT.puts STDIN.read(0).inspect"]
ruby_exe(cmd, :args => "> #{@output}")
@output.should have_data(%[""\n])
end
it "does not close STDOUT" do
cmd = @command % ["STDOUT.puts 'hello'"]
ruby_exe(cmd, :args => "> #{@output}")
@output.should have_data("hello\n")
end
it "does not close STDERR" do
cmd = @command % ["STDERR.puts 'hello'"]
ruby_exe(cmd, :args => "2> #{@output}")
@output.should have_data("hello\n")
end
end
# error handling
it "raises an ArgumentError if passed no command arguments" do
lambda { @object.spawn }.should raise_error(ArgumentError)
end
it "raises an ArgumentError if passed env or options but no command arguments" do
lambda { @object.spawn({}) }.should raise_error(ArgumentError)
end
it "raises an ArgumentError if passed env and options but no command arguments" do
lambda { @object.spawn({}, {}) }.should raise_error(ArgumentError)
end
it "raises an Errno::ENOENT for an empty string" do
lambda { @object.spawn "" }.should raise_error(Errno::ENOENT)
end
it "raises an Errno::ENOENT if the command does not exist" do
lambda { @object.spawn "nonesuch" }.should raise_error(Errno::ENOENT)
end
unless File.executable?(__FILE__) # Some FS (e.g. vboxfs) locate all files executable
it "raises an Errno::EACCES when the file does not have execute permissions" do
lambda { @object.spawn __FILE__ }.should raise_error(Errno::EACCES)
end
end
it "raises an Errno::EACCES when passed a directory" do
lambda { @object.spawn File.dirname(__FILE__) }.should raise_error(Errno::EACCES)
end
it "raises an ArgumentError when passed a string key in options" do
lambda { @object.spawn("echo", "chdir" => Dir.pwd) }.should raise_error(ArgumentError)
end
it "raises an ArgumentError when passed an unknown option key" do
lambda { @object.spawn("echo", :nonesuch => :foo) }.should raise_error(ArgumentError)
end
  # Integer keys in the options hash name file descriptors in the CHILD;
  # the value names the parent resource the child fd should inherit.
  describe "with Integer option keys" do
    before :each do
      # Scratch file backing the IO the child process will write through.
      @name = tmp("spawn_fd_map.txt")
      @io = new_io @name, "w+"
      @io.sync = true
    end
    after :each do
      @io.close unless @io.closed?
      rm_r @name
    end
    it "maps the key to a file descriptor in the child that inherits the file descriptor from the parent specified by the value" do
      # Pick an fd number that cannot collide with @io's own descriptor.
      child_fd = @io.fileno + 1
      args = ruby_cmd(fixture(__FILE__, "map_fd.rb"), :args => [child_fd.to_s])
      pid = @object.spawn(*args, { child_fd => @io })
      Process.waitpid pid
      @io.rewind
      @io.read.should == "writing to fd: #{child_fd}"
    end
  end
| 33.144599 | 131 | 0.642943 |
ed69bbd006e81198ddc9c6f8dcfa22d7178a6044 | 292 | module AdaptivePayments
class InvoiceData < JsonModel
attribute :items, NodeList[InvoiceItem], :param => "item"
attribute :total_tax, Decimal, :param => "totalTax"
attribute :total_shipping, Decimal, :param => "totalShipping"
end
end
| 36.5 | 79 | 0.619863 |
e2a483414568d6d817ee44735b19f48b808b5bff | 409 | # frozen_string_literal: false
# working with hash
@my_hash = {
  name: 'Thiago',
  surname: 'Ribeiro',
  age: 29
}

puts @my_hash

@my_hash.each do |k, v|
  puts "key: #{k} with value #{v}"
end

puts @my_hash[:name].to_s
@my_hash[:name] = 'Daniel'
puts @my_hash[:name].to_s

# Look up a value by key (string or symbol accepted).
# Fix: the old implementation used Hash#collect, which returned an Array
# containing nil for every NON-matching key (so `puts` printed stray
# blank lines); a direct lookup returns just the value, or nil when the
# key is absent.
def get_value(key)
  @my_hash[key.to_sym]
end

puts get_value('age').to_s
5d459f8021910663e90f60c639fe1b868a12d14e | 2,132 | class Libpagemaker < Formula
  desc "Imports file format of Aldus/Adobe PageMaker documents"
  homepage "https://wiki.documentfoundation.org/DLP/Libraries/libpagemaker"
  url "https://dev-www.libreoffice.org/src/libpagemaker/libpagemaker-0.0.4.tar.xz"
  sha256 "66adacd705a7d19895e08eac46d1e851332adf2e736c566bef1164e7a442519d"
  license "MPL-2.0"

  # Watch the LibreOffice source mirror for new libpagemaker tarballs.
  livecheck do
    url "https://dev-www.libreoffice.org/src/"
    regex(/href=["']?libpagemaker[._-]v?(\d+(?:\.\d+)+)\.t/i)
  end

  bottle do
    sha256 cellar: :any, arm64_big_sur: "e95a8d6dca9411adefbeb5bebd6e34112f0deec1ec9fe0d8f9bea5502f2a7a37"
    sha256 cellar: :any, big_sur:       "ccdd8cd950304039a111f5ee80658b809c040d83f6321701384bc96dc596b569"
    sha256 cellar: :any, catalina:      "9759e3d26a09e7b99bbf3c49f05bfa7724334b639245f5791d9bada9df977d68"
    sha256 cellar: :any, mojave:        "05fafc8fea710cc53cd310192364d72b9458114b5404fdff8f6adbff2f9175bf"
    sha256 cellar: :any, high_sierra:   "db0f93e5cf4cb6dfe4810b7cb8240db5c2c439a717d09def2f6163e3db6984c6"
    sha256 cellar: :any, sierra:        "0809994f61c8cd34e4edca3496273f293d314e89da5e8ec2a3df280cf436ba37"
    sha256 cellar: :any, el_capitan:    "10c23ab2759830f22ff8080cd4da18252fb719445bd651ab4664e785682c100a"
  end

  # boost and pkg-config are build-time only; librevenge is a runtime
  # (link) dependency.
  depends_on "boost" => :build
  depends_on "pkg-config" => :build
  depends_on "librevenge"

  def install
    system "./configure", "--without-docs",
                          "--disable-dependency-tracking",
                          "--enable-static=no",
                          "--prefix=#{prefix}"
    system "make", "install"
  end

  # Smoke test: compile and run a tiny program against the installed
  # headers and shared library.
  test do
    (testpath/"test.cpp").write <<~EOS
      #include <libpagemaker/libpagemaker.h>
      int main() {
        libpagemaker::PMDocument::isSupported(0);
      }
    EOS
    system ENV.cxx, "test.cpp", "-o", "test",
                    "-I#{Formula["librevenge"].include}/librevenge-0.0",
                    "-I#{include}/libpagemaker-0.0",
                    "-L#{Formula["librevenge"].lib}",
                    "-L#{lib}",
                    "-lrevenge-0.0",
                    "-lpagemaker-0.0"
    system "./test"
  end
end
| 41 | 106 | 0.657129 |
1af07f06d3f5476e777879c57afafc77f9c355ec | 2,126 | class HomeController < ApplicationController
  # The landing page is public; presumably authentication is enforced
  # application-wide and skipped here — confirm in ApplicationController.
  skip_filter :authenticate_user!

  # Landing page.  For a signed-in user, primes the session according to
  # the user's role, then loads the public home-page content and runs the
  # faceted Solr searches used by the search widgets.
  def index
    if !current_user.nil?
      if current_user.role.authority=='jobseeker'
        # Store the jobseeker's profile/record ids in the session.
        session['jprofile']=Jobseeker.find_by(user_id:current_user.id).jprofile.id
        session['jobseeker']=Jobseeker.find_by(user_id:current_user.id).id
        #redirect_to jprofiles_path
      elsif current_user.role.authority=='employer'
        # Store the employer's profile/record ids in the session.
        session['eprofile']=Employer.find_by(user_id:current_user.id).eprofile.id
        session['employer']=Employer.find_by(user_id:current_user.id).id
        #redirect_to eprofiles_path
      elsif current_user.role.authority=='admin'
        redirect_to admin_index_path
      elsif current_user.role.authority=='staff'
        # Staff act on behalf of their employer: remember who the real
        # user is, then swap the Devise session over to the employer's
        # user account.
        session['staff_user']= current_user
        session['staff_user_name']= current_user.name
        @user = current_user.employer.user
        session['eprofile']=Employer.find_by(user_id: @user.id).eprofile.id
        session['employer']=Employer.find_by(user_id: @user.id).id
        sign_out current_user
        sign_in @user
      end
    end
    # Public home-page content.
    @testimonials = Testimonial.all.limit(4).order('created_at desc')
    @jobs = Job.where(deleted: false, status: true)
    @jprofiles = Jprofile.all
    #@jprofiles = Jprofile.where(deleted: false, active: true)
    @eprofiles = Eprofile.all
    @users=User.all
    # Faceted job search; each filter only applies when the matching
    # query param is present.
    @res= Sunspot.search(Job) do
      with(:job_type, params[:job_type].downcase) if params['job_type'].present?
      facet(:job_type)
      with(:companyname, params[:company].downcase) if params['company'].present?
      facet(:companyname)
      with(:industry, params[:industry].downcase) if params['industry'].present?
      facet(:industry)
      with(:state, params[:state].downcase) if params['state'].present?
      facet(:state)
    end
    # Faceted candidate-profile search.
    @res1= Sunspot.search(Jprofile) do
      with(:state, params[:state].downcase) if params['state'].present?
      facet(:state)
      with(:skills, params[:skills].downcase) if params['skills'].present?
      facet(:skills)
      with(:educations, params[:education].downcase) if params['education'].present?
      facet(:educations)
    end
  end
end
| 32.212121 | 82 | 0.698024 |
f8fdaf706becb538ce19ab631eb9356b9c367b27 | 841 | class Hebcal < Formula
  desc "Perpetual Jewish calendar for the command-line"
  homepage "https://github.com/hebcal/hebcal"
  url "https://github.com/hebcal/hebcal/archive/v4.10.tar.gz"
  sha256 "c3728870c95b9289df9f6a4ef3cdd2303ee3d7978c5114013b04703528a4923e"

  bottle do
    cellar :any_skip_relocation
    sha256 "124d8d686c08c0df51c677510126ed92bc2b0731cc2627944f4d698c0b8dab9c" => :sierra
    sha256 "b74724fc183f43732db2b1ae910eb566f1555be92e5f6e90aa04089bbdbb2cc6" => :el_capitan
    sha256 "bb04224c664ab61c340428dd364a0456166e96e55242dd573084371d44a50874" => :yosemite
  end

  # Build-time only: configure is driven with explicit autotools binaries.
  depends_on "autoconf" => :build
  depends_on "automake" => :build

  def install
    # ACLOCAL/AUTOMAKE are passed explicitly to the configure script.
    system "./configure", "--prefix=#{prefix}", "ACLOCAL=aclocal", "AUTOMAKE=automake"
    system "make", "install"
  end

  # Smoke test: the binary runs and exits successfully.
  test do
    system "#{bin}/hebcal"
  end
end
| 32.346154 | 92 | 0.762188 |
5d4cf2eeec78acbdb0e6658d01dddccf385d48f4 | 154 | chef_client_updater "Install Chef #{node['chef_client_updater']['version']}" do
  channel 'current'
  # NOTE(review): the resource name above interpolates
  # node['chef_client_updater']['version'], but the installed version is
  # pinned to 12.13.37 here — confirm the two are meant to agree.
  version '12.13.37'
  # Kill the running chef-client after install so the new binary is
  # picked up on the next run.
  post_install_action 'kill'
end
| 25.666667 | 79 | 0.746753 |
bfb995c11865ef3c02b1b807851ecb1e5df95ffb | 1,133 | =begin
#Datadog API V1 Collection
#Collection of all Datadog Public endpoints.
The version of the OpenAPI document: 1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.0.0-SNAPSHOT
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for DatadogAPIClient::V1::SLOResponse
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
# Generated spec for DatadogAPIClient::V1::SLOResponse: verifies the
# model instantiates, with placeholder examples for each attribute.
describe DatadogAPIClient::V1::SLOResponse do
  # described_class keeps the spec valid if the constant is moved.
  let(:instance) { described_class.new }

  describe 'test an instance of SLOResponse' do
    it 'should create an instance of SLOResponse' do
      expect(instance).to be_instance_of(described_class)
    end
  end

  describe 'test attribute "data"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "errors"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 27.634146 | 102 | 0.748455 |
620db9ef714f514e6cfdf02a1cd556b075681995 | 5,922 | require_relative 'docker_server_group'
require_relative 'docker_server'
require_relative 'service'
require 'uri'
# DSL for deploy configuration files.  Mixed into a context that
# provides +fetch+/+set+ for configuration storage, it exposes setters
# for container options (env vars, ports, volumes, capabilities, ...)
# and builders for the server group / service / health-check objects
# used during a deploy.
module Centurion::DeployDSL
  # Yields every Docker host in the configured server group and returns
  # the group.
  def on_each_docker_host(&block)
    build_server_group.tap { |hosts| hosts.each { |host| block.call(host) } }
  end

  # Yields only the first Docker host in the configured server group and
  # returns the group.
  def on_first_docker_host(&block)
    build_server_group.tap { |hosts| block.call(hosts.first) }
  end

  # Merges +new_vars+ into the stored environment variables.  Callable
  # values are kept as-is (resolved later); everything else is
  # stringified.  Keys are always stringified.
  def env_vars(new_vars)
    current = fetch(:env_vars, {})
    new_vars.each_pair do |new_key, new_value|
      current[new_key.to_s] = new_value.respond_to?(:call) ? new_value : new_value.to_s
    end
    set(:env_vars, current)
  end

  # Merges +new_labels+ into the stored container labels; both keys and
  # values are stringified.
  def labels(new_labels)
    current = fetch(:labels, {})
    new_labels.each_pair do |new_key, new_value|
      current[new_key.to_s] = new_value.to_s
    end
    set(:labels, current)
  end

  # Records a Linux capability to grant to the container; aborts the
  # process on an unknown capability name.
  def add_capability(new_cap_adds)
    unless valid_capability?(new_cap_adds)
      abort("Invalid capability addition #{new_cap_adds} specified.")
    end
    current = fetch(:cap_adds, [])
    set(:cap_adds, current << new_cap_adds)
  end

  # Records a Linux capability to drop from the container; aborts the
  # process on an unknown capability name.
  def drop_capability(new_cap_drops)
    unless valid_capability?(new_cap_drops)
      abort("Invalid capability drop #{new_cap_drops} specified.")
    end
    current = fetch(:cap_drops, [])
    set(:cap_drops, current << new_cap_drops)
  end

  # Appends a deploy target host (e.g. "docker1.example.com:2375").
  def host(hostname)
    current = fetch(:hosts, [])
    current << hostname
    set(:hosts, current)
  end

  # Sets the container memory limit.
  def memory(memory)
    set(:memory, memory)
  end

  # Sets the container CPU shares.
  def cpu_shares(cpu_shares)
    set(:cpu_shares, cpu_shares)
  end

  # Sets the command the container should run.
  def command(command)
    set(:command, command)
  end

  # Sets the container IPC mode.
  def ipc_mode(mode)
    set(:ipc_mode, mode)
  end

  # Adds the local Docker daemon (from DOCKER_HOST, defaulting to
  # 127.0.0.1) as a deploy target.
  def localhost
    # DOCKER_HOST is like 'tcp://127.0.0.1:2375'
    docker_host_uri = URI.parse(ENV['DOCKER_HOST'] || "tcp://127.0.0.1")
    host_and_port = [docker_host_uri.host, docker_host_uri.port].compact.join(':')
    host(host_and_port)
  end

  # Binds a host port to a container port.  Options: :container_port
  # (required), :type (defaults to 'tcp'), :host_ip.
  def host_port(port, options)
    validate_options_keys(options, [ :host_ip, :container_port, :type ])
    require_options_keys(options, [ :container_port ])

    set(:port_bindings, fetch(:port_bindings, []).tap do |bindings|
      bindings << Centurion::Service::PortBinding.new(port, options[:container_port], options[:type] || 'tcp', options[:host_ip])
    end)
  end

  # Sets the container network mode: 'bridge', 'host', or
  # 'container:<name|id>'.  Aborts on anything else.
  def network_mode(mode)
    if %w(bridge host).include?(mode) || mode =~ /container.*/
      set(:network_mode, mode)
    else
      abort("invalid value for network_mode: #{mode}, value must be one of 'bridge', 'host', or 'container:<name|id>'")
    end
  end

  # Returns the host port of the first port binding.
  def public_port_for(port_bindings)
    # port_bindings = [#<struct Centurion::Service::PortBinding
    #   host_port=17090,
    #   container_port=80,
    #   type="tcp",
    #   host_ip=nil>]
    port_bindings.first.host_port
  end

  # Mounts a host volume into the container.  Options: :container_volume
  # (required).
  def host_volume(volume, options)
    validate_options_keys(options, [ :container_volume ])
    require_options_keys(options, [ :container_volume ])

    set(:binds, fetch(:binds, []).tap do |volumes|
      volumes << Centurion::Service::Volume.new(volume, options[:container_volume])
    end)
  end

  # Returns [{ server: hostname, tags: [...] }, ...] for every server in
  # the group that reports tags for +image+.
  def get_current_tags_for(image)
    build_server_group.inject([]) do |memo, target_server|
      tags = target_server.current_tags_for(image)
      memo += [{ server: target_server.hostname, tags: tags }] if tags
      memo
    end
  end

  # Records the registry type to use.
  def registry(type)
    set(:registry, type.to_s)
  end

  # Installs a custom health check; must be a callable.
  def health_check(method)
    abort("Health check expects a callable (lambda, proc, method), but #{method.class} was specified") unless method.respond_to?(:call)
    set(:health_check, method)
  end

  # Adds an extra /etc/hosts entry ("name:ip") to the container.
  def extra_host(ip, name)
    current = fetch(:extra_hosts, [])
    current.push("#{name}:#{ip}")
    set(:extra_hosts, current)
  end

  # Builds the Service object from the current environment.
  def defined_service
    Centurion::Service.from_env
  end

  # Builds the HealthCheck from configured settings, falling back to
  # http_status_ok? on '/' with a 5s wait and 24 retries.
  def defined_health_check
    Centurion::HealthCheck.new(fetch(:health_check, method(:http_status_ok?)),
                               fetch(:status_endpoint, '/'),
                               fetch(:rolling_deploy_wait_time, 5),
                               fetch(:rolling_deploy_retries, 24))
  end

  # Builds the restart policy from configured (or default) settings.
  def defined_restart_policy
    Centurion::Service::RestartPolicy.new(fetch(:restart_policy_name, 'on-failure'), fetch(:restart_policy_max_retry_count, 10))
  end

  private

  # Builds the DockerServerGroup for the configured hosts.
  def build_server_group
    hosts, docker_path = fetch(:hosts, []), fetch(:docker_path)
    Centurion::DockerServerGroup.new(hosts, docker_path, build_server_params)
  end

  # Raises ArgumentError when +options+ contains a key outside
  # +valid_keys+.
  def validate_options_keys(options, valid_keys)
    unless options.keys.all? { |k| valid_keys.include?(k) }
      raise ArgumentError.new('Options passed with invalid key!')
    end
  end

  # Raises ArgumentError when any of +required_keys+ is missing from
  # +options+.
  def require_options_keys(options, required_keys)
    missing = required_keys.reject { |k| options.keys.include?(k) }

    unless missing.empty?
      raise ArgumentError.new("Options must contain #{missing.inspect}")
    end
  end

  # Whitelist of Linux capability names accepted by
  # add_capability/drop_capability.
  def valid_capability?(capability)
    %w(ALL SETPCAP SYS_MODULE SYS_RAWIO SYS_PACCT SYS_ADMIN SYS_NICE
       SYS_RESOURCE SYS_TIME SYS_TTY_CONFIG MKNOD AUDIT_WRITE AUDIT_CONTROL
       MAC_OVERRIDE MAC_ADMIN NET_ADMIN SYSLOG CHOWN NET_RAW DAC_OVERRIDE FOWNER
       DAC_READ_SEARCH FSETID KILL SETGID SETUID LINUX_IMMUTABLE
       NET_BIND_SERVICE NET_BROADCAST IPC_LOCK IPC_OWNER SYS_CHROOT SYS_PTRACE
       SYS_BOOT LEASE SETFCAP WAKE_ALARM BLOCK_SUSPEND).include?(capability)
  end

  # True when all TLS key/cert settings are present.
  def tls_paths_available?
    Centurion::DockerViaCli.tls_keys.all? { |key| fetch(key).present? }
  end

  # Connection options (TLS and SSH) passed to the server group.
  def build_server_params
    opts = {}

    if fetch(:tlsverify)
      # Bug fix: this used to read fetch(:tlsverify || tls_paths_available?),
      # which always fetched :tlsverify because the symbol itself is
      # truthy, so tls_paths_available? was never consulted.
      opts[:tls] = fetch(:tlsverify) || tls_paths_available?
      opts[:tlscacert] = fetch(:tlscacert)
      opts[:tlscert] = fetch(:tlscert)
      opts[:tlskey] = fetch(:tlskey)
    end

    if fetch(:ssh, false) == true
      opts[:ssh] = true
      # nil is OK for both of these, defaults applied internally
      opts[:ssh_user] = fetch(:ssh_user)
      opts[:ssh_log_level] = fetch(:ssh_log_level)
    end

    opts
  end
end
| 29.172414 | 135 | 0.685917 |
bf296bb02590b164b32218f157e594825158d797 | 181 | class CreateComments < ActiveRecord::Migration
  # Creates the comments table: free-text body plus integer foreign keys
  # to the commented post and the authoring user.
  # NOTE(review): no timestamps and no indexes on post_id/user_id —
  # confirm that is intentional.
  def change
    create_table :comments do |t|
      t.text :body
      t.integer :post_id
      t.integer :user_id
    end
  end
end
| 18.1 | 46 | 0.662983 |
39407e08cf8d754c044feb485e3dcfe48ef8202a | 913 | # returns true if the letters could be re-arranged into a palindrome
# aka, if all but one letter appears twice
# Returns true when the characters of +string+ can be rearranged into a
# palindrome, i.e. at most one character occurs an odd number of times
# (that character would sit in the middle).
#
# Bug fix: the previous version scanned every *character occurrence*
# (not each distinct character) when counting odd frequencies, so a
# character appearing an odd number of times >= 3 was counted more than
# once — e.g. "aaa" wrongly returned false.
def palindrome_permutation?(string)
  # Tally how many times each character appears.
  counts = Hash.new(0)
  string.each_char { |char| counts[char] += 1 }
  # At most one character may have an odd count.
  counts.values.count(&:odd?) <= 1
end
| 20.75 | 68 | 0.606791 |
3961f28d838a049e610bdae2d8ac3ea37311e13c | 2,462 | module Lita
  # A namespace to hold all subclasses of {Adapter}.
  module Adapters
    # An adapter that runs Lita in a UNIX shell.
    class Shell < Adapter
      # When true, every message is treated as a command (no room).
      config :private_chat, default: false

      # Creates a "Shell User" and then loops a prompt and input, passing the
      # incoming messages to the robot.
      # @return [void]
      def run
        user = User.create(1, name: "Shell User")
        room = robot.config.adapters.shell.private_chat ? nil : "shell"
        @source = Source.new(user: user, room: room)
        puts t("startup_message")
        robot.trigger(:connected)

        run_loop
      end

      # Outputs outgoing messages to the shell.
      # @param _target [Lita::Source] Unused, since there is only one user in the
      #   shell environment.
      # @param strings [Array<String>] An array of strings to output.
      # @return [void]
      def send_messages(_target, strings)
        strings = Array(strings)
        strings.reject!(&:empty?)
        # Colorize output green on TTYs that support ANSI escapes.
        unless RbConfig::CONFIG["host_os"] =~ /mswin|mingw/ || !$stdout.tty?
          strings.map! { |string| "\e[32m#{string}\e[0m" }
        end
        puts strings
      end

      # Adds a blank line for a nice looking exit.
      # @return [void]
      def shut_down
        puts
      end

      private

      # Wraps raw input in a Message; in private-chat mode every message
      # is marked as a command.
      def build_message(input, source)
        message = Message.new(robot, input, source)
        message.command! if robot.config.adapters.shell.private_chat
        message
      end

      # Drops blank input, and consecutive duplicates, from the Readline
      # history so up-arrow stays useful.
      def normalize_history(input)
        if input == "" || (Readline::HISTORY.size >= 2 && input == Readline::HISTORY[-2])
          Readline::HISTORY.pop
        end
      end

      # Strips the trailing newline and surrounding whitespace.
      def normalize_input(input)
        input.chomp.strip
      end

      # Reads one line from the prompt; returns nil on EOF (Ctrl-D).
      def read_input
        input = Readline.readline("#{robot.name} > ", true)
        # Input read via rb-readline will always be encoded as US-ASCII.
        # @see https://github.com/luislavena/rb-readline/blob/master/lib/readline.rb#L1
        input.force_encoding(Encoding.default_external) if input
        input
      end

      # Prompt/dispatch loop; exits on EOF or an "exit"/"quit" command.
      def run_loop
        loop do
          input = read_input
          if input.nil?
            puts
            break
          end
          input = normalize_input(input)
          normalize_history(input)
          break if input == "exit" || input == "quit"
          robot.receive(build_message(input, @source))
        end
      end
    end

    Lita.register_adapter(:shell, Shell)
  end
end
| 29.309524 | 89 | 0.595451 |
87ed72fa1288da47f32eaf4f1b36baa9e3dc80bc | 6,247 | I18n::Backend::ActiveRecord::Translation.create!(
  # German (:de) translations for property states, types, origins,
  # labels and extras, persisted through the i18n-active_record backend.
  [
    {locale: "de", key: "propertyStates.underConstruction", value: "Im Bau"},
    {locale: "de", key: "propertyStates.brandNew", value: "Neubau"},
    {locale: "de", key: "propertyStates.segundaMano", value: "Aus zweiter hand"},
    {locale: "de", key: "propertyStates.nuevo", value: "Neu"},
    {locale: "de", key: "propertyStates.enConstruccion", value: "Wird noch gebaut"},
    {locale: "de", key: "propertyStates.aReformar", value: "Renovierungsbedürftig"},
    {locale: "de", key: "propertyTypes.edificioResidencial", value: "Wohnhaus"},
    {locale: "de", key: "propertyTypes.villa", value: "Villa"},
    {locale: "de", key: "propertyOrigin.bank", value: "Zwangsversteigerungen"},
    {locale: "de", key: "propertyOrigin.new", value: "Neubau"},
    {locale: "de", key: "propertyOrigin.private", value: "Privat verkauf"},
    {locale: "de", key: "propertyLabels.sold", value: "Verkauft"},
    {locale: "de", key: "propertyLabels.reserved", value: "reserviert"},
    {locale: "de", key: "extras.porche", value: "Vorhalle"},
    {locale: "de", key: "extras.aireAcondicionado", value: "Klimaanlage"},
    {locale: "de", key: "extras.alarma", value: "Alarmanlage"},
    {locale: "de", key: "extras.amueblado", value: "Einrichtung"},
    {locale: "de", key: "extras.armariosEmpotrados", value: "Garderobe"},
    {locale: "de", key: "extras.ascensor", value: "Fahrstuhl"},
    {locale: "de", key: "extras.balcon", value: "Balkon"},
    {locale: "de", key: "extras.banoTurco", value: "Dampfbad"},
    {locale: "de", key: "extras.calefaccionCentral", value: "Zentral heizung"},
    {locale: "de", key: "extras.calefaccionElectrica", value: "Elektrische heizung"},
    {locale: "de", key: "extras.calefaccionPropano", value: "Propan heizung"},
    {locale: "de", key: "extras.cocinaIndependiente", value: "Independent küche"},
    # {locale: "de", key: "extras.electrodomesticos", value: "White Weiße Ware - unknown"},
    {locale: "de", key: "extras.energiaSolar", value: "Solar energie"},
    {locale: "de", key: "extras.garajeComunitario", value: "Gemeinschafts garage"},
    # NOTE(review): the two values below look like placeholder/garbled
    # translations ("Private Private", "Ceramic Steinfußboden") — verify.
    {locale: "de", key: "extras.garajePrivado", value: "Private Private"},
    {locale: "de", key: "extras.gresCeramica", value: "Ceramic Steinfußboden"},
    {locale: "de", key: "extras.horno", value: "Ofen"},
    {locale: "de", key: "extras.jacuzzi", value: "Jacuzzi"},
    {locale: "de", key: "extras.jardinComunitario", value: "Gemeinschafts garten"},
    {locale: "de", key: "extras.jardinPrivado", value: "Privater garten"},
    {locale: "de", key: "extras.lavadero", value: "Waschküche"},
    {locale: "de", key: "extras.lavadora", value: "Waschmaschine"},
    {locale: "de", key: "extras.microondas", value: "Mikrowelle"},
    {locale: "de", key: "extras.nevera", value: "Kühlschrank"},
    {locale: "de", key: "extras.parquet", value: "Holzfußboden"},
    {locale: "de", key: "extras.piscinaClimatizada", value: "Beheizter pool"},
    {locale: "de", key: "extras.piscinaComunitaria", value: "Gemeinschafts pool"},
    {locale: "de", key: "extras.piscinaPrivada", value: "Privater pool"},
    # NOTE(review): duplicate key "extras.porche" — also defined above
    # with value "Vorhalle"; create! will insert two rows (or raise if a
    # uniqueness constraint exists).
    {locale: "de", key: "extras.porche", value: "Porch"},
    {locale: "de", key: "extras.puertaBlindada", value: "Stahltür"},
    {locale: "de", key: "extras.sauna", value: "Sauna"},
    {locale: "de", key: "extras.servPorteria", value: "Hausmeister"},
    {locale: "de", key: "extras.sueloMarmol", value: "Marmorfußboden"},
    {locale: "de", key: "extras.terraza", value: "Terrasse"},
    {locale: "de", key: "extras.trastero", value: "Abstellraum"},
    {locale: "de", key: "extras.tv", value: "Fehrnseher"},
    {locale: "de", key: "extras.videoportero", value: "Zugangskontrolle"},
    {locale: "de", key: "extras.vigilancia", value: "Sicherheitsfirma"},
    {locale: "de", key: "extras.vistasAlMar", value: "Meerblick"},
    {locale: "de", key: "extras.zComunitaria", value: "Gemeinschaftsbereich"},
    {locale: "de", key: "extras.zonaDeportiva", value: "Sportplatz"},
    {locale: "de", key: "extras.cercaDeServicios", value: "Einkauf in der Nähe"},
    {locale: "de", key: "extras.calefaccionGasCiudad", value: "Naturgas heizung"},
    {locale: "de", key: "extras.calefaccionGasoleo", value: "Diesel heizung"},
    {locale: "de", key: "extras.zonasInfantiles", value: "Kinderspielplatz"},
    {locale: "de", key: "extras.sueloRadiante", value: "Fußbodenheizung"},
    {locale: "de", key: "extras.semiamueblado", value: "Halbeingerichtet"},
    {locale: "de", key: "extras.chimenea", value: "Feuerstelle"},
    {locale: "de", key: "extras.barbacoa", value: "Grill"},
    # {locale: "de", key: "extras.porsche", value: "Porsche"}, Porsche
    {locale: "de", key: "extras.solarium", value: "Solarium"},
    {locale: "de", key: "extras.patioInterior", value: "Hinterhof"},
    {locale: "de", key: "extras.vistasALaMontana", value: "Blick auf die berge"},
    {locale: "de", key: "extras.vistasAlJardin", value: "Blick auf den garten"},
    {locale: "de", key: "extras.urbanizacion", value: "Kleinstadt"},
    {locale: "de", key: "extras.zonaTranquila", value: "Ruhige Gegend"},
    {locale: "de", key: "propertyTypes.apartamento", value: "Apartment"},
    {locale: "de", key: "propertyTypes.chaletIndependiente", value: "Chalet"},
    {locale: "de", key: "propertyTypes.bungalow", value: "Bungalow"},
    {locale: "de", key: "propertyTypes.inversion", value: "Investition"},
    {locale: "de", key: "propertyTypes.solar", value: "Grundstück"},
    {locale: "de", key: "propertyTypes.duplex", value: "Duplex"},
    {locale: "de", key: "propertyTypes.piso", value: "Wohnung"},
    {locale: "de", key: "propertyTypes.hotel", value: "Hotel"},
    {locale: "de", key: "propertyTypes.chaletAdosado", value: "Halb freistehend"},
    {locale: "de", key: "propertyTypes.atico", value: "Penthouse"},
    {locale: "de", key: "propertyTypes.estudio", value: "Studio"},
    {locale: "de", key: "propertyTypes.garaje", value: "Stellplatz"},
    {locale: "de", key: "propertyTypes.local", value: "Geschäftsräume"},
    {locale: "de", key: "propertyTypes.trastero", value: "Geschäft"},
    {locale: "de", key: "propertyTypes.casaRural", value: "Landhaus"}
  ])
| 67.172043 | 92 | 0.638386 |
f70f826cb02a1d235343a620ab2c77bffc695224 | 122 | module YaAcl
class Resource
attr_reader :name
def initialize(name)
@name = name.to_sym
end
end
end
| 12.2 | 25 | 0.647541 |
18042dcfc88c409558c8e0d5da2d8b9c682b83df | 423 | require 'spec_helper'
# Specs in this file have access to a helper object that includes
# the IndividualHelper. For example:
#
# describe IndividualHelper do
# describe "string concat" do
# it "concats two strings with spaces" do
# expect(helper.concat_strings("this","that")).to eq("this that")
# end
# end
# end
# Generated placeholder — IndividualHelper has no behaviour under test
# yet, so this only registers a pending reminder.
describe IndividualHelper do
  pending "add some examples to (or delete) #{__FILE__}"
end
| 26.4375 | 71 | 0.713948 |
218f34bff695aba7bdc458850b7ee36b142bd906 | 2,035 | module Dag
  #Validations on model instance creation. Ensures no duplicate links, no cycles, and correct count and direct attributes
  class CreateCorrectnessValidator < ActiveModel::Validator

    def validate(record)
      # record.errors[:base] << 'Link already exists between these points' if has_duplicates(record)
      record.errors[:base] << 'Link already exists in the opposite direction' if has_long_cycles(record)
      record.errors[:base] << 'Link must start and end in different places' if has_short_cycles(record)
      cnt = check_possible(record)
      record.errors[:base] << 'Cannot create a direct link with a count other than 0' if cnt == 1
      record.errors[:base] << 'Cannot create an indirect link with a count less than 1' if cnt == 2
    end

    private

    #check for duplicates
    # NOTE(review): intentionally disabled — always returns false; the
    # original find_link lookup is kept below for reference.
    def has_duplicates(record)
      return false
      # record.class.find_link(record.source, record.sink)
    end

    #check for long cycles
    # True when a link already exists in the opposite direction
    # (sink -> source), which would create a cycle.
    def has_long_cycles(record)
      record.class.find_link(record.sink, record.source)
    end

    #check for short cycles
    # True when source and sink are the same node (self-loop).
    def has_short_cycles(record)
      record.sink.matches?(record.source)
    end

    #check not impossible
    # Returns 0 when consistent, 1 for a direct link with nonzero count,
    # 2 for an indirect link with count < 1.
    def check_possible(record)
      record.direct? ? (record.count != 0 ? 1 : 0) : (record.count < 1 ? 2 : 0)
    end
  end

  #Validations on update. Makes sure that something changed, that not making a lonely link indirect, and count is correct.
  class UpdateCorrectnessValidator < ActiveModel::Validator

    def validate(record)
      record.errors[:base] << "No changes" unless record.changed?
      record.errors[:base] << "Do not manually change the count value" if manual_change(record)
      record.errors[:base] << "Cannot make a direct link with count 1 indirect" if direct_indirect(record)
    end

    private

    # True when both the direct flag and the count changed in the same
    # update — the count is maintained internally.
    def manual_change(record)
      record.direct_changed? && record.count_changed?
    end

    # True when a direct link with exactly one reference is being turned
    # indirect, which would orphan it.
    def direct_indirect(record)
      record.direct_changed? && !record.direct? && record.count == 1
    end
  end
end
| 33.916667 | 123 | 0.698771 |
0888b993a353c24bdfa419f360ff586f6731d7a6 | 1,440 | # Not the same thing as a Regimen. A farm_event is a "dumb" list of sequecnes that
# are executed at fixed intervals. FarmEvents are less flexible than Regimens
# because they can only perform one sequence. Also unlike Regimens, they can run
# forever.
class FarmEvent < ApplicationRecord
  NEVER = "never"
  # Repeat intervals a farm event may use.
  UNITS_OF_TIME = %w(never minutely hourly daily weekly monthly yearly)
  # The polymorphic executable may be a Sequence or a Regimen.
  EXECUTABLE_CLASSES = [Sequence, Regimen]
  FE_USE = "still in use by some farm events"
  # strftime formats for fancy_name (year shown only when not current).
  WITH_YEAR = "%m/%d/%y"
  NO_YEAR = "%m/%d"

  belongs_to :device
  belongs_to :executable, polymorphic: true
  validates :device_id, presence: true
  validates :executable, presence: true
  validate :within_20_year_window
  has_one :fragment, as: :owner

  # Rejects start/end times more than 20 years away from now, in either
  # direction.
  def within_20_year_window
    too_early = start_time && start_time < (Time.now - 20.years)
    too_late = end_time && end_time > (Time.now + 20.years)
    errors.add :start_time, "too far in the past" if too_early
    errors.add :end_time, "too far in the future" if too_late
  end

  # Check if an executable is in use.
  def self.if_still_using(executable)
    yield if self.where(executable: executable).any?
  end

  # Short display date for the start time; includes the year only when
  # it differs from the current year.
  def fancy_name
    start_time.strftime(start_time.year == Time.now.year ? NO_YEAR : WITH_YEAR)
  end

  # Is this resource allowed to take ownership of a celeryscript fragment?
  def fragment_owner?
    true
  end
end
| 35.121951 | 82 | 0.695139 |
ac493a7bce939e606a96700c9d3531e97ceef154 | 536 | # frozen_string_literal: true
require_relative '../../../step/waterfall_auction'
module Engine
  module Game
    module G18Scan
      module Step
        # 18Scan-specific waterfall auction: buying a private company
        # that maps to a minor also floats that minor for the buyer.
        class WaterfallAuction < Engine::Step::WaterfallAuction
          protected

          # After the base-class purchase bookkeeping, float the minor
          # whose id matches the company's sym (if any) and make the
          # purchasing player its owner.
          def buy_company(player, company, price)
            super

            return unless (minor = @game.minor_by_id(company.sym))

            @game.log << "Minor #{minor.name} floats"
            minor.owner = player
            minor.float!
          end
        end
      end
    end
  end
end
| 20.615385 | 66 | 0.583955 |
ab2e215e4478d94188c076f5ae6e0930f40cd775 | 831 | # frozen_string_literal: true
require "rails_helper"
describe ControlledVocabularies::WorkType do
subject { described_class.new(uri) }
context "when the uri is in LCSH" do
let(:uri) do
RDF::URI.new("http://id.loc.gov/authorities/subjects/sh2010014379")
end
it { is_expected.to be_valid }
end
context "when the uri is in AAT" do
let(:uri) { RDF::URI.new("http://vocab.getty.edu/aat/300046300") }
it { is_expected.to be_valid }
end
context "when the uri is not in the vocabulary" do
let(:uri) { RDF::URI.new("http://foo.bar/authorities/names/n79081574") }
it { is_expected.not_to be_valid }
end
describe "#solrize" do
let(:uri) do
RDF::URI.new("http://id.loc.gov/authorities/subjects/sh2010014379")
end
it { is_expected.to respond_to :solrize }
end
end
| 23.083333 | 76 | 0.67509 |
6ad2a8997055c49ac61e4eae596199aab9cb48fc | 6,683 | require 'rails_helper'
module CCMS
module Submitters # rubocop:disable Metrics/ModuleLength
RSpec.describe AddCaseService do
let(:legal_aid_application) do
create :legal_aid_application,
:with_proceeding_types,
:with_everything_and_address,
:with_cfe_v3_result,
:with_positive_benefit_check_result,
:with_substantive_scope_limitation,
office_id: office.id,
populate_vehicle: true
end
let(:applicant) { legal_aid_application.applicant }
let(:office) { create :office }
let(:submission) { create :submission, :applicant_ref_obtained, legal_aid_application: legal_aid_application }
let(:history) { SubmissionHistory.find_by(submission_id: submission.id) }
let(:endpoint) { 'https://sitsoa10.laadev.co.uk/soa-infra/services/default/CaseServices/CaseServices_ep' }
let(:response_body) { ccms_data_from_file 'case_add_response.xml' }
subject { described_class.new(submission) }
around do |example|
VCR.turn_off!
example.run
VCR.turn_on!
end
before do
# stub a post request - any body, any headers
stub_request(:post, endpoint).to_return(body: response_body, status: 200)
# stub the transaction request id that we expect in the response
allow_any_instance_of(CCMS::Requestors::CaseAddRequestor).to receive(:transaction_request_id).and_return('20190301030405123456')
end
context 'operation successful' do
it 'sets state to case_submitted' do
subject.call
expect(submission.aasm_state).to eq 'case_submitted'
end
it 'records the transaction id of the request' do
subject.call
expect(submission.case_add_transaction_id).to eq '20190301030405123456'
end
context 'there are documents to upload' do
let(:submission) { create :submission, :document_ids_obtained, legal_aid_application: legal_aid_application }
it 'writes a history record' do
expect { subject.call }.to change { SubmissionHistory.count }.by(1)
expect(history.from_state).to eq 'document_ids_obtained'
expect(history.to_state).to eq 'case_submitted'
expect(history.success).to be true
expect(history.details).to be_nil
end
it 'stores the reqeust body in the submission history record' do
subject.call
expect(history.request).to be_soap_envelope_with(
command: 'ns4:CaseAddRQ',
transaction_id: '20190301030405123456',
matching: [
'<ns2:PreferredAddress>CLIENT</ns2:PreferredAddress>',
"<ns2:ProviderOfficeID>#{legal_aid_application.office.ccms_id}</ns2:ProviderOfficeID>"
]
)
end
it 'stores the response body in the submission history record' do
subject.call
expect(history.response).to eq response_body
end
end
context 'there are no documents to upload' do
it 'writes a history record' do
expect { subject.call }.to change { SubmissionHistory.count }.by(1)
expect(history.from_state).to eq 'applicant_ref_obtained'
expect(history.to_state).to eq 'case_submitted'
expect(history.success).to be true
expect(history.details).to be_nil
end
it 'stores the request body in the submission history record' do
subject.call
expect(history.request).to be_soap_envelope_with(
command: 'ns4:CaseAddRQ',
transaction_id: '20190301030405123456',
matching: [
"<ns2:ProviderOfficeID>#{legal_aid_application.office.ccms_id}</ns2:ProviderOfficeID>"
]
)
end
it 'writes the response body to the history record' do
subject.call
expect(history.response).to eq response_body
end
end
end
context 'operation in error' do
context 'error when adding a case' do
let(:error) { [CCMS::CCMSError, Savon::Error, StandardError] }
before do
expect_any_instance_of(CCMS::Requestors::CaseAddRequestor).to receive(:call).and_raise(error.sample, 'oops')
end
it 'puts it into failed state' do
subject.call
expect(submission.aasm_state).to eq 'failed'
end
it 'records the error in the submission history' do
expect { subject.call }.to change { SubmissionHistory.count }.by(1)
expect(history.from_state).to eq 'applicant_ref_obtained'
expect(history.to_state).to eq 'failed'
expect(history.success).to be false
expect(history.details).to match(/#{error}/)
expect(history.details).to match(/oops/)
expect(history.request).to be_soap_envelope_with(
command: 'ns4:CaseAddRQ',
transaction_id: '20190301030405123456',
matching: [
'<ns2:PreferredAddress>CLIENT</ns2:PreferredAddress>',
"<ns2:ProviderOfficeID>#{legal_aid_application.office.ccms_id}</ns2:ProviderOfficeID>"
]
)
end
end
context 'unsuccessful response from CCMS adding a case' do
let(:response_body) { ccms_data_from_file 'case_add_response_failure.xml' }
it 'puts it into failed state' do
subject.call
expect(submission.aasm_state).to eq 'failed'
end
it 'records the error in the submission history' do
expect { subject.call }.to change { SubmissionHistory.count }.by(1)
expect(history.from_state).to eq 'applicant_ref_obtained'
expect(history.to_state).to eq 'failed'
expect(history.success).to be false
end
it 'stores the reqeust body in the submission history record' do
subject.call
expect(history.request).to be_soap_envelope_with(
command: 'ns4:CaseAddRQ',
transaction_id: '20190301030405123456',
matching: [
"<ns2:ProviderOfficeID>#{legal_aid_application.office.ccms_id}</ns2:ProviderOfficeID>"
]
)
end
it 'stores the response body in the submission history record' do
subject.call
expect(history.response).to eq response_body
end
end
end
end
end
end
| 39.081871 | 136 | 0.616789 |
5da80289acff675072ae15b7fcfc1e74e99734da | 1,064 | class OpinionsController < ApplicationController
before_action :set_opinion, only: %i[show edit update destroy]
def index
@opinion = Opinion.new
@opinions = Opinion.all.includes(:author).order('created_at DESC')
@users = User.all - helpers.current_user.following - [helpers.current_user]
end
def create
@opinion = Opinion.new(AuthorId: session[:user_id], Text: opinion_params[:Text])
respond_to do |format|
if @opinion.save
format.html { redirect_to opinions_path, notice: 'Opinion was successfully created.' }
format.json { render :show, status: :created, location: @opinion }
else
format.html { render :new }
end
end
end
def destroy
@opinion.destroy
respond_to do |format|
format.html { redirect_to opinions_url, notice: 'Opinion was successfully destroyed.' }
format.json { head :no_content }
end
end
private
def set_opinion
@opinion = Opinion.find(params[:id])
end
def opinion_params
params.require(:opinion).permit(:Text)
end
end
| 25.95122 | 94 | 0.683271 |
338d53a938540846408780c1ec875646e642743f | 352 | #module UserHashFactory
# Builds an attribute hash for a not-yet-registered user: random
# first/last names, a fixed password, and an email derived from the
# names.  :account_id is included only when one is supplied.
def unregistered_user(account_id = nil)
  user = {
    first_name: RandomWord.nouns.next,
    last_name: RandomWord.nouns.next,
    password: "secret123"
  }
  user[:email] = "#{user[:first_name]}.#{user[:last_name]}@corp.com"
  # Only attach the account when the caller supplied one.
  user[:account_id] = account_id unless account_id.nil?
  user
end
#end | 29.333333 | 70 | 0.653409 |
d5d981bf51a029ba6cbe4109b4f8f1bfaf5580e5 | 236 | class Excursion < ApplicationRecord
belongs_to :port
has_many :passenger_excursions
has_many :passengers, through: :passenger_excursions
# Human-readable duration for the given excursion, e.g. "3 hours".
#
# BUG FIX: +pluralize+ is an ActionView helper and is not mixed into
# models, so calling it directly here raised NoMethodError; go through
# the shared helper proxy instead. (NOTE(review): the excursion argument
# mirrors the original signature even though this is an instance method.)
def excursion_pluralize(excursion)
  ActionController::Base.helpers.pluralize(excursion.duration, "hour")
end
end
| 21.454545 | 54 | 0.783898 |
abde27f86d07b10fd17f5f101ee2c8dea3c2ba2c | 4,851 | namespace :dev do
# Seeds the development database from scratch: drops/creates/migrates the
# schema, then generates fake Apps, Admins, Managers, Users, Households,
# Contents, Symptoms, PublicHospitals and Surveys via Faker.
desc "Set up the development environment with sample data"
task setup: :environment do
  puts "Configuring Development environment"
  show_spinner("Dropping db...") { %x(rails db:drop) }
  show_spinner("Creating db...") { %x(rails db:create) }
  show_spinner("Migrating db...") { %x(rails db:migrate) }

  show_spinner("Creating Apps...") do
    # BUG FIX: the original `1..20.times do ... end` ran the block via
    # `20.times` and then built a throwaway Range; say what is meant.
    20.times do
      App.create!(
        app_name: Faker::Company.name,
        owner_country: Faker::Address.country
      )
    end
  end

  show_spinner("Creating admins...") do
    App.all.each do |app|
      2.times do
        Admin.create!(
          email: Faker::Internet.email,
          password: "12345678",
          first_name: Faker::Name.first_name,
          last_name: Faker::Name.last_name,
          is_god: [true, false].sample,
          # BUG FIX: every admin was attached to app 1 even though we are
          # iterating over all apps; use the app being iterated.
          app_id: app.id
        )
      end
      Permission.create!(
        models_create: [:content, :symptom],
        models_read: [:all],
        models_update: [:content, :symptom],
        models_destroy: [:content],
        models_manage: [],
      )
      Manager.create!(
        email: Faker::Internet.email,
        password: "12345678",
        name: Faker::Name.first_name,
        # BUG FIX: managers were all attached to app 1 as well.
        app_id: app.id
      )
    end
  end

  show_spinner("Creating 100 example users...") do
    100.times do
      User.create!(
        user_name: Faker::Name.name,
        email: Faker::Internet.email,
        password: "12345678",
        birthdate: Faker::Date.birthday(18, 65),
        country: Faker::Address.country,
        gender: Faker::Gender.type,
        race: "human",
        is_professional: false,
        app: App.all.first
      )
    end
  end

  show_spinner("Inserting Kinships on created users...") do
    kinships = ["Pai", "Mãe", "Filho", "Conjuge"]
    User.all.each do |user|
      3.times do
        Household.create!(
          description: Faker::Name.name,
          birthdate: Faker::Date.birthday(18, 65),
          country: Faker::Address.country,
          gender: ["male", "female"].sample,
          race: "human",
          kinship: kinships.sample,
          user_id: user.id
        )
      end
    end
  end

  show_spinner("Creating 10 example content...") do
    10.times do
      Content.create!(
        title: Faker::Movies::LordOfTheRings.character,
        content_type: Faker::Music.genre,
        body: Faker::Lorem.paragraph([1, 2, 3, 4].sample, false, [1, 2, 3, 4].sample),
        app_id: App.all.sample.id
      )
    end
  end

  show_spinner("Creating 10 symptoms...") do
    App.all.each do |a|
      10.times do
        Symptom.create!(
          description: Faker::Name.name,
          code: Faker::Number.number,
          priority: rand(0..10),
          details: Faker::Quotes::Shakespeare.as_you_like_it_quote,
          app_id: a.id
        )
      end
    end
  end

  show_spinner("Creating 50 example Public Hospitals...") do
    50.times do
      PublicHospital.create!(
        description: Faker::Company.name,
        latitude: Faker::Address.latitude,
        longitude: Faker::Address.longitude,
        kind: Faker::Movies::StarWars.character,
        phone: Faker::PhoneNumber.phone_number,
        details: Faker::Lorem.paragraph([1, 2].sample, false, [1, 2].sample),
        app_id: App.all.sample.id
      )
    end
  end

  show_spinner("Create some surveys") do
    symptom_descriptions = Symptom.all.map(&:description)
    User.all.each do |user|
      3.times do
        # Each survey either reports no symptoms (nil) or the full list.
        Survey.create!(
          latitude: 40.741934119747704,
          longitude: -73.98951017150449,
          symptom: [nil, symptom_descriptions].sample,
          user_id: user.id,
        )
      end
    end
  end
end
# Production bootstrap: creates the Brazil app and a couple of sample
# surveys for each existing user. No-ops outside the production env.
task create_prod: :environment do
  if Rails.env.production?
    show_spinner("Criando Aplicativo do Brasil...") do
      App.create(
        owner_country: "Brasil",
        app_name: "Guardioes da Saude",
        twitter: "appguardioes"
      )
    end
    # BUG FIX: the original referenced undefined locals `symptom_arr` and
    # `u`, raising NameError as soon as the task ran. Mirror the dev
    # `setup` task: collect the symptom descriptions and create the
    # sample surveys per user.
    symptom_arr = Symptom.all.map(&:description)
    User.all.each do |u|
      2.times do
        Survey.create!(
          latitude: 40.741934119747704,
          longitude: -73.98951017150449,
          symptom: symptom_arr,
          user_id: u.id,
        )
      end
    end
  end
end
# Runs the given block behind a TTY spinner: shows +start_msg+ while the
# block executes, then marks success with +end_msg+ (default "done").
def show_spinner(start_msg, end_msg = "done")
  pastel = Pastel.new
  spinner = TTY::Spinner.new(
    "[#{pastel.yellow(':spinner')}] " + pastel.yellow(start_msg),
    success_mark: pastel.green('+')
  )
  spinner.auto_spin
  yield
  spinner.success(pastel.green(end_msg))
end
end | 26.653846 | 82 | 0.542981 |
28f88247d3756381b81b5aa9de3594774dbec657 | 1,397 | # Any code below will be evaluated/executed within the scope of the caller.
# How to reference the author of an article.
#
# The proc is executed in the caller's scope (see the instance_eval in the
# authorize_if rule below), so `send`/`respond_to?` hit the controller.
# Resolution order:
#   1. Devise loaded          -> current_<default_scope> (e.g. current_user)
#   2. current_user helper    -> current_user
#   3. otherwise              -> fail loudly with setup instructions
Governor.author = Proc.new do
  if defined?(Devise)
    send("current_#{Devise.default_scope}")
  elsif respond_to?(:current_user)
    current_user
  else
    raise "Please define Governor.author. Run `rails generator governor:configure` to install an initializer."
  end
end
# Rules for authorizing a particular action on a particular article.
Governor.authorize_if do |action, article|
  case action.to_sym
  when :new, :create
    # Creating only requires a signed-in user; probe for whichever
    # authentication helper the host app provides, in priority order.
    if defined?(Devise) && respond_to?("#{Devise.default_scope}_signed_in?")
      send("#{Devise.default_scope}_signed_in?")
    elsif respond_to?(:signed_in?)
      signed_in?
    elsif respond_to?(:current_user)
      current_user.present?
    else
      raise "Please define Governor.authorize_if. Run `rails generator governor:configure` to install an initializer."
    end
  when :edit, :update, :destroy
    # Mutations are restricted to the article's own author.
    article.author == instance_eval(&Governor.author)
  else
    raise ArgumentError.new('action must be new, create, edit, update, or destroy')
  end
end
# What should Governor do if someone tries to do something they weren't
# authorized to do?
Governor.if_not_allowed do
  if defined?(Devise)
    # Bounce through Devise's authenticate_<scope>! before_action.
    send("authenticate_#{Devise.default_scope}!")
  elsif respond_to?(:deny_access)
    deny_access
  else
    redirect_to :root
  end
end | 31.75 | 118 | 0.733715 |
38cb76dc8a0632310c027db9546753a2c6f3c63c | 5,261 | module ExternalVariableProcessing
# issues
# testing
# add checking for colon
## need to clean this up
## need to test
# Mirrors a class's class-level instance variables (@foo set on the class
# object itself) into C-style external variable declarations.
#
# For each @foo on +klass+ it defines a class-level attr_accessor :foo,
# reads the current value, and hands (name, value) to pre_process_vars,
# which emits the matching C declaration.
#
# BUG FIX / IDIOM: the original round-tripped through string `eval`s
# (also calling String#gsub on what are Symbols on Ruby >= 1.9); direct
# reflection is equivalent, safer, and not injectable.
def process_external_vars(klass)
  names = klass.instance_variables.map { |ivar| ivar.to_s.gsub("@", "") }
  # Equivalent of: class << klass; attr_accessor :foo, :bar; end
  klass.singleton_class.send(:attr_accessor, *names.map(&:to_sym))
  names.each do |name|
    pre_process_vars(name, klass.public_send(name))
  end
end
# Normalizes one external variable into (name, C type, value) and forwards
# it to post_process_vars / post_process_arrays.
#
# Dispatch on the Ruby class of +var+:
#   Integer / Float -> direct "int" / "float" declaration
#   true / false    -> "bool" declared as 1 / 0
#   Array           -> C array declaration via post_process_arrays
#   String          -> either a bare C type name (declaration without an
#                      initial value) or a "value, type" pair handed to
#                      translate_variables
def pre_process_vars(name, var)
  case var
  when Integer
    value = var
    type = "int"
    post_process_vars(name, type, value)
  when Float
    value = var
    type = "float"
    post_process_vars(name, type, value)
  when String
    # A comma-free string mentioning a C integer type keyword is a
    # type-only declaration (e.g. "unsigned int"); anything else is a
    # "value" or "value, type" pair to be classified further.
    if var.match(",").nil? && var =~ /long|byte|unsigned|int|short/
      type = var
      value = nil
      post_process_vars(name, type, value)
    else
      value = var.split(",").first.lstrip
      type = var.split(",")[1].nil? ? nil : var.split(",")[1].lstrip
      translate_variables( name , type, value )
    end
  when TrueClass
    value = 1
    type = "bool"
    post_process_vars(name, type, value)
  when FalseClass
    value = 0
    type = "bool"
    post_process_vars(name, type, value)
  when Array
    post_process_arrays(name, var)
  else
    raise ArgumentError, "not sure what to do here... got #{name} with value #{var} which is a #{var.class}"
  end
end
# Classifies a (name, declared C type, value) triple -- where value arrives
# either as a Ruby number or as the string half of a "value, type" pair --
# coerces it, and emits the declaration via post_process_vars.
# Raises ArgumentError for unusable combinations.
def translate_variables(name, type = nil, value = nil)
  unless type.nil?
    check_variable_type(type)
  end
  # classify the values
  if value.is_a?(Integer)
    # BUG FIX: was `value.class == Fixnum`, which raises NameError on
    # Ruby >= 3.2 and never matched Bignum; Integer covers both.
    # Already numeric -- nothing to coerce.
  elsif value.is_a?(Float)
    # already a float -- nothing to coerce
  elsif value =~ /^-(\d|x)*$/
    # negative integer literal
    value = value.to_i
    type = "int" if type.nil?
  elsif value =~ /^-(\d|\.|x)*$/
    # negative float literal; an explicit non-float type is a caller error
    value = value.to_f
    unless type.nil?
      raise ArgumentError, "#{value} should be a float got #{type}" unless type == "float"
    end
    type = "float" if type.nil?
  elsif value[0,1] !~ /\d/
    # non-numeric leading character: treat as a C string literal
    type = "char*"
    value = "\"#{value}\""
  elsif value !~ /(\.|x)/
    # digits with no dot and no hex marker: plain integer
    value = value.to_i
    type = "int" if type.nil?
  elsif value =~ /(\d*\.\d*)/
    value = value.to_f
    type = "float"
  elsif value =~ /0x\d\d/
    # hex literal like 0x2A
    type = "byte"
  else
    raise ArgumentError, "not sure what to do with a value of #{value} with a type like #{type}"
  end
  post_process_vars(name, type, value)
end
# Records a scalar declaration: registers the C identifier (__name) once
# and appends "type __name[ = value];" to the generated externals.
def post_process_vars(name, type, value = nil)
  identifier = "__#{name}"
  assignment = value ? " = #{value}" : ""
  $external_var_identifiers << identifier unless $external_var_identifiers.include?(identifier)
  $external_vars << "#{type} #{identifier}#{assignment};"
end
# Records a C array declaration for +var+, inferring the element type from
# the first element, e.g. [1, 2] -> "int __name[] = {1, 2};".
def post_process_arrays(name, var)
  element_type = c_type(var[0])
  $array_types[name] = element_type
  initializer = var.inspect.tr("[]", "{}")
  declaration = "#{element_type} __#{name}[] = #{initializer};"
  $external_var_identifiers << "__#{name}" unless $external_var_identifiers.include?("__#{name}")
  $external_array_vars << declaration unless $external_array_vars.include?(declaration)
end
# Validates a declared C type against the supported list (C_VAR_TYPES is a
# pipe-separated pattern); raises ArgumentError for anything else.
def check_variable_type(type)
  return if type =~ /#{C_VAR_TYPES}/
  raise ArgumentError, "the following variable types are supported \n #{C_VAR_TYPES.gsub("|",", ")} got #{type}"
end
# Maps a Ruby value to the C type name used in generated declarations
# ("int", "float", "char*", "bool"); raises for anything unmapped.
#
# GENERALIZATION: Float support added -- array seeding via
# post_process_arrays previously crashed with "Bug! Unknown type" on float
# elements even though scalar floats are handled elsewhere in this module.
def c_type(typ)
  case typ
  when Integer
    "int"
  when Float
    "float"
  when String
    "char*"
  when TrueClass, FalseClass
    "bool"
  else
    raise "Bug! Unknown type #{typ.inspect} in c_type"
  end
end
end | 34.385621 | 119 | 0.541342 |
7a9603ea4a020c428afb39082999952f71f5c6f1 | 883 | require 'spec_helper'
require_relative '../../../../apps/web/views/books/new'
# Stand-in params class mirroring the validations the books "new" form
# expects (title and author required), so the view can be exercised with a
# real Hanami params object carrying validation errors.
class NewBookParams < Hanami::Action::Params
  params do
    required(:book).schema do
      required(:title).filled(:str?)
      required(:author).filled(:str?)
    end
  end
end
describe Web::Views::Books::New do
  # An empty :book payload makes both the title and author validations fail.
  let(:params) { NewBookParams.new(book: {}) }
  let(:exposures) { Hash[params: params, format: :html] }
  let(:template) do
    Hanami::View::Template.new('apps/web/templates/books/new.html.erb')
  end
  let(:view) { Web::Views::Books::New.new(template, exposures) }
  let(:rendered) { view.render }

  it 'displays list of errors when params contains errors' do
    params.valid? # trigger validations
    rendered.must_include('There was a problem with your submission')
    rendered.must_include('Title is missing')
    rendered.must_include('Author is missing')
  end
end
| 29.433333 | 71 | 0.682899 |
4a105648a4237af85c84ac7987bc1d5c391d9407 | 1,772 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-transcribestreamingservice/types'
require_relative 'aws-sdk-transcribestreamingservice/client_api'
require_relative 'aws-sdk-transcribestreamingservice/client'
require_relative 'aws-sdk-transcribestreamingservice/errors'
require_relative 'aws-sdk-transcribestreamingservice/resource'
require_relative 'aws-sdk-transcribestreamingservice/customizations'
require_relative 'aws-sdk-transcribestreamingservice/async_client'
require_relative 'aws-sdk-transcribestreamingservice/event_streams'
# This module provides support for Amazon Transcribe Streaming Service. This module is available in the
# `aws-sdk-transcribestreamingservice` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# transcribe_streaming_service = Aws::TranscribeStreamingService::Client.new
# resp = transcribe_streaming_service.start_stream_transcription(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from Amazon Transcribe Streaming Service are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::TranscribeStreamingService::Errors::ServiceError
# # rescues all Amazon Transcribe Streaming Service API errors
# end
#
# See {Errors} for more information.
#
# @service
module Aws::TranscribeStreamingService
  # Version of the aws-sdk-transcribestreamingservice gem; maintained by
  # the AWS SDK code generator (see the generated-code warning above).
  GEM_VERSION = '1.16.1'
end
| 32.218182 | 103 | 0.793454 |
e92000194b1a80494622cc88ee284642068e3073 | 4,169 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Mutations::DesignManagement::Upload do
include DesignManagementTestHelpers
include ConcurrentHelpers
let(:issue) { create(:issue) }
let(:user) { issue.author }
let(:project) { issue.project }
subject(:mutation) do
described_class.new(object: nil, context: { current_user: user }, field: nil)
end
def run_mutation(files_to_upload = files, project_path = project.full_path, iid = issue.iid)
mutation = described_class.new(object: nil, context: { current_user: user }, field: nil)
mutation.resolve(project_path: project_path, iid: iid, files: files_to_upload)
end
describe "#resolve" do
let(:files) { [fixture_file_upload('spec/fixtures/dk.png')] }
subject(:resolve) do
mutation.resolve(project_path: project.full_path, iid: issue.iid, files: files)
end
shared_examples "resource not available" do
it "raises an error" do
expect { resolve }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
context "when the feature is not available" do
before do
enable_design_management(false)
end
it_behaves_like "resource not available"
end
context "when the feature is available" do
before do
enable_design_management
end
describe 'contention in the design repo' do
before do
issue.design_collection.repository.create_if_not_exists
end
let(:files) do
['dk.png', 'rails_sample.jpg', 'banana_sample.gif']
.cycle
.take(Concurrent.processor_count * 2)
.map { |f| RenameableUpload.unique_file(f) }
end
def creates_designs
prior_count = DesignManagement::Design.count
expect { yield }.not_to raise_error
expect(DesignManagement::Design.count).to eq(prior_count + files.size)
end
describe 'running requests in parallel' do
it 'does not cause errors' do
creates_designs do
run_parallel(files.map { |f| -> { run_mutation([f]) } })
end
end
end
describe 'running requests in parallel on different issues' do
it 'does not cause errors' do
creates_designs do
issues = create_list(:issue, files.size, author: user)
issues.each { |i| i.project.add_developer(user) }
blocks = files.zip(issues).map do |(f, i)|
-> { run_mutation([f], i.project.full_path, i.iid) }
end
run_parallel(blocks)
end
end
end
describe 'running requests in serial' do
it 'does not cause errors' do
creates_designs do
files.each do |f|
run_mutation([f])
end
end
end
end
end
context "when the user is not allowed to upload designs" do
let(:user) { create(:user) }
it_behaves_like "resource not available"
end
context "with a valid design" do
it "returns the updated designs" do
expect(resolve[:errors]).to eq []
expect(resolve[:designs].map(&:filename)).to contain_exactly("dk.png")
end
end
context "when passing an invalid project" do
let(:project) { build(:project) }
it_behaves_like "resource not available"
end
context "when passing an invalid issue" do
let(:issue) { build(:issue) }
it_behaves_like "resource not available"
end
context "when creating designs causes errors" do
before do
fake_service = double(::DesignManagement::SaveDesignsService)
allow(fake_service).to receive(:execute).and_return(status: :error, message: "Something failed")
allow(::DesignManagement::SaveDesignsService).to receive(:new).and_return(fake_service)
end
it "wraps the errors" do
expect(resolve[:errors]).to eq(["Something failed"])
expect(resolve[:designs]).to eq([])
end
end
end
end
end
| 29.567376 | 106 | 0.614296 |
ab6dc6cc55df5814425ddf449c4507af6277964e | 4,437 | require 'sinatra/reloader' if development?
require 'pry' if development?
require 'json'
# Minimal Sinatra app demonstrating the OAuth2 authorization-code flow
# against the Procore API: sign in, token refresh, and proxied API calls
# using the tokens stored in the session.
class App < Sinatra::Base
  set :sessions, true
  set :inline_templates, true # templates live below __END__
  set :logging, true

  configure :development do
    register Sinatra::Reloader
  end

  # OAuth2 client configured from the environment, with local-dev defaults.
  # NOTE(review): the token_method parameter is currently unused — confirm
  # whether it should be passed through to OAuth2::Client.
  def client(token_method = :post)
    OAuth2::Client.new(
      ENV.fetch('PROCORE_CLIENT_ID', 'proauth-local'),
      ENV.fetch('PROCORE_CLIENT_SECRET', 'pleaseUseA4RealSecret.'),
      site: ENV.fetch('PROCORE_API_URL') {"http://localhost:3000"},
    )
  end

  # Wraps the session's tokens in an OAuth2::AccessToken (used by /refresh).
  def access_token
    OAuth2::AccessToken.new(client, session[:access_token], refresh_token: session[:refresh_token])
  end

  # Callback URL registered with the OAuth provider.
  def redirect_uri
    ENV.fetch('PROCORE_OAUTH2_REDIRECT_URI') {'http://localhost:5000/callback'}
  end

  # Server-side GET to the API using the session's bearer token.
  def authorized_api_request(path, query_string=nil)
    HTTParty.get("#{client.site}/#{path}?#{query_string}",
      headers: {
        'Authorization' => "Bearer #{session[:access_token]}",
        'Accept' => 'application/json',
      })
  end

  get '/' do
    erb :home
  end

  # Kick off the authorization-code flow at the provider.
  get '/sign_in' do
    redirect client.auth_code.authorize_url(redirect_uri: redirect_uri)
  end

  # Drop the tokens from the session (template also clears localStorage).
  get '/sign_out' do
    session[:access_token] = nil
    session[:refresh_token] = nil
    erb :sign_out
  end

  # The provider redirects here with ?code=...; exchange it for tokens.
  get '/callback' do
    token = client.auth_code.get_token(params[:code], redirect_uri: redirect_uri)
    session[:access_token] = token.token
    session[:refresh_token] = token.refresh_token
    erb :callback
  end

  # Exchange the refresh token for a fresh access token.
  get '/refresh' do
    token = access_token.refresh!
    session[:access_token] = token.token
    session[:refresh_token] = token.refresh_token
    erb :refresh
  end

  # Proxy any /api/* path to the upstream API and return its JSON body.
  get '/api/*' do
    result = authorized_api_request(params[:splat].join('/'), request.query_string)
    json JSON.parse(result.body)
  end
end
__END__
@@layout
<!doctype html>
<html class="no-js" lang="">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<title>Oauth Sample Client</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<link rel="stylesheet" href="bootstrap.min.css" media="all" charset="utf-8">
<% if session[:access_token] %>
<script type="text/javascript">
var apiUrl = '<%= "#{ENV['PROCORE_API_URL']}" %>'
var authHeader = 'Bearer <%= session[:access_token] %>'
</script>
<!-- <script src="xmlhttp_request_example.js" charset="utf-8"></script> -->
<script src="fetch_example.js" charset="utf-8"></script>
<% end %>
</head>
<body>
<div class="container">
<div class="page-header clearfix">
<h3 class="text-muted">Procore Oauth Client App</h3>
</div>
<div>
<%= yield %>
</div>
</div>
</body>
</html>
@@callback
<script type="text/javascript">
localStorage.setItem('accessToken', '<%= session[:access_token] %>')
localStorage.setItem('refreshToken', '<%= session[:refresh_token] %>')
localStorage.setItem('apiUrl', '<%= ENV['PROCORE_API_URL'] %>')
window.location = "/";
</script>
@@refresh
<script type="text/javascript">
localStorage.setItem('accessToken', '<%= session[:access_token] %>')
localStorage.removeItem('refreshToken')
window.location = "/";
</script>
@@sign_out
<script type="text/javascript">
console.log('signing out');
localStorage.removeItem('accessToken')
localStorage.removeItem('refreshToken')
window.location = "/";
</script>
@@home
<div class="grid">
<% if session[:access_token] %>
access token <pre><%= session[:access_token] %></pre>
refresh token <pre><a href='/refresh'><%= session[:refresh_token] %></a></pre>
<a href='/sign_out' class='button btn btn-large btn-block btn-warning'>Sign Out</a>
<br>
<fieldset>
<legend>Initate a CORS api request from this browser</legend>
<%= client.site %>/vapid/companies:
<pre id='api-log'>
<code class='lang-js'></code>
</pre>
</fieldset>
<fieldset>
<legend>Initiate an authorized api calls from server:</legend>
<ul class='list'>
<li>
<a href='/api/vapid/companies'>/api/vapid/companies</a>
</li>
<li>
<a href='/api/oauth/token/info'>/api/oauth/tokens/info</a>
</li>
</ul>
</fieldset>
<% else %>
<a href='/sign_in' class='button btn btn-large btn-block btn-primary'>Sign In</a>
<% end %>
</div>
| 27.054878 | 99 | 0.631057 |
264dc3ddd403ceba4172c091869fa9a299f07a14 | 977 | cask 'bettertouchtool' do
# Pin the last release that still supports OS X 10.9 (Mavericks).
if MacOS.version <= :mavericks
  version '2.05'
  sha256 '41013cfeffee286a038363651db3dd315ff3a1e0cf07774d9ce852111be50a5a'

  # bettertouchtool.net/releases was verified as official when first introduced to the cask
  url "https://bettertouchtool.net/releases/btt#{version}_final_10_9.zip"
else
  version '3.02'
  sha256 'ecfccf7b368eb86886ad347685783d3d45f994d514dbb3b71693b8debe342d6c'

  # bettertouchtool.net/releases was verified as official when first introduced to the cask
  url "https://bettertouchtool.net/releases/btt#{version}.zip"
  appcast 'https://bettertouchtool.net/releases/'
end
name 'BetterTouchTool'
homepage 'https://folivora.ai/'

auto_updates true

app 'BetterTouchTool.app'

# Remove the login item on uninstall; zap additionally clears preferences
# and application-support files.
uninstall login_item: 'BetterTouchTool'

zap trash: [
  '~/Library/Preferences/com.hegenberg.BetterTouchTool.plist',
  '~/Library/Application Support/BetterTouchTool',
]
end
| 31.516129 | 93 | 0.734903 |
6aaaa0c2b7bb5e7b3c08a0f8552ead3d73cdf192 | 1,573 | # module for GoOpen LRv1 -> v2 migration
module GoOpenMigration
# migration table (populated via Talend job); lives on the separate
# :goopen_migration database, not the app's primary connection
class GoOpenV1Staging < ActiveRecord::Base
  establish_connection :goopen_migration
  self.table_name = 'go_open_v1_staging'
end
# Entry point: migrates all VALIDATED staging rows into envelopes.
#
# The staging DB connection can drop mid-run; on connection-level errors
# we re-establish it and retry the whole pass, up to 5 times.
def self.migrate
  retries ||= 5
  migrate_validated_entries
rescue ActiveRecord::StatementInvalid, PG::UnableToSend => e
  puts "Failed connection: retries=#{retries}"
  if (retries -= 1).positive?
    GoOpenV1Staging.establish_connection :goopen_migration
    retry
  else
    # NOTE(review): the exception is only printed, not re-raised, so
    # callers cannot tell the migration failed — confirm this is intended.
    puts 'Failed migration!', e
  end
end
# Processes VALIDATED rows in batches of 500: builds an envelope from each
# row's transformed JSON and, when the build reports no errors, marks the
# row COMPLETE (status update is skipped in development).
# Prints '#' per finished batch as a progress indicator.
def self.migrate_validated_entries
  qset = GoOpenV1Staging.where(load_status: 'VALIDATED')
  qset.find_in_batches(batch_size: 500) do |group|
    group.each do |item|
      resource = JSON.parse(item.transformed_json)
      _envlp, errors = EnvelopeBuilder.new(envelope_params(resource)).build
      update_status(item) if errors.blank? && !development?
    end
    print '#'
  end
end
# Marks a staging row as fully migrated.
def self.update_status(item)
  item.update(load_status: 'COMPLETE')
end
# Learning-registry envelope attributes for one resource; the resource
# payload is signed as a JWT (RS256) with the shared test key pair.
def self.envelope_params(resource)
  {
    envelope_type: 'resource_data',
    envelope_version: '1.0.0',
    envelope_community: 'learning_registry',
    resource: JWT.encode(resource, private_key, 'RS256'),
    resource_format: 'json',
    resource_encoding: 'jwt',
    resource_public_key: MR.test_keys.public
  }
end
# RSA signing key built from the configured test key material.
def self.private_key
  OpenSSL::PKey::RSA.new MR.test_keys.private
end
# True when the app runs under the Rack development environment.
def self.development?
  'development' == ENV['RACK_ENV']
end
end
| 26.661017 | 77 | 0.695486 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.