hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
0163ab3e2db7724139c2831cb68d1ceb66accedd | 5,416 | # -*- encoding : utf-8 -*-
# 勘定にもたせる口座連携関係の機能を記述。
# Account-linking behaviour mixed into Account.
# Provides the link/link-request associations and the operations used to
# mirror entries and deals between this system and a linked partner account.
module Account::Linking
  def self.included(base)
    base.has_many :link_requests, -> { includes(:sender) },
                  class_name: "AccountLinkRequest",
                  foreign_key: "account_id",
                  dependent: :destroy
    base.has_one :link, :class_name => "AccountLink", :foreign_key => "account_id", :dependent => :destroy
  end

  # True when a link partner is configured for this account in either
  # direction (an outgoing link or at least one incoming link request).
  def linked?
    self.link != nil || !self.link_requests.empty?
  end

  # Returns the account (or account proxy) this account sends to, or nil.
  # Memoized; pass force = true to recompute.
  # TODO: rename (was: linked_account).
  def destination_account(force = false)
    @destination_account = nil if force
    @destination_account ||= (link ? link.target_account : nil)
  end

  # Returns summaries (hashes) of all linked accounts, sent or received.
  # The account we actively send to is moved to the front of the list.
  # Memoized; pass force = true to recompute.
  def linked_account_summaries(force = false)
    @linked_account_summaries = nil if force
    return @linked_account_summaries if @linked_account_summaries
    summaries = link_requests.map { |r| r.sender_account.to_summary.merge(:from => true, :request_id => r.id) }
    if link
      if interactive = summaries.detect { |s| s[:ex_id] == link.target_ex_account_id }
        # The send target also sends to us: mark it bidirectional and move it first.
        interactive[:to] = true
        summaries.delete(interactive)
        summaries.insert(0, interactive)
      else
        summaries.insert(0, link.target_account.to_summary.merge(:to => true))
      end
    end
    @linked_account_summaries = summaries
  end

  # On request from the remote system, creates/updates the Entry and Deal on
  # this side that mirror the given remote entry.
  # Returns [entry_id, deal_id, confirmed?] so the caller can report our
  # entry back to the remote side.
  def update_link_to(linked_ex_entry_id, linked_ex_deal_id, linked_user_id, linked_entry_amount, linked_entry_summary, linked_entry_date, linked_ex_entry_confirmed)
    # An AccountEntry may already be linked to the remote entry.
    my_entry = entries.find_by(linked_ex_entry_id: linked_ex_entry_id, linked_user_id: linked_user_id)
    # If it exists, is confirmed, and the amount matches (same amount with the
    # opposite sign), nothing needs to change here. Confirmation-state changes
    # are handled by a separate process.
    if my_entry
      if !my_entry.deal.confirmed? || my_entry.amount != linked_entry_amount * -1
        # Unconfirmed, or the amount changed: unlink (deletes the deal when
        # unconfirmed, drops only the link when confirmed). The remote side
        # will then not re-enter this branch and will instead find this entry
        # as a mate_entry processed earlier.
        my_entry.unlink
        # Also unlink any sibling entries of the same deal that are already
        # linked to the requesting remote deal.
        # BUG FIX: the deal-id check used assignment (=) instead of comparison
        # (==), which both clobbered the attribute and made the condition
        # truthy for every sibling entry.
        my_entry.deal.entries(true).select { |e| e.id != my_entry.id && e.linked_ex_deal_id == linked_ex_deal_id && e.linked_user_id == linked_user_id }.each do |co_linked_entry|
          co_linked_entry.unlink
        end
        my_entry = nil
      end
    else
      mate_entry = user.entries.find_by(linked_ex_deal_id: linked_ex_deal_id, linked_user_id: linked_user_id)
      if mate_entry
        # Even before linked_ex_entry_id is set, an AccountEntry already linked
        # to the requesting remote deal identifies the deal; its counterpart
        # entry is the one the remote side is asking for.
        # A deal whose entry count is not 2 is corrupt data here.
        raise "entry size should be 2" unless mate_entry.deal.entries.size == 2
        my_entry = mate_entry.deal.entries.detect { |e| e.id != mate_entry.id }
        my_entry.account_id = self.id
        my_entry.linked_ex_entry_id = linked_ex_entry_id
        my_entry.linked_ex_deal_id = linked_ex_deal_id
        my_entry.linked_user_id = linked_user_id
        my_entry.linked_ex_entry_confirmed = linked_ex_entry_confirmed
        my_entry.skip_linking = true
        my_entry.save!
      end
    end
    unless my_entry
      # No matching entry: create a fresh, unconfirmed deal mirroring the
      # remote entry, with the partner account (or a default asset) as mate.
      mate_account = self.partner_account || user.default_asset_other_than(self)
      raise "#user #{user.login} 側で相手勘定を決められませんでした" unless mate_account
      deal = Deal::General.new(
        :summary => linked_entry_summary,
        :date => linked_entry_date,
        :confirmed => false)
      deal.user_id = self.user_id
      my_entry = deal.creditor_entries.build(
        :account_id => self.id,
        :amount => linked_entry_amount * -1, :skip_linking => true)
      my_entry.linked_ex_entry_id = linked_ex_entry_id
      my_entry.linked_ex_deal_id = linked_ex_deal_id
      my_entry.linked_user_id = linked_user_id
      my_entry.linked_ex_entry_confirmed = linked_ex_entry_confirmed
      deal.debtor_entries.build(
        :account_id => mate_account.id,
        :amount => linked_entry_amount, :skip_linking => true)
      deal.save!
    end
    # Report our (possibly new) AccountEntry back to the remote side.
    return [my_entry.id, my_entry.deal_id, my_entry.deal.confirmed?]
  end

  # Removes the link on our entry that points at the given remote entry,
  # if such an entry exists.
  def unlink_to(linked_ex_entry_id, linked_user_id)
    my_entry = entries.find_by(linked_ex_entry_id: linked_ex_entry_id, linked_user_id: linked_user_id)
    my_entry.unlink if my_entry
  end

  # Links this account to target_account owned by target_user.
  # When interactive is true, the reverse link is also created on the partner
  # side (which may raise AccountHasDifferentLinkError).
  def set_link(target_user, target_account, interactive = false)
    # Verify target_user is a mutually registered friend we may link to.
    raise PossibleError, "指定されたユーザーが見つからないか、相互にフレンド登録された状態ではありません" unless target_user && user.friend?(target_user)
    # Verify the target account exists.
    raise PossibleError, "#{target_user.login}さんには指定された口座がありません。" unless target_account
    target_summary = target_account.to_summary
    # TODO: the 'ex' prefix is unnecessary
    target_ex_account_id = target_summary[:ex_id]
    raise PossibleError, "#{self.class.type_name} には #{Account.const_get(target_summary[:base_type].to_s.camelize).type_name} を連動できません。" unless linkable_to?(target_summary[:base_type])
    # Create our side of the link.
    self.link = AccountLink.new(:target_user_id => target_user.id, :target_ex_account_id => target_ex_account_id)
    raise "link could not be saved. #{link.errors.full_messages.join(' ')}" if self.link.new_record?
    # May raise AccountHasDifferentLinkError on the partner side.
    target_user.link_account(target_account.id, user_id, id) if interactive
    self.link
  end
end
| 40.721805 | 184 | 0.724705 |
91e892efdc15a9955ff92270e60b45d4e851f42a | 41 | class Message < ActiveRecord::Base
  # Bare persistence model; attributes come entirely from the messages table
  # schema — no validations or associations are declared here.
end
87ddeffbb681eec7ae91208f9d2603306adfad68 | 3,760 | # frozen_string_literal: true
require 'spec_helper'
require 'unit/cloud/shared_stuff'
# Unit specs for Bosh::AzureCloud::Cloud#snapshot_disk.
# Covers managed disks (both 'bosh-disk-data-*' and legacy-named disks) and
# unmanaged (storage-account-backed) disks, using instance doubles for DiskId
# and the disk managers provided by the shared context.
describe Bosh::AzureCloud::Cloud do
  include_context 'shared stuff'

  describe '#snapshot_disk' do
    let(:metadata) { {} }
    let(:snapshot_cid) { 'fake-snapshot-cid' }
    let(:snapshot_id_object) { instance_double(Bosh::AzureCloud::DiskId) }
    let(:resource_group_name) { 'fake-resource-group-name' }
    # Fixed fixture typo: was 'fake-cacing'. The value is only compared against
    # itself through the doubles, so spec behavior is unchanged.
    let(:caching) { 'fake-caching' }
    let(:disk_cid) { 'fake-disk-cid' }
    let(:disk_id_object) { instance_double(Bosh::AzureCloud::DiskId) }

    before do
      allow(Bosh::AzureCloud::DiskId).to receive(:parse)
        .with(disk_cid, MOCK_RESOURCE_GROUP_NAME)
        .and_return(disk_id_object)
      allow(snapshot_id_object).to receive(:to_s)
        .and_return(snapshot_cid)
      allow(disk_id_object).to receive(:resource_group_name)
        .and_return(resource_group_name)
      allow(disk_id_object).to receive(:caching)
        .and_return(caching)
      allow(telemetry_manager).to receive(:monitor)
        .with('snapshot_disk', id: disk_cid).and_call_original
    end

    context 'when the disk is a managed disk' do
      context 'when the disk starts with bosh-disk-data' do
        let(:disk_name) { 'bosh-disk-data-fake-guid' }

        before do
          allow(disk_id_object).to receive(:disk_name)
            .and_return(disk_name)
        end

        it 'should take a managed snapshot of the disk' do
          expect(Bosh::AzureCloud::DiskId).to receive(:create)
            .with(caching, true, resource_group_name: resource_group_name)
            .and_return(snapshot_id_object)
          expect(disk_manager2).to receive(:snapshot_disk)
            .with(snapshot_id_object, disk_name, metadata)
          expect(cloud.snapshot_disk(disk_cid, metadata)).to eq(snapshot_cid)
        end
      end

      context 'when the disk NOT start with bosh-disk-data' do
        let(:disk_name) { 'fakestorageaccountname-fake-guid' }

        before do
          allow(disk_id_object).to receive(:disk_name)
            .and_return(disk_name)
          # A non-nil data disk means the disk is managed despite its name.
          expect(disk_manager2).to receive(:get_data_disk)
            .with(disk_id_object)
            .and_return(name: disk_cid)
        end

        it 'should take a managed snapshot of the disk' do
          expect(Bosh::AzureCloud::DiskId).to receive(:create)
            .with(caching, true, resource_group_name: resource_group_name)
            .and_return(snapshot_id_object)
          expect(disk_manager2).to receive(:snapshot_disk)
            .with(snapshot_id_object, disk_name, metadata)
          expect(cloud.snapshot_disk(disk_cid, metadata)).to eq(snapshot_cid)
        end
      end
    end

    context 'when the disk is an unmanaged disk' do
      let(:storage_account_name) { 'fake-storage-account-name' }
      let(:disk_name) { 'fake-disk-name' }
      let(:snapshot_name) { 'fake-snapshot-name' }

      before do
        allow(disk_id_object).to receive(:disk_name)
          .and_return(disk_name)
        allow(disk_id_object).to receive(:storage_account_name)
          .and_return(storage_account_name)
        # No managed data disk found -> treated as unmanaged.
        allow(disk_manager2).to receive(:get_data_disk)
          .with(disk_id_object)
          .and_return(nil)
      end

      it 'should take an unmanaged snapshot of the disk' do
        expect(disk_manager).to receive(:snapshot_disk)
          .with(storage_account_name, disk_name, metadata)
          .and_return(snapshot_name)
        expect(Bosh::AzureCloud::DiskId).to receive(:create)
          .with(caching, false, disk_name: snapshot_name, storage_account_name: storage_account_name)
          .and_return(snapshot_id_object)
        expect(cloud.snapshot_disk(disk_cid, metadata)).to eq(snapshot_cid)
      end
    end
  end
end
| 35.471698 | 101 | 0.663298 |
ff0b16acbf317200e4270033c8d7b5ed58b76a64 | 543 | Pod::Spec.new do |s|
# CocoaPods podspec (Ruby DSL) describing the YHKit pod.
s.name = "YHKit"
s.version = "1.0.0"
s.summary = "YHKit is a easy test."
s.description = <<-DESC
This description is used to generate tags and improve search results;
DESC
s.homepage = "https://github.com/bill19/YHKitDemo"
s.license = "MIT"
s.author = { "HaoSun" => "[email protected]" }
# The git tag must match s.version for `pod spec lint` to pass.
s.source = { :git => "https://github.com/bill19/YHKitDemo.git", :tag => "1.0.0" }
s.source_files = "YHKit/**/*"
# Optional dependency, currently disabled:
#s.dependency 'Masonry', '~> 1.0.1'
s.requires_arc = true
end
| 27.15 | 87 | 0.604052 |
d5bbd72c9a7c3c39d42a55b047ccd77799a9a4d0 | 310 | class Team < ApplicationRecord
# CarrierWave uploader backing the team's avatar attachment.
mount_uploader :avatar, AvatarUploader
belongs_to :league, optional: true
has_many :players, dependent: :destroy
# NOTE(review): foreign_key 'team_one'/'team_two' implies Match has columns
# named team_one/team_two holding team ids — confirm against the schema.
has_many :team_one, :class_name => 'Match', :foreign_key => 'team_one'
has_many :team_two, :class_name => 'Match', :foreign_key => 'team_two'
end
| 38.75 | 74 | 0.716129 |
4abd5b91847171b15a6803f0127ab5358bc104f4 | 49,888 | require 'rails_helper'
require 'permissions_spec_helper'
RSpec.describe PackagesController, type: :controller do
# Specs for PackagesController#upload: uploading package bits, the
# bits-service upload link, resource (fingerprint) formats, the
# app_bits_upload feature flag, error cases and permission checks.
describe '#upload' do
let(:package) { VCAP::CloudController::PackageModel.make }
let(:space) { package.space }
let(:org) { space.organization }
let(:params) { { 'bits_path' => 'path/to/bits' } }
let(:form_headers) { { 'CONTENT_TYPE' => 'application/x-www-form-urlencoded' } }
let(:user) { set_current_user(VCAP::CloudController::User.make) }
before do
@request.env.merge!(form_headers)
allow_user_read_access_for(user, spaces: [space])
allow_user_write_access(user, space: space)
end
it 'returns 200 and updates the package state' do
post :upload, params: params.merge(guid: package.guid)
expect(response.status).to eq(200)
expect(MultiJson.load(response.body)['guid']).to eq(package.guid)
expect(package.reload.state).to eq(VCAP::CloudController::PackageModel::PENDING_STATE)
end
# When the external bits service is configured, uploads go through a signed
# URL exposed in the response links.
context 'when the bits service is enabled' do
let(:bits_service_double) { double('bits_service') }
let(:blob_double) { double('blob') }
let(:bits_service_public_upload_url) { 'https://some.public/signed/url/to/upload/package' }
before do
VCAP::CloudController::Config.config.set(:bits_service, { enabled: true })
allow_any_instance_of(CloudController::DependencyLocator).to receive(:package_blobstore).
and_return(bits_service_double)
allow(bits_service_double).to receive(:blob).and_return(blob_double)
allow(blob_double).to receive(:public_upload_url).and_return(bits_service_public_upload_url)
end
context 'when the user can write to the space' do
it 'returns a bits service upload link' do
post :upload, params: params.merge(guid: package.guid)
expect(response.status).to eq(200)
expect(MultiJson.load(response.body)['links']['upload']['href']).to match(bits_service_public_upload_url)
end
end
end
context 'when uploading with resources' do
let(:params) do
{ 'bits_path' => 'path/to/bits', guid: package.guid }
end
context 'with unsupported options' do
let(:new_options) do
{
cached_resources: JSON.dump([{ 'fn' => 'lol', 'sha1' => 'abc', 'size' => 2048 }]),
}
end
it 'returns a 422 and the package' do
post :upload, params: params.merge(new_options), as: :json
expect(response.status).to eq(422), response.body
expect(response.body).to include 'UnprocessableEntity'
expect(response.body).to include "Unknown field(s): 'cached_resources'"
end
end
context 'with invalid json resources' do
let(:new_options) do
{
resources: '[abcddf]',
}
end
it 'returns a 422 and the package' do
post :upload, params: params.merge(new_options), as: :json
expect(response.status).to eq(422), response.body
expect(response.body).to include 'UnprocessableEntity'
end
end
context 'with correctly named cached resources' do
# Shared examples verify both v2 (fn/sha1/size) and v3
# (path/checksum/size_in_bytes) resource formats normalize to the same
# message passed to PackageUpload#upload_async.
shared_examples_for :uploading_successfully do
let(:uploader) { instance_double(VCAP::CloudController::PackageUpload, upload_async: nil) }
before do
allow(VCAP::CloudController::PackageUpload).to receive(:new).and_return(uploader)
end
# NOTE(review): the description says 201 but the expectation checks 200 —
# confirm which status is intended.
it 'returns a 201 and the package' do
post :upload, params: params.merge(new_options), as: :json
expect(response.status).to eq(200), response.body
expect(MultiJson.load(response.body)['guid']).to eq(package.guid)
expect(package.reload.state).to eq(VCAP::CloudController::PackageModel::CREATED_STATE)
expect(uploader).to have_received(:upload_async) do |args|
expect(args[:message].resources).to match_array([{ fn: 'lol', sha1: 'abc', size: 2048, mode: '645' }])
end
end
end
context 'v2 resource format' do
let(:new_options) do
{
resources: JSON.dump([{ 'fn' => 'lol', 'sha1' => 'abc', 'size' => 2048, 'mode' => '645' }]),
}
end
include_examples :uploading_successfully
end
context 'v3 resource format' do
let(:new_options) do
{
resources: JSON.dump([{ 'path' => 'lol', 'checksum' => { 'value' => 'abc' }, 'size_in_bytes' => 2048, 'mode' => '645' }]),
}
end
include_examples :uploading_successfully
end
end
end
# Feature flag: non-admins are blocked, admins bypass the flag.
context 'when app_bits_upload is disabled' do
before do
VCAP::CloudController::FeatureFlag.make(name: 'app_bits_upload', enabled: false, error_message: nil)
end
context 'non-admin user' do
it 'raises 403' do
post :upload, params: params.merge(guid: package.guid), as: :json
expect(response.status).to eq(403)
expect(response.body).to include('FeatureDisabled')
expect(response.body).to include('app_bits_upload')
end
end
context 'admin user' do
before { set_current_user_as_admin(user: user) }
it 'returns 200 and updates the package state' do
post :upload, params: params.merge(guid: package.guid), as: :json
expect(response.status).to eq(200)
expect(MultiJson.load(response.body)['guid']).to eq(package.guid)
expect(package.reload.state).to eq(VCAP::CloudController::PackageModel::PENDING_STATE)
end
end
end
context 'when the package type is not bits' do
before do
package.type = 'docker'
package.save
end
it 'returns a 422 Unprocessable' do
post :upload, params: params.merge(guid: package.guid), as: :json
expect(response.status).to eq(422)
expect(response.body).to include('UnprocessableEntity')
expect(response.body).to include('Package type must be bits.')
end
end
context 'when the package does not exist' do
it 'returns a 404 ResourceNotFound error' do
post :upload, params: params.merge(guid: 'not-real'), as: :json
expect(response.status).to eq(404)
expect(response.body).to include('ResourceNotFound')
end
end
context 'when the message is not valid' do
let(:params) { {} }
it 'returns a 422 UnprocessableEntity error' do
post :upload, params: params.merge(guid: package.guid), as: :json
expect(response.status).to eq(422)
expect(response.body).to include('UnprocessableEntity')
end
end
context 'when the bits have already been uploaded' do
before do
package.state = VCAP::CloudController::PackageModel::READY_STATE
package.save
end
it 'returns a 400 PackageBitsAlreadyUploaded error' do
post :upload, params: params.merge(guid: package.guid), as: :json
expect(response.status).to eq(400)
expect(response.body).to include('PackageBitsAlreadyUploaded')
end
end
context 'when the package is invalid' do
before do
allow_any_instance_of(VCAP::CloudController::PackageUpload).to receive(:upload_async).and_raise(VCAP::CloudController::PackageUpload::InvalidPackage.new('err'))
end
it 'returns 422' do
post :upload, params: params.merge(guid: package.guid), as: :json
expect(response.status).to eq(422)
expect(response.body).to include('UnprocessableEntity')
end
end
# Authorization matrix: missing scope -> 403, no read access -> 404 (hides
# existence), read-only access -> 403.
context 'permissions' do
context 'when the user does not have write scope' do
before do
set_current_user(user, scopes: ['cloud_controller.read'])
end
it 'returns an Unauthorized error' do
post :upload, params: params.merge(guid: package.guid), as: :json
expect(response.status).to eq(403)
expect(response.body).to include('NotAuthorized')
end
end
context 'when the user cannot read the package' do
before do
disallow_user_read_access(user, space: space)
end
it 'returns a 404' do
post :upload, params: params.merge(guid: package.guid), as: :json
expect(response.status).to eq(404)
expect(response.body).to include('ResourceNotFound')
end
end
context 'when the user can read but not write to the space' do
before do
allow_user_read_access_for(user, spaces: [space])
disallow_user_write_access(user, space: space)
end
it 'returns a 403' do
post :upload, params: params.merge(guid: package.guid), as: :json
expect(response.status).to eq(403)
expect(response.body).to include('NotAuthorized')
end
end
end
end
# Specs for PackagesController#download: redirect to the blobstore URL,
# type/state validation, and permission checks (read + secret access).
describe '#download' do
let(:package) { VCAP::CloudController::PackageModel.make(state: 'READY') }
let(:space) { package.space }
let(:org) { space.organization }
let(:user) { set_current_user(VCAP::CloudController::User.make, email: 'utako') }
before do
# Non-local blobstore -> controller redirects to the blob's public URL.
blob = instance_double(CloudController::Blobstore::FogBlob, public_download_url: 'http://package.example.com')
allow_any_instance_of(CloudController::Blobstore::Client).to receive(:blob).and_return(blob)
allow_any_instance_of(CloudController::Blobstore::Client).to receive(:local?).and_return(false)
allow_user_read_access_for(user, spaces: [space])
allow_user_secret_access(user, space: space)
end
it 'returns 302 and the redirect' do
get :download, params: { guid: package.guid }
expect(response.status).to eq(302)
expect(response.headers['Location']).to eq('http://package.example.com')
end
context 'when the package is not of type bits' do
before do
package.type = 'docker'
package.save
end
it 'returns 422' do
get :download, params: { guid: package.guid }
expect(response.status).to eq(422)
expect(response.body).to include('UnprocessableEntity')
end
end
context 'when the package has no bits' do
before do
package.state = VCAP::CloudController::PackageModel::CREATED_STATE
package.save
end
it 'returns 422' do
get :download, params: { guid: package.guid }
expect(response.status).to eq(422)
expect(response.body).to include('UnprocessableEntity')
end
end
context 'when the package cannot be found' do
it 'returns 404' do
get :download, params: { guid: 'a-bogus-guid' }
expect(response.status).to eq(404)
expect(response.body).to include('ResourceNotFound')
end
end
context 'permissions' do
context 'user does not have read scope' do
before do
set_current_user(VCAP::CloudController::User.make, scopes: ['cloud_controller.write'])
end
it 'returns an Unauthorized error' do
get :download, params: { guid: package.guid }
expect(response.status).to eq(403)
expect(response.body).to include('NotAuthorized')
end
end
context 'user does not have package read permissions' do
before do
disallow_user_read_access(user, space: space)
end
it 'returns 404' do
get :download, params: { guid: package.guid }
expect(response.status).to eq(404)
expect(response.body).to include('ResourceNotFound')
end
end
# Downloading bits requires secret access on top of read access.
context 'user does not have package secrets permissions' do
before do
disallow_user_secret_access(user, space: space)
end
it 'returns 403' do
get :download, params: { guid: package.guid }
expect(response.status).to eq(403)
expect(response.body).to include('NotAuthorized')
end
end
end
end
# Specs for PackagesController#show: fetching a package, permission checks,
# and conditional exposure of the bits-service upload link.
describe '#show' do
let(:package) { VCAP::CloudController::PackageModel.make }
let(:space) { package.space }
let(:user) { set_current_user(VCAP::CloudController::User.make) }
before do
allow_user_read_access_for(user, spaces: [space])
disallow_user_write_access(user, space: space)
end
it 'returns a 200 OK and the package' do
get :show, params: { guid: package.guid }
expect(response.status).to eq(200)
expect(MultiJson.load(response.body)['guid']).to eq(package.guid)
end
context 'when the package does not exist' do
it 'returns a 404 Not Found' do
get :show, params: { guid: 'made-up-guid' }
expect(response.status).to eq(404)
expect(response.body).to include('ResourceNotFound')
end
end
context 'permissions' do
context 'when the user does not have the read scope' do
before do
set_current_user(user, scopes: ['cloud_controller.write'])
end
it 'returns a 403 NotAuthorized error' do
get :show, params: { guid: package.guid }
expect(response.status).to eq(403)
expect(response.body).to include('NotAuthorized')
end
end
context 'when the user can not read from the space' do
before do
disallow_user_read_access(user, space: space)
end
it 'returns a 404 not found' do
get :show, params: { guid: package.guid }
expect(response.status).to eq(404)
expect(response.body).to include('ResourceNotFound')
end
end
# The upload link is only rendered for users who may upload (write access).
context 'when the bits service is enabled' do
let(:bits_service_double) { double('bits_service') }
let(:blob_double) { double('blob') }
let(:bits_service_public_upload_url) { "https://some.public/signed/url/to/upload/package#{package.guid}" }
before do
VCAP::CloudController::Config.config.set(:bits_service, { enabled: true })
allow_any_instance_of(CloudController::DependencyLocator).to receive(:package_blobstore).
and_return(bits_service_double)
allow(bits_service_double).to receive(:blob).and_return(blob_double)
allow(blob_double).to receive(:public_upload_url).and_return(bits_service_public_upload_url)
end
context 'when the user can write to the space' do
before do
allow_user_write_access(user, space: space)
end
it 'returns a bits service upload link' do
get :show, params: { guid: package.guid }
expect(response.status).to eq(200)
expect(MultiJson.load(response.body)['links']['upload']['href']).to eq(bits_service_public_upload_url)
end
end
context 'when the user can NOT write to the space' do
before do
disallow_user_write_access(user, space: space)
end
it 'does not return a bits service upload link' do
get :show, params: { guid: package.guid }
expect(response.status).to eq(200)
expect(MultiJson.load(response.body)['links']['upload']).to be_nil
end
end
end
end
end
# Specs for PackagesController#update: metadata (labels/annotations) updates,
# validation errors, the shared permissions-endpoint matrix, and the
# bits-service upload link in the response.
describe '#update' do
let!(:org) { VCAP::CloudController::Organization.make(name: "Harold's Farm") }
let!(:space) { VCAP::CloudController::Space.make(name: 'roosters', organization: org) }
let(:app_model) { VCAP::CloudController::AppModel.make(name: 'needed to put the package in the space', space: space) }
let(:package) { VCAP::CloudController::PackageModel.make(app: app_model) }
let(:user) { set_current_user(VCAP::CloudController::User.make) }
# Pre-existing metadata seeded in the before block below.
let(:labels) do
{
fruit: 'pears',
truck: 'hino'
}
end
let(:annotations) do
{
potato: 'celandine',
beet: 'formanova',
}
end
# Default request body: overwrites label 'fruit' and annotation 'potato'.
let!(:update_message) do
{
metadata: {
labels: {
fruit: 'passionfruit'
},
annotations: {
potato: 'adora'
}
}
}
end
before do
VCAP::CloudController::LabelsUpdate.update(package, labels, VCAP::CloudController::PackageLabelModel)
VCAP::CloudController::AnnotationsUpdate.update(package, annotations, VCAP::CloudController::PackageAnnotationModel)
end
context 'when the user is an admin' do
before do
set_current_user_as_admin
end
it 'updates the package' do
patch :update, params: { guid: package.guid }.merge(update_message), as: :json
expect(response.status).to eq(200)
expect(parsed_body['metadata']['labels']).to eq({ 'fruit' => 'passionfruit', 'truck' => 'hino' })
expect(parsed_body['metadata']['annotations']).to eq({ 'potato' => 'adora', 'beet' => 'formanova' })
package.reload
expect(package.labels.map { |label| { key: label.key_name, value: label.value } }).
to match_array([{ key: 'fruit', value: 'passionfruit' }, { key: 'truck', value: 'hino' }])
expect(package.annotations.map { |a| { key: a.key, value: a.value } }).
to match_array([{ key: 'potato', value: 'adora' }, { key: 'beet', value: 'formanova' }])
end
# A nil value in the request removes the corresponding label.
context 'when a label is deleted' do
let(:request_body) do
{
metadata: {
labels: {
fruit: nil
}
}
}
end
it 'succeeds' do
patch :update, params: { guid: package.guid }.merge(request_body), as: :json
expect(response.status).to eq(200)
expect(parsed_body['metadata']['labels']).to eq({ 'truck' => 'hino' })
expect(package.labels.map { |label| { key: label.key_name, value: label.value } }).to match_array([{ key: 'truck', value: 'hino' }])
end
end
context 'when an empty request is sent' do
let(:request_body) do
{}
end
it 'succeeds' do
patch :update, params: { guid: package.guid }.merge(request_body), as: :json
expect(response.status).to eq(200)
package.reload
expect(parsed_body['guid']).to eq(package.guid)
end
end
context 'when the message is invalid' do
before do
set_current_user_as_admin
end
let!(:update_message2) { update_message.merge({ animals: 'Cows' }) }
it 'fails' do
patch :update, params: { guid: package.guid }.merge(update_message2), as: :json
expect(response.status).to eq(422)
end
end
context 'when there is no such package' do
it 'fails' do
patch :update, params: { guid: "Greg's missing package" }.merge(update_message), as: :json
expect(response.status).to eq(404)
end
end
# Keys under the reserved cloudfoundry.org prefix are rejected.
context 'when there is an invalid label' do
let(:request_body) do
{
metadata: {
labels: {
'cloudfoundry.org/label': 'value'
}
}
}
end
it 'displays an informative error' do
patch :update, params: { guid: package.guid }.merge(request_body), as: :json
expect(response.status).to eq(422)
expect(response).to have_error_message(/label [\w\s]+ error/)
end
end
context 'when there is an invalid annotation' do
let(:request_body) do
{
metadata: {
annotations: {
key: 'big' * 5000
}
}
}
end
it 'displays an informative error' do
patch :update, params: { guid: package.guid }.merge(request_body), as: :json
expect(response.status).to eq(422)
expect(response).to have_error_message(/is greater than 5000 characters/)
end
end
context 'when there are too many annotations' do
let(:request_body) do
{
metadata: {
annotations: {
radish: 'daikon',
potato: 'idaho'
}
}
}
end
before do
VCAP::CloudController::Config.config.set(:max_annotations_per_resource, 2)
end
it 'fails with a 422' do
patch :update, params: { guid: package.guid }.merge(request_body), as: :json
expect(response.status).to eq(422)
expect(response).to have_error_message(/exceed maximum of 2/)
end
end
context 'when an annotation is deleted' do
let(:request_body) do
{
metadata: {
annotations: {
potato: nil
}
}
}
end
it 'succeeds' do
patch :update, params: { guid: package.guid }.merge(request_body), as: :json
expect(response.status).to eq(200)
expect(parsed_body['metadata']['annotations']).to eq({ 'beet' => 'formanova' })
package.reload
expect(package.annotations.map { |a| { key: a.key, value: a.value } }).to match_array([{ key: 'beet', value: 'formanova' }])
end
end
end
describe 'authorization' do
# Role-to-status matrix from permissions_spec_helper's shared examples.
it_behaves_like 'permissions endpoint' do
let(:roles_to_http_responses) do
{
'admin' => 200,
'admin_read_only' => 403,
'global_auditor' => 403,
'space_developer' => 200,
'space_manager' => 403,
'space_auditor' => 403,
'org_manager' => 403,
'org_auditor' => 404,
'org_billing_manager' => 404,
}
end
let(:api_call) { lambda { patch :update, params: { guid: package.guid }.merge(update_message), as: :json } }
end
context 'when the bits service is enabled' do
let(:bits_service_double) { double('bits_service') }
let(:blob_double) { double('blob') }
let(:bits_service_public_upload_url) { "https://some.public/signed/url/to/upload/package#{package.guid}" }
let(:user) { set_current_user(VCAP::CloudController::User.make) }
before do
VCAP::CloudController::Config.config.set(:bits_service, { enabled: true })
allow_any_instance_of(CloudController::DependencyLocator).to receive(:package_blobstore).
and_return(bits_service_double)
allow(bits_service_double).to receive(:blob).and_return(blob_double)
allow(blob_double).to receive(:public_upload_url).and_return(bits_service_public_upload_url)
allow_user_read_access_for(user, orgs: [org], spaces: [space])
end
context 'when the user can write to the space' do
before do
allow_user_write_access(user, space: space)
end
it 'returns a bits service upload link' do
patch :update, params: { guid: package.guid }.merge(update_message), as: :json
expect(response.status).to eq(200)
expect(MultiJson.load(response.body)['links']['upload']['href']).to eq(bits_service_public_upload_url)
end
end
end
context 'permissions' do
let(:user) { set_current_user(VCAP::CloudController::User.make) }
context 'when the user cannot read the app' do
before do
disallow_user_read_access(user, space: space)
end
it 'returns a 404 ResourceNotFound error' do
patch :update, params: { guid: app_model.guid }.merge(update_message), as: :json
expect(response.status).to eq 404
expect(response.body).to include 'ResourceNotFound'
end
end
context 'when the user can read but cannot write to the app' do
before do
allow_user_read_access_for(user, spaces: [space])
disallow_user_write_access(user, space: space)
end
it 'raises ApiError NotAuthorized' do
patch :update, params: { guid: package.guid }.merge(update_message), as: :json
expect(response.status).to eq 403
expect(response.body).to include 'NotAuthorized'
end
end
end
end
end
# Specs for PackagesController#destroy: async deletion via a background
# DeleteActionJob, a pollable job record with a Location header, and
# permission checks.
describe '#destroy' do
let(:package) { VCAP::CloudController::PackageModel.make }
let(:user) { set_current_user(VCAP::CloudController::User.make) }
let(:space) { package.space }
let(:package_delete_stub) { instance_double(VCAP::CloudController::PackageDelete) }
before do
allow_user_read_access_for(user, spaces: [space])
allow_user_write_access(user, space: space)
allow(VCAP::CloudController::Jobs::DeleteActionJob).to receive(:new).and_call_original
allow(VCAP::CloudController::PackageDelete).to receive(:new).and_return(package_delete_stub)
end
context 'when the package does not exist' do
it 'returns a 404 Not Found' do
delete :destroy, params: { guid: 'nono' }
expect(response.status).to eq(404)
expect(response.body).to include('ResourceNotFound')
end
end
context 'permissions' do
context 'when the user does not have write scope' do
before do
set_current_user(user, scopes: ['cloud_controller.read'])
end
it 'returns an Unauthorized error' do
delete :destroy, params: { guid: package.guid }
expect(response.status).to eq(403)
expect(response.body).to include('NotAuthorized')
end
end
context 'when the user cannot read the package' do
before do
disallow_user_read_access(user, space: space)
end
it 'returns a 404 ResourceNotFound error' do
delete :destroy, params: { guid: package.guid }
expect(response.status).to eq(404)
expect(response.body).to include('ResourceNotFound')
end
end
context 'when the user can read but cannot write to the package' do
before do
allow_user_read_access_for(user, spaces: [space])
disallow_user_write_access(user, space: space)
end
it 'raises ApiError NotAuthorized' do
delete :destroy, params: { guid: package.guid }
expect(response.status).to eq(403)
expect(response.body).to include('NotAuthorized')
end
end
end
# Deletion is enqueued, not performed inline: the record still exists until
# the job runs.
it 'successfully deletes the package in a background job' do
delete :destroy, params: { guid: package.guid }
package_delete_jobs = Delayed::Job.where(Sequel.lit("handler like '%PackageDelete%'"))
expect(package_delete_jobs.count).to eq 1
package_delete_jobs.first
expect(VCAP::CloudController::PackageModel.find(guid: package.guid)).not_to be_nil
expect(VCAP::CloudController::Jobs::DeleteActionJob).to have_received(:new).with(
VCAP::CloudController::PackageModel,
package.guid,
package_delete_stub,
)
end
it 'creates a job to track the deletion and returns it in the location header' do
expect {
delete :destroy, params: { guid: package.guid }
}.to change {
VCAP::CloudController::PollableJobModel.count
}.by(1)
job = VCAP::CloudController::PollableJobModel.last
enqueued_job = Delayed::Job.last
expect(job.delayed_job_guid).to eq(enqueued_job.guid)
expect(job.operation).to eq('package.delete')
expect(job.state).to eq('PROCESSING')
expect(job.resource_guid).to eq(package.guid)
expect(job.resource_type).to eq('package')
expect(response.status).to eq(202)
expect(response.headers['Location']).to include "#{link_prefix}/v3/jobs/#{job.guid}"
end
end
# GET /v3/packages — listing packages, globally and nested under an app.
describe '#index' do
  let(:user) { set_current_user(VCAP::CloudController::User.make) }
  let(:app_model) { VCAP::CloudController::AppModel.make }
  let(:space) { app_model.space }
  let(:space1) { VCAP::CloudController::Space.make }
  let(:space2) { VCAP::CloudController::Space.make }
  let(:space3) { VCAP::CloudController::Space.make }
  let(:user_spaces) { [space, space1, space2, space3] }
  # Eagerly created: two packages inside the user's spaces plus one outside them.
  let!(:user_package_1) { VCAP::CloudController::PackageModel.make(app_guid: app_model.guid) }
  let!(:user_package_2) { VCAP::CloudController::PackageModel.make(app_guid: app_model.guid) }
  let!(:admin_package) { VCAP::CloudController::PackageModel.make }

  before do
    allow_user_read_access_for(user, spaces: user_spaces)
  end

  it 'returns 200' do
    get :index
    expect(response.status).to eq(200)
  end

  it 'lists the packages visible to the user' do
    get :index

    response_guids = parsed_body['resources'].map { |r| r['guid'] }
    expect(response_guids).to match_array([user_package_1, user_package_2].map(&:guid))
  end

  it 'returns pagination links for /v3/packages' do
    get :index
    expect(parsed_body['pagination']['first']['href']).to start_with("#{link_prefix}/v3/packages")
  end

  # GET /v3/apps/:guid/packages
  context 'when accessed as an app subresource' do
    it 'uses the app as a filter' do
      app = VCAP::CloudController::AppModel.make(space: space)
      package_1 = VCAP::CloudController::PackageModel.make(app_guid: app.guid)
      package_2 = VCAP::CloudController::PackageModel.make(app_guid: app.guid)
      VCAP::CloudController::PackageModel.make

      get :index, params: { app_guid: app.guid }

      expect(response.status).to eq(200)
      response_guids = parsed_body['resources'].map { |r| r['guid'] }
      expect(response_guids).to match_array([package_1.guid, package_2.guid])
    end

    it "doesn't allow filtering on space_guids in a nested query" do
      app = VCAP::CloudController::AppModel.make(space: space, guid: 'speshal-app-guid')

      get :index, params: { app_guid: app.guid, page: 1, per_page: 10, states: 'AWAITING_UPLOAD',
                            space_guids: user_spaces.map(&:guid).join(',') }

      expect(response.status).to eq(400)
      expect(response.body).to include("Unknown query parameter(s): \'space_guids\'")
    end

    it 'uses the app and pagination as query parameters' do
      app = VCAP::CloudController::AppModel.make(space: space, guid: 'speshal-app-guid')
      package_1 = VCAP::CloudController::PackageModel.make(app_guid: app.guid, guid: 'package-1')
      package_2 = VCAP::CloudController::PackageModel.make(app_guid: app.guid, guid: 'package-2')
      VCAP::CloudController::PackageModel.make

      get :index, params: { app_guids: app.guid, page: 1, per_page: 10, states: 'AWAITING_UPLOAD', }

      expect(response.status).to eq(200)
      response_guids = parsed_body['resources'].map { |r| r['guid'] }
      expect(response_guids).to match_array([package_1.guid, package_2.guid])
    end

    it 'provides the correct base url in the pagination links' do
      get :index, params: { app_guid: app_model.guid }
      expect(parsed_body['pagination']['first']['href']).to include("#{link_prefix}/v3/apps/#{app_model.guid}/packages")
    end

    context 'the app does not exist' do
      it 'returns a 404 Resource Not Found' do
        get :index, params: { app_guid: 'hello-i-do-not-exist' }

        expect(response.status).to eq 404
        expect(response.body).to include 'ResourceNotFound'
      end
    end

    # Unreadable apps must be indistinguishable from missing ones (404, not 403).
    context 'when the user does not have permissions to read the app' do
      before do
        disallow_user_read_access(user, space: space)
      end

      it 'returns a 404 Resource Not Found error' do
        get :index, params: { app_guid: app_model.guid }

        expect(response.body).to include 'ResourceNotFound'
        expect(response.status).to eq 404
      end
    end
  end

  context 'when the user has global read access' do
    before do
      allow_user_global_read_access(user)
    end

    it 'lists all the packages' do
      get :index

      response_guids = parsed_body['resources'].map { |r| r['guid'] }
      expect(response_guids).to match_array([user_package_1, user_package_2, admin_package].map(&:guid))
    end
  end

  context 'when pagination options are specified' do
    let(:page) { 1 }
    let(:per_page) { 1 }
    let(:params) { { 'page' => page, 'per_page' => per_page } }

    it 'paginates the response' do
      get :index, params: params

      parsed_response = parsed_body
      response_guids = parsed_response['resources'].map { |r| r['guid'] }
      expect(parsed_response['pagination']['total_results']).to eq(2)
      expect(response_guids.length).to eq(per_page)
    end
  end

  context 'when parameters are invalid' do
    context 'because there are unknown parameters' do
      let(:params) { { 'invalid' => 'thing', 'bad' => 'stuff' } }

      it 'returns an 400 Bad Request' do
        get :index, params: params

        expect(response.status).to eq(400)
        expect(response.body).to include('BadQueryParameter')
        # Parameter order in the error message is not guaranteed, so capture
        # both names and compare as a set.
        m = /Unknown query parameter\(s\): '(\w+)', '(\w+)'/.match(response.body)
        expect(m).not_to be_nil
        expect([m[1], m[2]]).to match_array(%w/bad invalid/)
      end
    end

    context 'because there are invalid values in parameters' do
      let(:params) { { 'per_page' => 9999999999 } }

      it 'returns an 400 Bad Request' do
        get :index, params: params

        expect(response.status).to eq(400)
        expect(response.body).to include('BadQueryParameter')
        expect(response.body).to include('Per page must be between')
      end
    end
  end

  context 'permissions' do
    context 'when the user can read but not write to the space' do
      it 'returns a 200 OK' do
        get :index
        expect(response.status).to eq(200)
      end
    end

    context 'when the user does not have the read scope' do
      before do
        set_current_user(VCAP::CloudController::User.make, scopes: [])
      end

      it 'returns a 403 NotAuthorized error' do
        get :index
        expect(response.status).to eq(403)
        expect(response.body).to include('NotAuthorized')
      end
    end
  end
end
# POST /v3/packages — creating a new package (bits or docker) and copying
# an existing package to another app.
describe '#create' do
  context 'when creating a new package' do
    let(:app_model) { VCAP::CloudController::AppModel.make }
    let(:app_guid) { app_model.guid }
    let(:space) { app_model.space }
    let(:org) { space.organization }
    let(:request_body) do
      {
        type: 'bits',
        relationships: { app: { data: { guid: app_guid } } }
      }
    end
    let(:user) { set_current_user(VCAP::CloudController::User.make) }

    before do
      allow_user_read_access_for(user, spaces: [space])
      allow_user_write_access(user, space: space)
    end

    context 'bits' do
      it 'returns a 201 and the package' do
        expect(app_model.packages.count).to eq(0)

        post :create, params: request_body, as: :json

        expect(response.status).to eq 201
        expect(app_model.reload.packages.count).to eq(1)
        created_package = app_model.packages.first

        response_guid = parsed_body['guid']
        expect(response_guid).to eq created_package.guid
      end

      context 'with an invalid type field' do
        let(:request_body) do
          {
            type: 'ninja',
            relationships: { app: { data: { guid: app_model.guid } } }
          }
        end

        it 'returns an UnprocessableEntity error' do
          post :create, params: request_body, as: :json

          expect(response.status).to eq 422
          expect(response.body).to include 'UnprocessableEntity'
          expect(response.body).to include "must be one of 'bits, docker'"
        end
      end

      context 'when the app does not exist' do
        let(:app_guid) { 'bogus-guid' }

        it 'returns a 422 UnprocessableEntity error' do
          post :create, params: request_body, as: :json

          expect(response.status).to eq 422
          expect(response.body).to include 'UnprocessableEntity'
        end
      end

      context 'when the package is invalid' do
        before do
          allow(VCAP::CloudController::PackageCreate).to receive(:create).and_raise(VCAP::CloudController::PackageCreate::InvalidPackage.new('err'))
        end

        it 'returns 422' do
          post :create, params: { app_guid: app_model.guid }.merge(request_body), as: :json

          expect(response.status).to eq 422
          expect(response.body).to include 'UnprocessableEntity'
        end
      end

      context 'when the bits service is enabled' do
        let(:bits_service_double) { double('bits_service') }
        let(:blob_double) { double('blob') }
        let(:bits_service_public_upload_url) { 'https://some.public/signed/url/to/upload/package' }

        before do
          # NOTE(review): mutates global config without restoring it afterwards —
          # presumably the suite resets config between examples; confirm.
          VCAP::CloudController::Config.config.set(:bits_service, { enabled: true })

          allow_any_instance_of(CloudController::DependencyLocator).to receive(:package_blobstore).
            and_return(bits_service_double)
          allow(bits_service_double).to receive(:blob).and_return(blob_double)
          allow(blob_double).to receive(:public_upload_url).and_return(bits_service_public_upload_url)
        end

        context 'when the user can write to the space' do
          it 'returns a bits service upload link' do
            post :create, params: request_body, as: :json

            expect(response.status).to eq(201)
            expect(MultiJson.load(response.body)['links']['upload']['href']).to match(bits_service_public_upload_url)
          end
        end
      end

      context 'when the existing app is a Docker app' do
        let(:app_model) { VCAP::CloudController::AppModel.make(:docker) }

        it 'returns 422' do
          post :create, params: request_body, as: :json

          expect(response.status).to eq 422
          expect(response.body).to include 'UnprocessableEntity'
          expect(response).to have_error_message('Cannot create bits package for a Docker app.')
        end
      end

      context 'permissions' do
        context 'when the user does not have write scope' do
          before do
            set_current_user(user, scopes: ['cloud_controller.read'])
          end

          it 'returns a 403 NotAuthorized error' do
            post :create, params: { app_guid: app_model.guid }.merge(request_body), as: :json

            expect(response.status).to eq 403
            expect(response.body).to include 'NotAuthorized'
          end
        end

        # Unreadable app → 422 (existence not revealed on create).
        context 'when the user cannot read the app' do
          before do
            disallow_user_read_access(user, space: space)
          end

          it 'returns a 422 UnprocessableEntity error' do
            post :create, params: { app_guid: app_model.guid }.merge(request_body), as: :json

            expect(response.status).to eq 422
            expect(response.body).to include 'UnprocessableEntity'
          end
        end

        context 'when the user can read but not write to the space' do
          before do
            disallow_user_write_access(user, space: space)
          end

          it 'returns a 422 UnprocessableEntity error' do
            post :create, params: { app_guid: app_model.guid }.merge(request_body), as: :json

            expect(response.status).to eq 422
            expect(response.body).to include 'UnprocessableEntity'
          end
        end
      end
    end

    context 'docker' do
      let(:app_model) { VCAP::CloudController::AppModel.make(:docker) }
      let(:image) { 'registry/image:latest' }
      let(:docker_username) { 'naruto' }
      let(:docker_password) { 'oturan' }
      let(:request_body) do
        {
          relationships: { app: { data: { guid: app_model.guid } } },
          type: 'docker',
          data: {
            image: image,
            username: docker_username,
            password: docker_password
          }
        }
      end

      it 'returns a 201' do
        expect(app_model.packages.count).to eq(0)
        post :create, params: request_body, as: :json

        expect(response.status).to eq 201

        app_model.reload
        package = app_model.packages.first
        expect(package.type).to eq('docker')
        expect(package.image).to eq('registry/image:latest')
        expect(package.docker_username).to eq(docker_username)
        expect(package.docker_password).to eq(docker_password)
      end

      context 'when the existing app is a buildpack app' do
        let(:app_model) { VCAP::CloudController::AppModel.make }

        it 'returns 422' do
          post :create, params: request_body, as: :json

          expect(response.status).to eq 422
          expect(response.body).to include 'UnprocessableEntity'
          expect(response).to have_error_message('Cannot create Docker package for a buildpack app.')
        end
      end
    end

    context 'with metadata' do
      let(:metadata_request_body) { request_body.merge(metadata) }

      # Labels under a reserved prefix (cloudfoundry.org) must be rejected.
      context 'when the label is invalid' do
        let(:metadata) do
          {
            metadata: {
              labels: {
                'cloudfoundry.org/release' => 'stable'
              }
            }
          }
        end

        it 'returns an UnprocessableEntity error' do
          post :create, params: metadata_request_body, as: :json

          expect(response.status).to eq 422
          expect(response.body).to include 'UnprocessableEntity'
          expect(response).to have_error_message(/label [\w\s]+ error/)
        end
      end

      context 'when the annotation is invalid' do
        let(:metadata) do
          {
            metadata: {
              annotations: {
                '' => 'stable'
              }
            }
          }
        end

        it 'returns an UnprocessableEntity error' do
          post :create, params: metadata_request_body, as: :json

          expect(response.status).to eq 422
          expect(response.body).to include 'UnprocessableEntity'
          expect(response).to have_error_message(/annotation [\w\s]+ error/)
        end
      end

      context 'when the metadata is valid' do
        let(:metadata) do
          {
            metadata: {
              labels: {
                'release' => 'stable'
              },
              annotations: {
                'notes' => 'detailed information'
              }
            }
          }
        end

        it 'Returns a 201 and the app with metadata' do
          post :create, params: metadata_request_body, as: :json

          response_body = parsed_body
          response_metadata = response_body['metadata']

          expect(response.status).to eq(201)
          expect(response_metadata['labels']['release']).to eq 'stable'
          expect(response_metadata['annotations']['notes']).to eq 'detailed information'
        end
      end
    end
  end

  # POST /v3/packages?source_guid=... — copying a package between apps
  # requires read+write on both the source and destination spaces.
  context 'when copying an existing package' do
    let(:source_app_model) { VCAP::CloudController::AppModel.make }
    let(:original_package) { VCAP::CloudController::PackageModel.make(type: 'bits', app_guid: source_app_model.guid) }
    let(:target_app_model) { VCAP::CloudController::AppModel.make }
    let(:user) { set_current_user(VCAP::CloudController::User.make) }
    let(:source_space) { source_app_model.space }
    let(:destination_space) { target_app_model.space }
    let(:relationship_request_body) { { relationships: { app: { data: { guid: target_app_model.guid } } } } }

    before do
      allow_user_read_access_for(user, spaces: [source_space, destination_space])
      allow_user_write_access(user, space: source_space)
      allow_user_write_access(user, space: destination_space)
    end

    it 'returns a 201 and the response' do
      expect(target_app_model.packages.count).to eq(0)

      post :create, params: { source_guid: original_package.guid }.merge(relationship_request_body), as: :json

      copied_package = target_app_model.reload.packages.first
      response_guid = parsed_body['guid']

      expect(response.status).to eq 201
      expect(copied_package.type).to eq(original_package.type)
      expect(response_guid).to eq copied_package.guid
    end

    context 'when the bits service is enabled' do
      let(:bits_service_double) { double('bits_service') }
      let(:blob_double) { double('blob') }
      let(:bits_service_public_upload_url) { 'https://some.public/signed/url/to/upload/package' }

      before do
        # NOTE(review): global config mutated without cleanup, as above — confirm
        # the suite resets it between examples.
        VCAP::CloudController::Config.config.set(:bits_service, { enabled: true })

        allow_any_instance_of(CloudController::DependencyLocator).to receive(:package_blobstore).
          and_return(bits_service_double)
        allow(bits_service_double).to receive(:blob).and_return(blob_double)
        allow(blob_double).to receive(:public_upload_url).and_return(bits_service_public_upload_url)
      end

      context 'when the user can write to the space' do
        it 'returns a bits service upload link' do
          post :create, params: { source_guid: original_package.guid }.merge(relationship_request_body), as: :json

          expect(response.status).to eq(201)
          expect(MultiJson.load(response.body)['links']['upload']['href']).to match(bits_service_public_upload_url)
        end
      end
    end

    context 'permissions' do
      context 'when the user does not have write scope' do
        before do
          set_current_user(VCAP::CloudController::User.make, scopes: ['cloud_controller.read'])
        end

        it 'returns a 403 NotAuthorized error' do
          post :create, params: { source_guid: original_package.guid }.merge(relationship_request_body), as: :json

          expect(response.status).to eq 403
          expect(response.body).to include 'NotAuthorized'
        end
      end

      context 'when the user cannot read the source package' do
        before do
          disallow_user_read_access(user, space: source_space)
        end

        it 'returns a 422 UnprocessableEntity error' do
          post :create, params: { source_guid: original_package.guid }.merge(relationship_request_body), as: :json

          expect(response.status).to eq 422
          expect(response.body).to include 'UnprocessableEntity'
        end
      end

      context 'when the user cannot modify the source target_app' do
        before do
          allow_user_read_access_for(user, spaces: [source_space, destination_space])
          disallow_user_write_access(user, space: source_space)
        end

        it 'returns a 422 UnprocessableEntity error' do
          post :create, params: { source_guid: original_package.guid }.merge(relationship_request_body), as: :json

          expect(response.status).to eq 422
          expect(response.body).to include 'UnprocessableEntity'
        end
      end

      context 'when the user cannot read the target app' do
        before do
          disallow_user_read_access(user, space: destination_space)
        end

        it 'returns a 422 UnprocessableEntity error' do
          post :create, params: { source_guid: original_package.guid }.merge(relationship_request_body), as: :json

          expect(response.status).to eq 422
          expect(response.body).to include 'UnprocessableEntity'
        end
      end

      context 'when the user cannot create the package' do
        before do
          allow_user_read_access_for(user, spaces: [destination_space])
          disallow_user_write_access(user, space: destination_space)
        end

        it 'returns a 422 UnprocessableEntity error' do
          post :create, params: { source_guid: original_package.guid }.merge(relationship_request_body), as: :json

          expect(response.status).to eq 422
          expect(response.body).to include 'UnprocessableEntity'
        end
      end
    end

    context 'when the source package does not exist' do
      it 'returns a 422 UnprocessableEntity error' do
        post :create, params: { source_guid: 'bogus package guid' }.merge(relationship_request_body), as: :json

        expect(response.status).to eq 422
        expect(response.body).to include 'UnprocessableEntity'
      end
    end

    context 'when the target target_app does not exist' do
      let(:relationship_request_body) { { relationships: { app: { data: { guid: 'bogus' } } } } }

      it 'returns a 422 UnprocessableEntity error' do
        post :create, params: { source_guid: original_package.guid }.merge(relationship_request_body), as: :json

        expect(response.status).to eq 422
        expect(response.body).to include 'UnprocessableEntity'
      end
    end

    context 'when the package is invalid' do
      before do
        allow_any_instance_of(VCAP::CloudController::PackageCopy).to receive(:copy).and_raise(VCAP::CloudController::PackageCopy::InvalidPackage.new('ruh roh'))
      end

      it 'returns 422' do
        post :create, params: { source_guid: original_package.guid }.merge(relationship_request_body), as: :json

        expect(response.status).to eq 422
        expect(response.body).to include 'UnprocessableEntity'
        expect(response.body).to include 'ruh roh'
      end
    end
  end
end
end
| 34.935574 | 168 | 0.612993 |
# frozen_string_literal: true
# The MIT License (MIT)
#
# Copyright <YEAR> <COPYRIGHT HOLDER>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Auto-generated by gapic-generator-ruby v0.0.1.dev.1. DO NOT EDIT!
require "google/showcase/v1beta1/identity/client"
require "google/showcase/v1beta1/identity/credentials"
| 45.758621 | 79 | 0.779201 |
module Spree
  module Api
    # Adds token-based registration and login endpoints to Spree's API
    # UsersController. Both actions end by ensuring the user has a
    # spree_api_key, which the corresponding view renders to the client.
    UsersController.class_eval do
      before_action :authenticate_user, except: [:sign_up, :sign_in]

      # POST: registers a new user and issues an API key.
      def sign_up
        @user = Spree::User.find_by_email(params[:user][:email])
        if @user.present?
          # NOTE(review): 401 for "already registered" is unusual (409 would be
          # more conventional) — kept as-is for API compatibility.
          render "spree/api/users/user_exists", status: 401
          return
        end

        @user = Spree::User.new(user_params)
        unless @user.save
          unauthorized
          return
        end
        @user.generate_spree_api_key!
      end

      # POST: authenticates by email/password and ensures an API key exists.
      def sign_in
        @user = Spree::User.find_by_email(params[:user][:email])
        unless @user.present? && @user.valid_password?(params[:user][:password])
          unauthorized
          return
        end
        # Only mint a key on first login; keep the existing one otherwise.
        @user.generate_spree_api_key! if @user.spree_api_key.blank?
      end

      # Strong parameters for user creation.
      def user_params
        params.require(:user).permit(:email, :password, :password_confirmation)
      end
    end
  end
end
| 22.609756 | 79 | 0.599784 |
# Homebrew formula for pastebinit, a command-line pastebin client.
class Pastebinit < Formula
  desc "Send things to pastebin from the command-line"
  homepage "https://launchpad.net/pastebinit"
  url "https://launchpad.net/pastebinit/trunk/1.5/+download/pastebinit-1.5.tar.gz"
  sha256 "0d931dddb3744ed38aa2d319dd2d8a2f38a391011ff99db68ce7c83ab8f5b62f"
  license "GPL-2.0"
  revision 3

  livecheck do
    url :stable
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "90c20fef3e5c3e0944fadf42e45692288edb5e5ee241a4d936fe509c2e8ec16d"
    sha256 cellar: :any_skip_relocation, big_sur:       "43c42eb708a8452001802163a22e637ff7685c1e9fbd72b58102a68ccdffaf52"
    sha256 cellar: :any_skip_relocation, catalina:      "f24d4dbd9723f5726c7786af82cd16df86485ea3ae075906531f82d0544ec688"
    sha256 cellar: :any_skip_relocation, mojave:        "d2195934de64bf7814790b59d2429b90cb58e492f13f08430958b82ec3bd652d"
    sha256 cellar: :any_skip_relocation, high_sierra:   "4ca0432c7652ab49ee0f61823335d0e0ea70caaf220f4654291406dcb425cd23"
    sha256 cellar: :any_skip_relocation, x86_64_linux:  "e2b65d2fe25cf3a5d8ed4f1ed5687f531c6a90b164567ccaa89d0f0d78fc3370"
  end

  # docbook2x is only needed to build the man page and is broken on Linux,
  # so the man page is only generated/installed on macOS.
  depends_on "docbook2x" => :build if OS.mac? # broken on linux
  depends_on "[email protected]"

  # Remove for next release
  patch do
    url "https://github.com/lubuntu-team/pastebinit/commit/ab05aa431a6bf76b28586ad97c98069b8de5e46a.patch?full_index=1"
    sha256 "1abd0ec274cf0952a371e6738fcd3ece67bb9a4dd52f997296cd107f035f5690"
  end

  def install
    # Point the script at Homebrew's python3 and this prefix's etc directory.
    inreplace "pastebinit" do |s|
      s.gsub! "/usr/bin/python3", Formula["[email protected]"].opt_bin/"python3"
      s.gsub! "/usr/local/etc/pastebin.d", etc/"pastebin.d"
    end
    system "docbook2man", "pastebinit.xml" if OS.mac?
    bin.install "pastebinit"
    etc.install "pastebin.d"
    man1.install "PASTEBINIT.1" => "pastebinit.1" if OS.mac?
    libexec.install %w[po utils]
  end

  test do
    # Pastes a string and expects the service to return a paste URL.
    url = pipe_output("#{bin}/pastebinit -a test -b paste.ubuntu.com", "Hello, world!").chomp
    assert_match "://paste.ubuntu.com/", url
  end
end
| 41.102041 | 122 | 0.763654 |
# frozen_string_literal: true
# Test-environment bootstrap: force the test env, put lib/ on the load path,
# then load the gem under test and its development tooling.
ENV['RAILS_ENV'] ||= 'test'
$LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)

# dotenv is loaded first so subsequent requires see the configured environment.
require "dotenv/load"
require "jsonb_accessor"
require "pry"
require "pry-nav"
require "pry-doc"
require "awesome_print"
require "database_cleaner"
require "yaml"
# Plain ActiveRecord model over the "products" table with no jsonb accessors;
# used to exercise the raw column alongside the accessor-enabled Product below.
class StaticProduct < ActiveRecord::Base
  self.table_name = "products"
  belongs_to :product_category
end
# Same table as StaticProduct, but with typed jsonb accessors declared on
# the "options" column.
class Product < StaticProduct
  jsonb_accessor :options, title: :string, rank: :integer, made_at: :datetime
end
# Parent model with its own jsonb-backed :title, for association specs.
class ProductCategory < ActiveRecord::Base
  jsonb_accessor :options, title: :string
  has_many :products
end
# Custom matcher: passes when the object exposes both a reader and a writer
# for the given attribute name.
RSpec::Matchers.define :attr_accessorize do |attribute_name|
  match do |actual|
    [attribute_name, "#{attribute_name}="].all? { |method_name| actual.respond_to?(method_name) }
  end
end
RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end

  config.mock_with :rspec do |mocks|
    # Fail when stubbing methods that do not exist on the real object.
    mocks.verify_partial_doubles = true
  end

  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  config.disable_monkey_patching!
  config.default_formatter = "doc" if config.files_to_run.one?
  config.profile_examples = 0
  config.order = :random
  Kernel.srand config.seed

  config.before :suite do
    # YAML.load_file opens and closes the file itself; the previous
    # YAML.load(File.open(...)) leaked the file handle.
    dbconfig = YAML.load_file("db/config.yml")
    ActiveRecord::Base.establish_connection(dbconfig["test"])
  end

  config.before do
    # Truncate all tables so every example starts from a clean database.
    DatabaseCleaner.clean_with(:truncation)
  end
end
| 24.737705 | 82 | 0.762757 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Shell
  module Commands
    # Shell command backing `grant`: grants table/namespace ACLs in the
    # original form, or rewrites per-cell ACLs when given a permissions Hash
    # plus a scanner specification.
    class Grant < Command
      def help
        return <<-EOF
Grant users specific rights.
Syntax: grant <user or @group>, <permissions> [, <table> [, <column family> [, <column qualifier>]]]
Syntax: grant <user or @group>, <permissions>, <@namespace>
permissions is either zero or more letters from the set "RWXCA".
READ('R'), WRITE('W'), EXEC('X'), CREATE('C'), ADMIN('A')
Note: Groups and users are granted access in the same way, but groups are prefixed with an '@'
character. Tables and namespaces are specified the same way, but namespaces are
prefixed with an '@' character.
For example:
hbase> grant 'bobsmith', 'RWXCA'
hbase> grant '@admins', 'RWXCA'
hbase> grant 'bobsmith', 'RWXCA', '@ns1'
hbase> grant 'bobsmith', 'RW', 't1', 'f1', 'col1'
hbase> grant 'bobsmith', 'RW', 'ns1:t1', 'f1', 'col1'
EOF
      end

      def command(*args)
        # The command is ambiguous at the first argument: it is either a user
        # (original form) or a table name (cell-ACL form); args[1] decides.
        table_name = user = args[0]
        raise(ArgumentError, "First argument should be a String") unless user.kind_of?(String)

        if args[1].kind_of?(String)
          # Original form of the command:
          #   user, permissions [, table_name [, family [, qualifier]]]
          # (args[1] is already known to be a String from the branch test.)
          permissions = args[1]
          family = qualifier = nil
          table_name = args[2] # will be nil if unset
          unless table_name.nil?
            raise(ArgumentError, "Table name is not of String type") unless table_name.kind_of?(String)
            family = args[3] # will be nil if unset
            unless family.nil?
              raise(ArgumentError, "Family is not of String type") unless family.kind_of?(String)
              qualifier = args[4] # will be nil if unset
              unless qualifier.nil?
                raise(ArgumentError, "Qualifier is not of String type") unless qualifier.kind_of?(String)
              end
            end
          end

          format_simple_command do
            security_admin.grant(user, permissions, table_name, family, qualifier)
          end
        elsif args[1].kind_of?(Hash)
          # Cell-ACL form of the command:
          #   table_name in args[0]
          #   a Hash mapping users (or groups) to permissions in args[1]
          #   a Hash argument suitable for Table#_get_scanner in args[2]
          # Rewrites every cell matched by the scan with the given permissions.
          # Useful for feature testing and debugging.
          permissions = args[1]
          scan = args[2]
          raise(ArgumentError, "Scanner specification is not a Hash") unless scan.kind_of?(Hash)

          t = table(table_name)
          now = Time.now
          scanner = t._get_scanner(scan)
          count = 0
          iter = scanner.iterator
          while iter.hasNext
            row = iter.next
            row.list.each do |cell|
              # Re-put each cell with the new per-cell ACL attached.
              put = org.apache.hadoop.hbase.client.Put.new(row.getRow)
              put.add(cell)
              t.set_cell_permissions(put, permissions)
              t.table.put(put)
            end
            count += 1
          end
          formatter.footer(now, count)
        else
          raise(ArgumentError, "Second argument should be a String or Hash")
        end
      end
    end
  end
end
| 36.791667 | 100 | 0.616535 |
# View spec for the email call-to-action partial: the step action link is
# shown only when show_step_actions is true.
describe "mail_shared/_email_reply.html.erb" do
  let(:approval) { create(:approval) }
  let(:proposal) { approval.proposal }

  before { create(:api_token, step: approval) }

  # Renders the partial with the shared locals, toggling only show_step_actions.
  def render_email_reply(show_step_actions)
    render(
      partial: "mail_shared/call_to_action/email_reply",
      locals: { show_step_actions: show_step_actions, step: approval.decorate, proposal: proposal }
    )
  end

  it "renders 'Send a Comment' link with approve button" do
    render_email_reply(true)

    expect(rendered).to include approval.decorate.action_name
    expect(rendered).to include I18n.t("mailer.view_or_modify_request_cta")
    expect(rendered).not_to include I18n.t("mailer.view_request_cta")
  end

  it "renders 'View This Request' link without approve button" do
    render_email_reply(false)

    expect(rendered).not_to include approval.decorate.action_name
    expect(rendered).not_to include I18n.t("mailer.view_or_modify_request_cta")
    expect(rendered).to include I18n.t("mailer.view_request_cta")
  end
end
| 38.266667 | 87 | 0.736934 |
# Standard CRUD controller for users, plus a Sunspot/Solr full-text search.
class UsersController < ApplicationController
  # GET /users/search?q=... — full-text search over users; reuses the index view.
  def search
    search = User.search do
      fulltext params[:q]
    end
    @users = search.results
    render 'index'
  end

  # GET /users
  def index
    @users = User.all
  end

  # GET /users/:id
  def show
    @user = User.find(params[:id])
  end

  # GET /users/new
  def new
    @user = User.new
  end

  # GET /users/:id/edit
  def edit
    @user = User.find(params[:id])
  end

  # POST /users — re-renders the form on validation failure.
  def create
    @user = User.new(user_params)

    if @user.save
      redirect_to @user
    else
      render 'new'
    end
  end

  # PATCH/PUT /users/:id
  def update
    @user = User.find(params[:id])

    if @user.update(user_params)
      redirect_to @user
    else
      render 'edit'
    end
  end

  # DELETE /users/:id
  def destroy
    @user = User.find(params[:id])
    @user.destroy

    redirect_to users_path
  end

  private

  # Strong parameters for create/update.
  def user_params
    params.require(:user).permit(:name, :phone_number)
  end
end
| 13.885246 | 56 | 0.606848 |
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/rest"
require "google/cloud/compute/v1/compute_small_pb"
require "google/cloud/compute/v1/region_operations"
class ::Google::Cloud::Compute::V1::RegionOperations::ClientTest < Minitest::Test
# Minimal fake for Gapic::Rest::ClientStub: returns a canned response for
# every HTTP-style call, counts calls, and records the value returned by the
# optional verification block for each request.
class ClientStub
  attr_accessor :call_count, :requests

  def initialize response, &block
    @response = response
    @block = block
    @call_count = 0
    @requests = []
  end

  # Body-less verbs share one keyword signature (body is always nil).
  %i[get delete].each do |verb|
    define_method(:"make_#{verb}_request") do |uri:, params: {}, options: {}|
      make_http_request(verb, uri: uri, body: nil, params: params, options: options)
    end
  end

  # Verbs that require a body.
  %i[patch put].each do |verb|
    define_method(:"make_#{verb}_request") do |uri:, body:, params: {}, options: {}|
      make_http_request(verb, uri: uri, body: body, params: params, options: options)
    end
  end

  # POST allows an optional body.
  def make_post_request uri:, body: nil, params: {}, options: {}
    make_http_request(:post, uri: uri, body: body, params: params, options: options)
  end

  # Records the call (block result, or nil when no block) and replies with
  # the canned response.
  def make_http_request *args, **kwargs
    @call_count += 1
    @requests.push(@block ? @block.call(*args, **kwargs) : nil)
    @response
  end
end
# DELETE region operation: verifies the REST transport sends a DELETE with
# no body and the rest/x-goog-api-client header, across all supported
# invocation styles (hash, named args, protobuf, each with call options).
def test_delete
  # Create test objects.
  client_result = ::Google::Cloud::Compute::V1::DeleteRegionOperationResponse.new
  http_response = OpenStruct.new body: client_result.to_json

  call_options = {}

  # Create request parameters for a unary method.
  operation = "hello world"
  project = "hello world"
  region = "hello world"

  delete_client_stub = ClientStub.new http_response do |verb, uri:, body:, params:, options:|
    assert_equal :delete, verb

    # REST transport must advertise "rest" (and not "grpc") in the header.
    assert options.metadata.key? :"x-goog-api-client"
    assert options.metadata[:"x-goog-api-client"].include? "rest"
    refute options.metadata[:"x-goog-api-client"].include? "grpc"

    assert_nil body
  end

  Gapic::Rest::ClientStub.stub :new, delete_client_stub do
    # Create client
    client = ::Google::Cloud::Compute::V1::RegionOperations::Rest::Client.new do |config|
      config.credentials = :dummy_value
    end

    # Use hash object
    client.delete({ operation: operation, project: project, region: region }) do |result, response|
      assert_equal http_response, response
    end

    # Use named arguments
    client.delete operation: operation, project: project, region: region do |result, response|
      assert_equal http_response, response
    end

    # Use protobuf object
    client.delete ::Google::Cloud::Compute::V1::DeleteRegionOperationRequest.new(operation: operation, project: project, region: region) do |result, response|
      assert_equal http_response, response
    end

    # Use hash object with options
    client.delete({ operation: operation, project: project, region: region }, call_options) do |result, response|
      assert_equal http_response, response
    end

    # Use protobuf object with options
    client.delete(::Google::Cloud::Compute::V1::DeleteRegionOperationRequest.new(operation: operation, project: project, region: region), call_options) do |result, response|
      assert_equal http_response, response
    end

    # Verify method calls
    assert_equal 5, delete_client_stub.call_count
  end
end
def test_get
# Create test objects.
client_result = ::Google::Cloud::Compute::V1::Operation.new
http_response = OpenStruct.new body: client_result.to_json
call_options = {}
# Create request parameters for a unary method.
operation = "hello world"
project = "hello world"
region = "hello world"
get_client_stub = ClientStub.new http_response do |verb, uri:, body:, params:, options:|
assert_equal :get, verb
assert options.metadata.key? :"x-goog-api-client"
assert options.metadata[:"x-goog-api-client"].include? "rest"
refute options.metadata[:"x-goog-api-client"].include? "grpc"
assert_nil body
end
Gapic::Rest::ClientStub.stub :new, get_client_stub do
# Create client
client = ::Google::Cloud::Compute::V1::RegionOperations::Rest::Client.new do |config|
config.credentials = :dummy_value
end
# Use hash object
client.get({ operation: operation, project: project, region: region }) do |result, response|
assert_equal http_response, response
end
# Use named arguments
client.get operation: operation, project: project, region: region do |result, response|
assert_equal http_response, response
end
# Use protobuf object
client.get ::Google::Cloud::Compute::V1::GetRegionOperationRequest.new(operation: operation, project: project, region: region) do |result, response|
assert_equal http_response, response
end
# Use hash object with options
client.get({ operation: operation, project: project, region: region }, call_options) do |result, response|
assert_equal http_response, response
end
# Use protobuf object with options
client.get(::Google::Cloud::Compute::V1::GetRegionOperationRequest.new(operation: operation, project: project, region: region), call_options) do |result, response|
assert_equal http_response, response
end
# Verify method calls
assert_equal 5, get_client_stub.call_count
end
end
def test_list
# Create test objects.
client_result = ::Google::Cloud::Compute::V1::OperationList.new
http_response = OpenStruct.new body: client_result.to_json
call_options = {}
# Create request parameters for a unary method.
filter = "hello world"
max_results = 42
order_by = "hello world"
page_token = "hello world"
project = "hello world"
region = "hello world"
return_partial_success = true
list_client_stub = ClientStub.new http_response do |verb, uri:, body:, params:, options:|
assert_equal :get, verb
assert options.metadata.key? :"x-goog-api-client"
assert options.metadata[:"x-goog-api-client"].include? "rest"
refute options.metadata[:"x-goog-api-client"].include? "grpc"
assert params.key? "filter"
assert params.key? "maxResults"
assert params.key? "orderBy"
assert params.key? "pageToken"
assert params.key? "returnPartialSuccess"
assert_nil body
end
Gapic::Rest::ClientStub.stub :new, list_client_stub do
# Create client
client = ::Google::Cloud::Compute::V1::RegionOperations::Rest::Client.new do |config|
config.credentials = :dummy_value
end
# Use hash object
client.list({ filter: filter, max_results: max_results, order_by: order_by, page_token: page_token, project: project, region: region, return_partial_success: return_partial_success }) do |result, response|
assert_equal http_response, response
end
# Use named arguments
client.list filter: filter, max_results: max_results, order_by: order_by, page_token: page_token, project: project, region: region, return_partial_success: return_partial_success do |result, response|
assert_equal http_response, response
end
# Use protobuf object
client.list ::Google::Cloud::Compute::V1::ListRegionOperationsRequest.new(filter: filter, max_results: max_results, order_by: order_by, page_token: page_token, project: project, region: region, return_partial_success: return_partial_success) do |result, response|
assert_equal http_response, response
end
# Use hash object with options
client.list({ filter: filter, max_results: max_results, order_by: order_by, page_token: page_token, project: project, region: region, return_partial_success: return_partial_success }, call_options) do |result, response|
assert_equal http_response, response
end
# Use protobuf object with options
client.list(::Google::Cloud::Compute::V1::ListRegionOperationsRequest.new(filter: filter, max_results: max_results, order_by: order_by, page_token: page_token, project: project, region: region, return_partial_success: return_partial_success), call_options) do |result, response|
assert_equal http_response, response
end
# Verify method calls
assert_equal 5, list_client_stub.call_count
end
end
def test_wait
# Create test objects.
client_result = ::Google::Cloud::Compute::V1::Operation.new
http_response = OpenStruct.new body: client_result.to_json
call_options = {}
# Create request parameters for a unary method.
operation = "hello world"
project = "hello world"
region = "hello world"
wait_client_stub = ClientStub.new http_response do |verb, uri:, body:, params:, options:|
assert_equal :post, verb
assert options.metadata.key? :"x-goog-api-client"
assert options.metadata[:"x-goog-api-client"].include? "rest"
refute options.metadata[:"x-goog-api-client"].include? "grpc"
assert_nil body
end
Gapic::Rest::ClientStub.stub :new, wait_client_stub do
# Create client
client = ::Google::Cloud::Compute::V1::RegionOperations::Rest::Client.new do |config|
config.credentials = :dummy_value
end
# Use hash object
client.wait({ operation: operation, project: project, region: region }) do |result, response|
assert_equal http_response, response
end
# Use named arguments
client.wait operation: operation, project: project, region: region do |result, response|
assert_equal http_response, response
end
# Use protobuf object
client.wait ::Google::Cloud::Compute::V1::WaitRegionOperationRequest.new(operation: operation, project: project, region: region) do |result, response|
assert_equal http_response, response
end
# Use hash object with options
client.wait({ operation: operation, project: project, region: region }, call_options) do |result, response|
assert_equal http_response, response
end
# Use protobuf object with options
client.wait(::Google::Cloud::Compute::V1::WaitRegionOperationRequest.new(operation: operation, project: project, region: region), call_options) do |result, response|
assert_equal http_response, response
end
# Verify method calls
assert_equal 5, wait_client_stub.call_count
end
end
def test_configure
credentials_token = :dummy_value
client = block_config = config = nil
Gapic::Rest::ClientStub.stub :new, nil do
client = ::Google::Cloud::Compute::V1::RegionOperations::Rest::Client.new do |config|
config.credentials = credentials_token
end
end
config = client.configure do |c|
block_config = c
end
assert_same block_config, config
assert_kind_of ::Google::Cloud::Compute::V1::RegionOperations::Rest::Client::Configuration, config
end
end
| 36.246914 | 284 | 0.696185 |
ab9ba6f80911bb5e1e8b6af3ba5a3389f2f17b92 | 940 | # frozen_string_literal: true
require_relative "../../helpers/toggle"
module Byebug
  #
  # Reopens the +enable+ command to define the +breakpoints+ subcommand
  #
  class EnableCommand < Command
    #
    # Enables all or specific breakpoints
    #
    class BreakpointsCommand < Command
      include Helpers::ToggleHelper

      self.allow_in_post_mortem = true

      # Matches "b" or "breakpoints", optionally followed by a
      # space-separated list of breakpoint ids (captured in group 1).
      def self.regexp
        /^\s* b(?:reakpoints)? (?:\s+ (.+))? \s*$/x
      end

      def self.description
        <<-DESCRIPTION
          en[able] b[reakpoints][ <ids>]

          #{short_description}

          Give breakpoint numbers (separated by spaces) as arguments or no
          argument at all if you want to enable every breakpoint.
        DESCRIPTION
      end

      def self.short_description
        "Enable all or specific breakpoints"
      end

      # Delegates to ToggleHelper; @match[1] carries the optional id list
      # captured by +regexp+ (nil enables every breakpoint).
      def execute
        enable_disable_breakpoints("enable", @match[1])
      end
    end
  end
end
| 21.860465 | 74 | 0.624468 |
f8ddaabda8accc84981008e925ea1b03ff16ffc6 | 133 | class TokyoMetro::Initializer::ApiKey::List < Array
def set
self.each do | namespace |
namespace.set
end
end
end
| 13.3 | 51 | 0.654135 |
bfc8709d727f1a1f291b2d0f1c1bd1fd0ced4149 | 5,507 | =begin
Ruby InsightVM API Client
OpenAPI spec version: 3
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.0
=end
require 'date'
module Rapid7VmConsole
#
class CreatedReferenceScanIDLink
# The identifier of the resource created.
attr_accessor :id
# Hypermedia links to corresponding or related resources.
attr_accessor :links
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'id' => :'id',
:'links' => :'links'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'id' => :'Integer',
:'links' => :'Array<Link>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'id')
self.id = attributes[:'id']
end
if attributes.has_key?(:'links')
if (value = attributes[:'links']).is_a?(Array)
self.links = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
id == o.id &&
links == o.links
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[id, links].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = Rapid7VmConsole.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 28.096939 | 107 | 0.609769 |
1899f87937c48c926e6c3489ee6edf5a4cd7b5fd | 4,803 | require 'rubyXL'
require 'rubyXL/convenience_methods/cell'
require 'rubyXL/convenience_methods/color'
require 'rubyXL/convenience_methods/font'
require 'rubyXL/convenience_methods/workbook'
require 'rubyXL/convenience_methods/worksheet'
class ReportWpByPeriodController < ApplicationController
  include Downloadable

  default_search_scope :report_wp_by_period

  before_action :find_optional_project, :verify_reports_wp_by_period_module_active

  # Renders the report request form.
  def index
  end

  # Builds the "work packages by period" XLSX report for the work packages
  # due inside the requested date range and streams it to the user.
  def create
    from = params['filter']['label_by_date_from']
    to = params['filter']['label_by_date_to']
    @selected_wps = @project.work_packages.where(due_date: from..to)
    generate_report
    send_to_user filepath: @report_ready_filepath
  end

  def destroy
    redirect_to action: 'index'
    nil
  end

  protected

  def show_local_breadcrumb
    true
  end

  # Fills the XLSX template with the selected work packages, writes the result
  # under public/reports and, when the current user has the manage_documents
  # permission, attaches the report to the project's documents and kicks off
  # an asynchronous PDF conversion via unoconv.
  def generate_report
    template_path = File.absolute_path('.') + '/' + 'app/reports/templates/wp_by_period.xlsx'
    @workbook = RubyXL::Parser.parse(template_path)
    @workbook.calc_pr.full_calc_on_load = true
    generate_title_sheet
    generate_main_sheet
    dir_path = File.absolute_path('.') + '/public/reports'
    Dir.mkdir(dir_path) unless File.directory?(dir_path)
    @report_ready_filepath = dir_path + '/wp_by_period_out.xlsx'
    @workbook.write(@report_ready_filepath)
    # Only users holding manage_documents in this project get the report
    # registered as a project document.
    allowed = current_user.roles_for_project(@project).any? do |role|
      role.role_permissions.any? { |perm| perm.permission == 'manage_documents' }
    end
    return unless allowed
    # NOTE(review): the spawned unoconv process is neither waited on nor
    # detached, which can leave a zombie — confirm whether Process.detach
    # (or a background job) is wanted here.
    spawn('cd ' + File.absolute_path('.') + '/unoconv && unoconv -f pdf ' + @report_ready_filepath)
    @document = @project.documents.build
    @document.category = DocumentCategory.find_by(name: 'Отчет о исполнении мероприятий за период')
    @document.user_id = current_user.id
    @document.title = 'Отчет о исполнении мероприятий за период от ' + DateTime.now.strftime("%d/%m/%Y %H:%M")
    service = AddAttachmentService.new(@document, author: current_user)
    attachment = service.add_attachment_old uploaded_file: File.open(@report_ready_filepath),
                                            filename: 'wp_by_period_out.xlsx'
    @document.attach_files({ '0' => { 'id' => attachment.id } })
    @document.save
  end

  # Writes the project name and the reporting period onto the title sheet.
  def generate_title_sheet
    sheet = @workbook['Титульный лист']
    sheet[0][0].change_contents(@project.name)
    sheet[22][0].change_contents("за период с " +
                                     params['filter']['label_by_date_from'].split('-').reverse.join('.') +
                                     " по " +
                                     params['filter']['label_by_date_to'].split('-').reverse.join('.'))
  rescue StandardError => e
    # Was `rescue Exception`, which would also swallow signals/SystemExit.
    Rails.logger.info(e.message)
  end

  # Writes one row per selected work package (status, subject, dates,
  # assignee, attachment names and non-automatic journal comments).
  def generate_main_sheet
    sheet = @workbook['КТ и Мероприятия']
    start_index = 4
    @selected_wps.each_with_index do |wp, i|
      attch = Attachment.where(container_type: 'WorkPackage', container_id: wp.id)
      # Was built by concatenation + String#first(-2), which raises on modern
      # ActiveSupport for negative limits; join produces the same string.
      file_str = attch.map(&:filename).join(', ')
      com_str = ""
      cmmnt = Journal.where(journable_type: 'WorkPackage', journable_id: wp.id)
      cmmnt.each do |c|
        # Check nil *before* calling #include? (the old order crashed on
        # journals without notes); skip auto-generated update notes.
        next if c.notes.nil? || c.notes == "" || c.notes.include?("_Обновлено автоматически")
        com_str += "\n" + c.notes
      end
      sheet.insert_cell(start_index + i, 0, i + 1)
      sheet.insert_cell(start_index + i, 1, wp.status.name)
      sheet.insert_cell(start_index + i, 2, wp.subject)
      sheet.insert_cell(start_index + i, 3, wp.due_date.strftime("%d.%m.%Y"))
      sheet.insert_cell(start_index + i, 4, wp.fact_due_date.nil? ? "" : wp.fact_due_date.strftime("%d.%m.%Y"))
      sheet.insert_cell(start_index + i, 5, wp.assigned_to.nil? ? "" : wp.assigned_to.fio)
      sheet.insert_cell(start_index + i, 6, file_str)
      sheet.insert_cell(start_index + i, 7, com_str)
      (0..7).each { |j| cell_format(i, sheet, start_index, j) }
    end
  end

  private

  # Wraps text and draws a thin border around the cell at
  # [start_index + i, j]. The original bordered column 0 repeatedly (and a
  # dangling `sheet[...][j].` continuation called `.sheet` on a cell, raising
  # NoMethodError); borders are now applied to the cell actually formatted.
  def cell_format(i, sheet, start_index, j)
    sheet[start_index + i][j].change_text_wrap(true)
    cell = sheet.sheet_data[start_index + i][j]
    [:top, :left, :right, :bottom].each { |edge| cell.change_border(edge, 'thin') }
  end

  # Loads @project from params when present and authorizes the request.
  def find_optional_project
    return true unless params[:project_id]
    @project = Project.find(params[:project_id])
    authorize
  rescue ActiveRecord::RecordNotFound
    render_404
  end

  # Rejects requests for projects that have not enabled this report module.
  def verify_reports_wp_by_period_module_active
    render_403 if @project && !@project.module_enabled?('report_wp_by_period')
  end
end
| 36.946154 | 122 | 0.671664 |
ed5626e52f0cbe711189e1df9050fc5cc8b1c98a | 477 | ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
require 'rails/test_help'
require "minitest/reporters"
Minitest::Reporters.use!
class ActiveSupport::TestCase
  # Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
  fixtures :all

  include ApplicationHelper

  # Returns true if a test user is logged in (the session holds a user id).
  def is_logged_in?
    !session[:user_id].nil?
  end

  # Add more helper methods to be used by all tests here...
end
| 26.5 | 82 | 0.746331 |
03642b603b987605a8aad6ac3584bb7de7ce8ed7 | 11,211 | require "test_helper.rb"
# Remote integration tests for the Orbital gateway; hits the sandbox
# configured in fixtures(:orbital_gateway).
class RemoteOrbitalGatewayTest < Test::Unit::TestCase
  def setup
    Base.mode = :test
    @gateway = ActiveMerchant::Billing::OrbitalGateway.new(fixtures(:orbital_gateway))

    @amount = 100
    @credit_card = credit_card('4112344112344113')
    @declined_card = credit_card('4000300011112220')

    @options = {
      :order_id => generate_unique_id,
      :address => address,
      :merchant_id => 'merchant1234'
    }

    # Certification card numbers, one per brand.
    @cards = {
      :visa => "4788250000028291",
      :mc => "5454545454545454",
      :amex => "371449635398431",
      :ds => "6011000995500000",
      :diners => "36438999960016",
      :jcb => "3566002020140006"}

    @level_2_options = {
      tax_indicator: "1",
      tax: "75",
      advice_addendum_1: 'taa1 - test',
      advice_addendum_2: 'taa2 - test',
      advice_addendum_3: 'taa3 - test',
      advice_addendum_4: 'taa4 - test',
      purchase_order: '123abc',
      name: address[:name],
      address1: address[:address1],
      address2: address[:address2],
      city: address[:city],
      state: address[:state],
      zip: address[:zip],
    }

    # Certification matrix: card brand, AVS zip, CVD and amount per case.
    @test_suite = [
      {:card => :visa, :AVSzip => 11111, :CVD => 111, :amount => 3000},
      {:card => :visa, :AVSzip => 33333, :CVD => nil, :amount => 3801},
      {:card => :mc, :AVSzip => 44444, :CVD => nil, :amount => 4100},
      {:card => :mc, :AVSzip => 88888, :CVD => 666, :amount => 1102},
      {:card => :amex, :AVSzip => 55555, :CVD => nil, :amount => 105500},
      {:card => :amex, :AVSzip => 66666, :CVD => 2222, :amount => 7500},
      {:card => :ds, :AVSzip => 77777, :CVD => nil, :amount => 1000},
      {:card => :ds, :AVSzip => 88888, :CVD => 444, :amount => 6303},
      {:card => :jcb, :AVSzip => 33333, :CVD => nil, :amount => 2900}]
  end

  def test_successful_purchase
    assert response = @gateway.purchase(@amount, @credit_card, @options)
    assert_success response
    assert_equal 'Approved', response.message
  end

  def test_successful_purchase_with_soft_descriptor_hash
    assert response = @gateway.purchase(
      @amount, @credit_card, @options.merge(
        soft_descriptors: {
          merchant_name: 'Merch',
          product_description: 'Description',
          merchant_email: 'email@example',
        }
      )
    )
    assert_success response
    assert_equal 'Approved', response.message
  end

  def test_successful_purchase_with_level_2_data
    response = @gateway.purchase(@amount, @credit_card, @options.merge(level_2_data: @level_2_options))
    assert_success response
    assert_equal 'Approved', response.message
  end

  def test_successful_purchase_with_visa_network_tokenization_credit_card_with_eci
    network_card = network_tokenization_credit_card('4788250000028291',
      payment_cryptogram: "BwABB4JRdgAAAAAAiFF2AAAAAAA=",
      transaction_id: "BwABB4JRdgAAAAAAiFF2AAAAAAA=",
      verification_value: '111',
      brand: 'visa',
      eci: '5'
    )
    assert response = @gateway.purchase(3000, network_card, @options)
    assert_success response
    assert_equal 'Approved', response.message
    assert_false response.authorization.blank?
  end

  def test_successful_purchase_with_master_card_network_tokenization_credit_card
    network_card = network_tokenization_credit_card('4788250000028291',
      payment_cryptogram: "BwABB4JRdgAAAAAAiFF2AAAAAAA=",
      transaction_id: "BwABB4JRdgAAAAAAiFF2AAAAAAA=",
      verification_value: '111',
      brand: 'master'
    )
    assert response = @gateway.purchase(3000, network_card, @options)
    assert_success response
    assert_equal 'Approved', response.message
    assert_false response.authorization.blank?
  end

  def test_successful_purchase_with_american_express_network_tokenization_credit_card
    network_card = network_tokenization_credit_card('4788250000028291',
      payment_cryptogram: "BwABB4JRdgAAAAAAiFF2AAAAAAA=",
      transaction_id: "BwABB4JRdgAAAAAAiFF2AAAAAAA=",
      verification_value: '111',
      brand: 'american_express'
    )
    assert response = @gateway.purchase(3000, network_card, @options)
    assert_success response
    assert_equal 'Approved', response.message
    assert_false response.authorization.blank?
  end

  def test_successful_purchase_with_discover_network_tokenization_credit_card
    network_card = network_tokenization_credit_card('4788250000028291',
      payment_cryptogram: "BwABB4JRdgAAAAAAiFF2AAAAAAA=",
      transaction_id: "BwABB4JRdgAAAAAAiFF2AAAAAAA=",
      verification_value: '111',
      brand: 'discover'
    )
    assert response = @gateway.purchase(3000, network_card, @options)
    assert_success response
    assert_equal 'Approved', response.message
    assert_false response.authorization.blank?
  end

  # Amounts of x.01 will fail
  def test_unsuccessful_purchase
    assert response = @gateway.purchase(101, @declined_card, @options)
    assert_failure response
    assert_equal 'Invalid CC Number', response.message
  end

  def test_authorize_and_capture
    amount = @amount
    assert auth = @gateway.authorize(amount, @credit_card, @options.merge(:order_id => '2'))
    assert_success auth
    assert_equal 'Approved', auth.message
    assert auth.authorization
    assert capture = @gateway.capture(amount, auth.authorization, :order_id => '2')
    assert_success capture
  end

  def test_successful_authorize_and_capture_with_level_2_data
    auth = @gateway.authorize(@amount, @credit_card, @options.merge(level_2_data: @level_2_options))
    assert_success auth
    assert_equal "Approved", auth.message

    capture = @gateway.capture(@amount, auth.authorization, @options.merge(level_2_data: @level_2_options))
    assert_success capture
  end

  def test_authorize_and_void
    assert auth = @gateway.authorize(@amount, @credit_card, @options.merge(:order_id => '2'))
    assert_success auth
    assert_equal 'Approved', auth.message
    assert auth.authorization
    assert void = @gateway.void(auth.authorization, :order_id => '2')
    assert_success void
  end

  def test_refund
    amount = @amount
    assert response = @gateway.purchase(amount, @credit_card, @options)
    assert_success response
    assert response.authorization
    assert refund = @gateway.refund(amount, response.authorization, @options)
    assert_success refund
  end

  def test_successful_refund_with_level_2_data
    amount = @amount
    assert response = @gateway.purchase(amount, @credit_card, @options.merge(level_2_data: @level_2_options))
    assert_success response
    assert response.authorization
    assert refund = @gateway.refund(amount, response.authorization, @options.merge(level_2_data: @level_2_options))
    assert_success refund
  end

  def test_failed_capture
    assert response = @gateway.capture(@amount, '')
    assert_failure response
    assert_equal 'Bad data error', response.message
  end

  # == Certification Tests

  # ==== Section A
  def test_auth_only_transactions
    # `each` instead of the old `for` loop (which leaks its loop variable).
    @test_suite.each do |suite|
      amount = suite[:amount]
      card = credit_card(@cards[suite[:card]], :verification_value => suite[:CVD])
      @options[:address].merge!(:zip => suite[:AVSzip])
      assert response = @gateway.authorize(amount, card, @options)
      assert_kind_of Response, response

      # Makes it easier to fill in cert sheet if you print these to the command line
      # puts "Auth/Resp Code => " + (response.params["auth_code"] || response.params["resp_code"])
      # puts "AVS Resp => " + response.params["avs_resp_code"]
      # puts "CVD Resp => " + response.params["cvv2_resp_code"]
      # puts "TxRefNum => " + response.params["tx_ref_num"]
      # puts
    end
  end

  # ==== Section B
  def test_auth_capture_transactions
    @test_suite.each do |suite|
      amount = suite[:amount]
      card = credit_card(@cards[suite[:card]], :verification_value => suite[:CVD])
      # Mutate the shared options' zip directly, as Section A does (the old
      # `options = @options` alias was redundant — same object).
      @options[:address].merge!(:zip => suite[:AVSzip])
      assert response = @gateway.purchase(amount, card, @options)
      assert_kind_of Response, response

      # Makes it easier to fill in cert sheet if you print these to the command line
      # puts "Auth/Resp Code => " + (response.params["auth_code"] || response.params["resp_code"])
      # puts "AVS Resp => " + response.params["avs_resp_code"]
      # puts "CVD Resp => " + response.params["cvv2_resp_code"]
      # puts "TxRefNum => " + response.params["tx_ref_num"]
      # puts
    end
  end

  # ==== Section C
  def test_mark_for_capture_transactions
    [[:visa, 3000], [:mc, 4100], [:amex, 105500], [:ds, 1000], [:jcb, 2900]].each do |suite|
      amount = suite[1]
      card = credit_card(@cards[suite[0]])
      assert auth_response = @gateway.authorize(amount, card, @options)
      assert capt_response = @gateway.capture(amount, auth_response.authorization)
      assert_kind_of Response, capt_response

      # Makes it easier to fill in cert sheet if you print these to the command line
      # puts "Auth/Resp Code => " + (auth_response.params["auth_code"] || auth_response.params["resp_code"])
      # puts "TxRefNum => " + capt_response.params["tx_ref_num"]
      # puts
    end
  end

  # ==== Section D
  def test_refund_transactions
    [[:visa, 1200], [:mc, 1100], [:amex, 105500], [:ds, 1000], [:jcb, 2900]].each do |suite|
      amount = suite[1]
      card = credit_card(@cards[suite[0]])
      assert purchase_response = @gateway.purchase(amount, card, @options)
      assert refund_response = @gateway.refund(amount, purchase_response.authorization, @options)
      assert_kind_of Response, refund_response

      # Makes it easier to fill in cert sheet if you print these to the command line
      # puts "Auth/Resp Code => " + (purchase_response.params["auth_code"] || purchase_response.params["resp_code"])
      # puts "TxRefNum => " + credit_response.params["tx_ref_num"]
      # puts
    end
  end

  # ==== Section F
  def test_void_transactions
    [3000, 105500, 2900].each do |amount|
      assert auth_response = @gateway.authorize(amount, @credit_card, @options)
      assert void_response = @gateway.void(auth_response.authorization, @options.merge(:transaction_index => 1))
      assert_kind_of Response, void_response

      # Makes it easier to fill in cert sheet if you print these to the command line
      # puts "TxRefNum => " + void_response.params["tx_ref_num"]
      # puts
    end
  end

  def test_successful_verify
    response = @gateway.verify(@credit_card, @options)
    assert_success response
    assert_equal 'Approved', response.message
  end

  def test_failed_verify
    response = @gateway.verify(@declined_card, @options)
    assert_failure response
    assert_equal 'Invalid CC Number', response.message
  end

  # Sensitive values must never appear in the scrubbed wire transcript.
  def test_transcript_scrubbing
    transcript = capture_transcript(@gateway) do
      @gateway.purchase(@amount, @credit_card, @options)
    end
    transcript = @gateway.scrub(transcript)

    assert_scrubbed(@credit_card.number, transcript)
    assert_scrubbed(@credit_card.verification_value, transcript)
    assert_scrubbed(@gateway.options[:password], transcript)
    assert_scrubbed(@gateway.options[:login], transcript)
    assert_scrubbed(@gateway.options[:merchant_id], transcript)
  end
end
| 37.245847 | 116 | 0.692356 |
e29f1c3f74f7df6a24f31eb22da707844f805fbb | 576 | # frozen_string_literal: true
class OpenAccessEbookTrustMailer < ApplicationMailer
  default from: "[email protected]"

  # Emails last month's Fulcrum reports for the OAeBU Data Trust as a zip
  # attachment to the configured recipients, then removes the temp file.
  #
  # @param tmp_zip [Tempfile] zip of the monthly reports; unlinked after send
  def send_report(tmp_zip)
    @month_year = Time.zone.now.prev_month.strftime("%B %Y")
    @email_subject = "Monthly Fulcrum reports for OAeBU Data Trust"
    attachment_name = "Monthly Fulcrum_Reports #{@month_year}.zip".tr(" ", "_")
    # NOTE(review): File.read applies the default external encoding; for a
    # binary zip, File.binread may be safer — confirm.
    attachments[attachment_name] = File.read(tmp_zip)
    mail(to: Settings.open_access_ebook_trust_emails.to, cc: Settings.open_access_ebook_trust_emails.cc, subject: @email_subject)
    tmp_zip.unlink
  end
end
| 36 | 129 | 0.758681 |
01501374fcc839d30c8c9ac874592db675e60d81 | 734 | class Users::RegistrationsController < Devise::RegistrationsController
def create
return
if resource = User.where(id: session['devise.user_id']).first
else
build_resource
end
build_resource
if resource.save
if resource.respond_to?(:confirm!) && !resource.confirmed?
confirmation_hash = Digest::SHA2.hexdigest(resource.password_salt + resource.confirmation_token)
redirect_to awaiting_confirmation_path(I18n.locale, resource, confirmation_hash)
else
set_flash_message :notice, :signed_up
sign_in_and_redirect(resource_name, resource)
end
else
clean_up_passwords(resource)
render_with_scope :new
end
end
end
| 30.583333 | 105 | 0.694823 |
f7d0c915c85b0f870e5de9fa8331f1e52fe84d07 | 8,970 | require "builder"
module Jenkins
class JobConfigBuilder
attr_accessor :job_type
attr_accessor :steps, :rubies
attr_accessor :scm, :public_scm, :scm_branches
attr_accessor :scm, :public_scm, :git_branches
attr_accessor :assigned_node, :node_labels # TODO just one of these
attr_accessor :envfile
InvalidTemplate = Class.new(StandardError)
VALID_JOB_TEMPLATES = %w[none rails rails3 ruby rubygem]
# +job_type+ - template of default steps to create with the job
# +steps+ - array of [:method, cmd], e.g. [:build_shell_step, "bundle initial"]
# - Default is based on +job_type+.
# +scm+ - URL to the repository. Currently only support git URLs.
# +public_scm+ - convert the +scm+ URL to a publicly accessible URL for the Jenkins job config.
# +scm_branches+ - array of branches to run builds. Default: ['master']
# +rubies+ - list of RVM rubies to run tests (via Jenkins Axes).
# +assigned_node+ - restrict this job to running on slaves with these labels (space separated)
def initialize(job_type = :ruby, &block)
self.job_type = job_type.to_s if job_type
yield self
self.scm_branches ||= ["master"]
raise InvalidTemplate unless VALID_JOB_TEMPLATES.include?(job_type.to_s)
end
def builder
b = Builder::XmlMarkup.new :indent => 2
b.instruct!
b.tag!(matrix_project? ? "matrix-project" : "project") do
b.actions
b.description
b.keepDependencies false
b.properties
build_scm b
b.assignedNode assigned_node if assigned_node
b.canRoam !assigned_node
b.disabled false
b.blockBuildWhenUpstreamBuilding false
b.triggers :class => "vector"
b.concurrentBuild false
build_axes b if matrix_project?
build_steps b
b.publishers
build_wrappers b
b.runSequentially false if matrix_project?
end
end
def to_xml
builder.to_s
end
protected
# <scm class="hudson.plugins.git.GitSCM"> ... </scm>
def build_scm(b)
if scm && scm =~ /git/
scm_url = public_scm ? public_only_git_scm(scm) : scm
b.scm :class => "hudson.plugins.git.GitSCM" do
b.configVersion 1
b.remoteRepositories do
b.tag! "org.spearce.jgit.transport.RemoteConfig" do
b.string "origin"
b.int 5
b.string "fetch"
b.string "+refs/heads/*:refs/remotes/origin/*"
b.string "receivepack"
b.string "git-upload-pack"
b.string "uploadpack"
b.string "git-upload-pack"
b.string "url"
b.string scm_url
b.string "tagopt"
b.string
end
end
if scm_branches
b.branches do
scm_branches.each do |branch|
b.tag! "hudson.plugins.git.BranchSpec" do
b.name branch
end
end
end
end
b.localBranch
b.mergeOptions
b.recursiveSubmodules false
b.doGenerateSubmoduleConfigurations false
b.authorOrCommitter false
b.clean false
b.wipeOutWorkspace false
b.buildChooser :class => "hudson.plugins.git.util.DefaultBuildChooser"
b.gitTool "Default"
b.submoduleCfg :class => "list"
b.relativeTargetDir
b.excludedRegions
b.excludedUsers
end
end
end
def matrix_project?
!(rubies.blank? && node_labels.blank?)
end
# <hudson.matrix.TextAxis>
# <name>RUBY_VERSION</name>
# <values>
# <string>1.8.7</string>
# <string>1.9.2</string>
# <string>rbx-head</string>
# <string>jruby</string>
# </values>
# </hudson.matrix.TextAxis>
# <hudson.matrix.LabelAxis>
# <name>label</name>
# <values>
# <string>1.8.7</string>
# <string>ubuntu</string>
# </values>
# </hudson.matrix.LabelAxis>
# Emits the <axes> section for matrix builds: a TextAxis over RUBY_VERSION
# (when rubies were given) followed by a LabelAxis over node labels (when
# labels were given). Emission order matches the original hand-written form.
def build_axes(b)
  axis_specs = [
    ["hudson.matrix.TextAxis",  "RUBY_VERSION", rubies],
    ["hudson.matrix.LabelAxis", "label",        node_labels]
  ]
  b.axes do
    axis_specs.each do |tag, axis_name, entries|
      next if entries.blank?
      b.tag! tag do
        b.name axis_name
        b.values do
          entries.each { |entry| b.string entry }
        end
      end
    end
  end
end
# Example:
# <buildWrappers>
# <hudson.plugins.envfile.EnvFileBuildWrapper>
# <filePath>/path/to/env/file</filePath>
# </hudson.plugins.envfile.EnvFileBuildWrapper>
# </buildWrappers>
# Emits the <buildWrappers> section. When env files were configured, each
# one becomes an EnvFileBuildWrapper <filePath>; otherwise the element is
# left empty.
def build_wrappers(b)
  unless envfile
    b.buildWrappers
    return
  end
  # Normalize a single path into a one-element list before emitting.
  # (The assignment writes nothing to the XML stream, so hoisting it out
  # of the buildWrappers block produces identical output.)
  self.envfile = [envfile] unless envfile.is_a?(Array)
  b.buildWrappers do
    b.tag! "hudson.plugins.envfile.EnvFileBuildWrapper" do
      envfile.each { |path| b.filePath path }
    end
  end
end
# The important sequence of steps that are run to process a job build.
# Can be defaulted by the +job_type+ using +default_steps(job_type)+,
# or customized via +steps+ array.
# Emits the <builders> section. Each step is a [method_name, command]
# pair; the method name selects one of the build_*_step emitters below.
def build_steps(b)
b.builders do
# Fall back to the per-job-type defaults when no explicit steps given.
self.steps ||= default_steps(job_type)
steps.each do |step|
method, cmd = step
send(method.to_sym, b, cmd) # e.g. build_shell_step(b, "bundle install")
end
end
end
# Returns the default [method, command] step pairs for a job type.
# Rails jobs bootstrap config/database.yml and the database before running
# rake; plain ruby/rubygems jobs just bundle and rake; unknown job types
# get a placeholder echo step. When matrix rubies are configured, RVM
# selection steps are prepended.
# NOTE(review): the heredoc bodies are embedded verbatim (and post-processed
# with gsub(/^ /, '')); do not re-indent them.
def default_steps(job_type)
steps = case job_type.to_sym
when :rails, :rails3
[
[:build_shell_step, "bundle install"],
[:build_ruby_step, <<-RUBY.gsub(/^ /, '')],
unless File.exist?("config/database.yml")
require 'fileutils'
example = Dir["config/database*"].first
puts "Using \#{example} for config/database.yml"
FileUtils.cp example, "config/database.yml"
end
RUBY
[:build_shell_step, "bundle exec rake db:create:all"],
[:build_shell_step, <<-RUBY.gsub(/^ /, '')],
if [ -f db/schema.rb ]; then
bundle exec rake db:schema:load
else
bundle exec rake db:migrate
fi
RUBY
[:build_shell_step, "bundle exec rake"]
]
when :ruby, :rubygems
[
[:build_shell_step, "bundle install"],
[:build_shell_step, "bundle exec rake"]
]
else
[ [:build_shell_step, 'echo "THERE ARE NO STEPS! Except this one..."'] ]
end
rubies.blank? ? steps : default_rvm_steps + steps
end
# Shell steps prepended to matrix builds so each cell runs under the
# RUBY_VERSION chosen by its axis, inside a per-ruby gemset.
def default_rvm_steps
  rvm_commands = [
    "rvm $RUBY_VERSION",
    "rvm gemset create ruby-$RUBY_VERSION && rvm gemset use ruby-$RUBY_VERSION"
  ]
  rvm_commands.map { |command| [:build_shell_step, command] }
end
# <hudson.tasks.Shell>
# <command>echo 'THERE ARE NO STEPS! Except this one...'</command>
# </hudson.tasks.Shell>
# Emits a <hudson.tasks.Shell> step wrapping the given shell command.
# NOTE(review): gsub("&", '&') below is a no-op as displayed; the pattern or
# replacement was most likely an XML entity (e.g. "&amp;") that was lost when
# this file was extracted -- confirm against the upstream source.
def build_shell_step(b, command)
b.tag! "hudson.tasks.Shell" do
b.command command.to_xs.gsub("&", '&') #.gsub(%r{"}, '"').gsub(%r{'}, ''')
end
end
# <hudson.plugins.ruby.Ruby>
# <command>unless File.exist?("config/database.yml")
# require 'fileutils'
# example = Dir["config/database*"].first
# puts "Using #{example} for config/database.yml"
# FileUtils.cp example, "config/database.yml"
# end</command>
# </hudson.plugins.ruby.Ruby>
# Emits a <hudson.plugins.ruby.Ruby> step whose <command> body is the given
# ruby script, XML-escaped via to_xs.
# NOTE(review): the gsub replacements below appear to be identity mappings;
# the originals were presumably XML entities (&quot; / &apos;) mangled during
# extraction -- verify before relying on this escaping.
def build_ruby_step(b, command)
b.tag! "hudson.plugins.ruby.Ruby" do
b.command do
b << command.to_xs.gsub(%r{"}, '"').gsub(%r{'}, ''')
end
end
end
# Usage: build_ruby_step b, "db:schema:load"
#
# <hudson.plugins.rake.Rake>
# <rakeInstallation>(Default)</rakeInstallation>
# <rakeFile></rakeFile>
# <rakeLibDir></rakeLibDir>
# <rakeWorkingDir></rakeWorkingDir>
# <tasks>db:schema:load</tasks>
# <silent>false</silent>
# </hudson.plugins.rake.Rake>
# Emits a <hudson.plugins.rake.Rake> step running the given task list with
# the default rake installation.
def build_rake_step(b, tasks)
b.tag! "hudson.plugins.rake.Rake" do
b.rakeInstallation "(Default)"
# Empty elements fall back to the workspace root / default Rakefile.
b.rakeFile
b.rakeLibDir
b.rakeWorkingDir
b.tasks tasks
b.silent false
end
end
# Converts [email protected]:drnic/newgem.git into git://github.com/drnic/newgem.git
def public_only_git_scm(scm_url)
if scm_url =~ /git@([\w\-_.]+):(.+)\.git/
"git://#{$1}/#{$2}.git"
else
scm_url
end
end
end
end | 31.254355 | 104 | 0.56466 |
f86ffcf1d7f0a04d25485ac5780c1cbf67912615 | 119 | class AddParentToProjects < ActiveRecord::Migration
# Reversible migration step: adds an integer `parent` column to projects.
# NOTE(review): the column is named `parent` rather than the conventional
# `parent_id`, and carries no index -- confirm that is intentional.
def change
add_column :projects, :parent, :integer
end
end
| 19.833333 | 51 | 0.764706 |
f7a0fda47696cf1f65e2c5ac0374e6d14f181440 | 1,665 | require 'test/test_helper'
require 'mongo/gridfs'
class ChunkTest < Test::Unit::TestCase
include Mongo
include GridFS
@@db = Connection.new(ENV['MONGO_RUBY_DRIVER_HOST'] || 'localhost',
ENV['MONGO_RUBY_DRIVER_PORT'] || Connection::DEFAULT_PORT).db('ruby-mongo-utils-test')
@@files = @@db.collection('gridfs.files')
@@chunks = @@db.collection('gridfs.chunks')
def setup
@@chunks.remove
@@files.remove
@f = GridStore.new(@@db, 'foobar', 'w')
@c = @f.instance_variable_get('@curr_chunk')
end
def teardown
@@chunks.remove
@@files.remove
@@db.error
end
def test_pos
assert_equal 0, @c.pos
assert @c.eof? # since data is empty
b = ByteBuffer.new
3.times { |i| b.put(i) }
c = Chunk.new(@f, 'data' => b)
assert !c.eof?
end
def test_getc
b = ByteBuffer.new
3.times { |i| b.put(i) }
c = Chunk.new(@f, 'data' => b)
assert !c.eof?
assert_equal 0, c.getc
assert !c.eof?
assert_equal 1, c.getc
assert !c.eof?
assert_equal 2, c.getc
assert c.eof?
end
def test_putc
3.times { |i| @c.putc(i) }
@c.pos = 0
assert [email protected]?
assert_equal 0, @c.getc
assert [email protected]?
assert_equal 1, @c.getc
assert [email protected]?
assert_equal 2, @c.getc
assert @c.eof?
end
def test_truncate
10.times { |i| @c.putc(i) }
assert_equal 10, @c.size
@c.pos = 3
@c.truncate
assert_equal 3, @c.size
@c.pos = 0
assert [email protected]?
assert_equal 0, @c.getc
assert [email protected]?
assert_equal 1, @c.getc
assert [email protected]?
assert_equal 2, @c.getc
assert @c.eof?
end
end
| 20.060241 | 110 | 0.584384 |
6a0e7e9e9577e65255df787fd8327ec88bbe47bd | 3,819 | class Rpm < Formula
desc "Standard unix software packaging tool"
homepage "https://rpm.org/"
url "http://ftp.rpm.org/releases/rpm-4.15.x/rpm-4.15.0.tar.bz2"
sha256 "1e06723b13591e57c99ebe2006fb8daddc4cf72efb366a64a34673ba5f61c201"
version_scheme 1
bottle do
sha256 "a758f1a11bb8b5c794d6566988cc0a1d61aa5f7b0b66b6a5b0fe33330d26aff6" => :catalina
sha256 "282452168e2b1c1635009bcd8a5dcc27580f2bcaf738f6b37fd20c383e0e17de" => :mojave
sha256 "0e1ccf75206784980f892425c2d96d5ff2b7433953c49296948eda35506e2e9f" => :high_sierra
sha256 "4fd7478f412b3587a68d6e874c91febd8cccbae1897bb7785cc84d25b84800aa" => :x86_64_linux
end
depends_on "berkeley-db"
depends_on "gettext"
depends_on "libarchive"
depends_on "libmagic"
depends_on "libomp"
depends_on "lua"
depends_on "[email protected]"
depends_on "pkg-config"
depends_on "popt"
depends_on "xz"
depends_on "zstd"
# Builds and installs rpm into the Homebrew prefix.
def install
# lua is keg-only, so expose its pkg-config file and headers explicitly.
ENV.prepend_path "PKG_CONFIG_PATH", Formula["lua"].opt_libexec/"lib/pkgconfig"
ENV.append "CPPFLAGS", "-I#{Formula["lua"].opt_include}/lua"
ENV.append "LDFLAGS", "-lomp"
# only rpm should go into HOMEBREW_CELLAR, not rpms built
inreplace ["macros.in", "platform.in"], "@prefix@", HOMEBREW_PREFIX
# ensure that pkg-config binary is found for dep generators
inreplace "scripts/pkgconfigdeps.sh",
"/usr/bin/pkg-config", Formula["pkg-config"].opt_bin/"pkg-config"
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--localstatedir=#{var}",
"--sharedstatedir=#{var}/lib",
"--sysconfdir=#{etc}",
"--with-path-magic=#{HOMEBREW_PREFIX}/share/misc/magic",
"--enable-nls",
"--disable-plugins",
"--with-external-db",
"--with-crypto=openssl",
"--without-apidocs",
"--with-vendor=homebrew"
system "make", "install"
end
# Creates the rpm database directory and, on macOS only, points the default
# GPG path at the Homebrew-installed gpg binary.
def post_install
  (var/"lib/rpm").mkpath
  return unless OS.mac?
  # Attempt to fix expected location of GPG to a sane default.
  inreplace lib/"rpm/macros", "/usr/bin/gpg2", HOMEBREW_PREFIX/"bin/gpg"
end
# Minimal noarch spec file (as a string) used by the `test do` block below:
# it installs a single empty file at /tmp/test.
def test_spec
<<~EOS
Summary: Test package
Name: test
Version: 1.0
Release: 1
License: Public Domain
Group: Development/Tools
BuildArch: noarch
%description
Trivial test package
%prep
%build
%install
mkdir -p $RPM_BUILD_ROOT/tmp
touch $RPM_BUILD_ROOT/tmp/test
%files
/tmp/test
%changelog
EOS
end
# Expands an rpm macro (e.g. "%_topdir") with the installed binary and
# returns the result as a Pathname.
def rpmdir(macro)
  expanded = `#{bin}/rpm --eval #{macro}`.chomp
  Pathname.new(expanded)
end
# Smoke test: initialize an rpm database, build the trivial spec above, and
# query the resulting package.
test do
(testpath/"rpmbuild").mkpath
# Falsely flagged by RuboCop.
# rubocop:disable Style/FormatStringToken
(testpath/".rpmmacros").write <<~EOS
%_topdir #{testpath}/rpmbuild
%_tmppath %{_topdir}/tmp
EOS
# rubocop:enable Style/FormatStringToken
# Querying with an empty db path initializes the database files.
system "#{bin}/rpm", "-vv", "-qa", "--dbpath=#{testpath}/var/lib/rpm"
assert_predicate testpath/"var/lib/rpm/Packages", :exist?,
"Failed to create 'Packages' file!"
rpmdir("%_builddir").mkpath
specfile = rpmdir("%_specdir")+"test.spec"
specfile.write(test_spec)
# Build both source and binary packages, then verify and query them.
system "#{bin}/rpmbuild", "-ba", specfile
assert_predicate rpmdir("%_srcrpmdir")/"test-1.0-1.src.rpm", :exist?
assert_predicate rpmdir("%_rpmdir")/"noarch/test-1.0-1.noarch.rpm", :exist?
system "#{bin}/rpm", "-qpi", "--dbpath=#{testpath}/var/lib/rpm",
rpmdir("%_rpmdir")/"noarch/test-1.0-1.noarch.rpm"
end
end
| 32.092437 | 94 | 0.613511 |
e90a7fb0c4506978f991338e5f9da9c44febaa4a | 1,086 | class Tcpreplay < Formula
desc "Replay saved tcpdump files at arbitrary speeds"
homepage "https://tcpreplay.appneta.com/"
url "https://github.com/appneta/tcpreplay/releases/download/v4.2.6/tcpreplay-4.2.6.tar.gz"
sha256 "043756c532dab93e2be33a517ef46b1341f7239278a1045ae670041dd8a4531d"
bottle do
cellar :any
sha256 "9be61ec3aeeac7be8cd51225d5914a7ba7ee8f0c9fbd4393e452f6b9447a53c7" => :high_sierra
sha256 "569bdb4ac12e4ff62c723b1fdabad4b037c54423a70742306ba852f9bc43e25d" => :sierra
sha256 "b5ba1668dddf52946866c866bc1ba2ab2983b67d99c69b6c41fabe91e816139a" => :el_capitan
sha256 "3eaba6e1af68c8af3f7d1d0a15c2563b178368a74a760e3fafa5b3c4774f9129" => :yosemite
end
depends_on "libdnet"
# Standard autotools build; dynamic linking is enabled explicitly so the
# binaries pick up the brewed libdnet at runtime.
def install
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--enable-dynamic-link"
system "make", "install"
end
test do
system bin/"tcpreplay", "--version"
end
end
| 36.2 | 93 | 0.695212 |
3880e737701806fb63af049b463864e43d6a237a | 2,158 | # encoding: UTF-8
control 'V-219229' do
title "The Ubuntu operating system must permit only authorized accounts
ownership of the audit log files."
desc "If audit information were to become compromised, then forensic
analysis and discovery of the true source of potentially malicious system
activity is impossible to achieve.
To ensure the veracity of audit information, the operating system must
protect audit information from unauthorized modification.
Audit information includes all information (e.g., audit records, audit
settings, audit reports) needed to successfully audit information system
activity.
"
desc 'rationale', ''
desc 'check', "
Verify that the audit log files are owned by \"root\" account.
First determine where the audit logs are stored with the following command:
# sudo grep -iw log_file /etc/audit/auditd.conf
log_file = /var/log/audit/audit.log
Using the path of the directory containing the audit logs, check if the
audit log files are owned by the \"root\" user by using the following command:
# sudo stat -c \"%n %U\" /var/log/audit/*
/var/log/audit/audit.log root
If the audit log files are owned by an user other than \"root\", this is a
finding.
"
desc 'fix', "
Configure the audit log files to be owned by \"root\" user.
First determine where the audit logs are stored with the following command:
# sudo grep -iw log_file /etc/audit/auditd.conf
log_file = /var/log/audit/audit.log
Using the path of the directory containing the audit logs, configure the
audit log files to be owned by \"root\" user by using the following command:
# sudo chown root /var/log/audit/*
"
impact 0.5
tag severity: 'medium'
tag gtitle: 'SRG-OS-000058-GPOS-00028'
tag satisfies: ['SRG-OS-000058-GPOS-00028', 'SRG-OS-000057-GPOS-00027']
tag gid: 'V-219229'
tag rid: 'SV-219229r508662_rule'
tag stig_id: 'UBTU-18-010306'
tag fix_id: 'F-20953r305016_fix'
tag cci: ['V-100685', 'SV-109789', 'CCI-000162', 'CCI-000163']
tag nist: ['AU-9', 'AU-9']
describe file(auditd_conf.log_file) do
it { should be_owned_by('root') }
end
end | 33.71875 | 79 | 0.716867 |
6155f03183d6efb9b5a6a213013fc8e71272365b | 1,072 | require 'cocoapods'
require 'cocoapods-mtxx-bin/gem_version'

# Load the CocoaPods monkey-patches only on versions this plugin supports.
if Pod.match_version?('~> 1.4')
  # Load order matters: this mirrors the original explicit require list.
  %w[
    podfile
    installation_options
    specification
    path_source
    analyzer
    installer
    podfile_generator
    pod_source_installer
    linter
    resolver
    source
    validator
    acknowledgements
    sandbox_analyzer
    podspec_finder
    file_accessor
    pod_target_installer
    target_validator
    gen
  ].each { |patch| require "cocoapods-mtxx-bin/native/#{patch}" }
end
| 41.230769 | 58 | 0.798507 |
5dbc8657656ecbc8666daf5985dff6c39281a051 | 2,375 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
if Rails.root.join('tmp/caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{2.days.seconds.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = true
config.action_mailer.delivery_method = :test
host = 'f3a6d99925f145729a2e5a1f31727373.vfs.cloud9.us-east-1.amazonaws.com' # ここをコピペすると失敗します。自分の環境に合わせてください。
config.action_mailer.default_url_options = { host: host, protocol: 'https' }
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
# Add Cloud9 origin for Action Cable requests.
config.action_cable.allowed_request_origins = [
'https://f3a6d99925f145729a2e5a1f31727373.vfs.cloud9.us-east-1.amazonaws.com' ]
config.web_console.whitelisted_ips = '118.0.140.29'
end
| 37.698413 | 111 | 0.765474 |
d5e7b23103b886b2340e8916ea75f2abcfdb7f2c | 2,105 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "gapic/common"
require "gapic/config"
require "gapic/config/method"
require "google/ads/google_ads/version"
require "google/ads/google_ads/v9/services/keyword_plan_campaign_keyword_service/credentials"
require "google/ads/google_ads/v9/services/keyword_plan_campaign_keyword_service/paths"
require "google/ads/google_ads/v9/services/keyword_plan_campaign_keyword_service/client"
module Google
module Ads
module GoogleAds
module V9
module Services
##
# Service to manage Keyword Plan campaign keywords. KeywordPlanCampaign is
# required to add the campaign keywords. Only negative keywords are supported.
# A maximum of 1000 negative keywords are allowed per plan. This includes both
# campaign negative keywords and ad group negative keywords.
#
# To load this service and instantiate a client:
#
# require "google/ads/google_ads/v9/services/keyword_plan_campaign_keyword_service"
# client = ::Google::Ads::GoogleAds::V9::Services::KeywordPlanCampaignKeywordService::Client.new
#
module KeywordPlanCampaignKeywordService
end
end
end
end
end
end
helper_path = ::File.join __dir__, "keyword_plan_campaign_keyword_service", "helpers.rb"
require "google/ads/google_ads/v9/services/keyword_plan_campaign_keyword_service/helpers" if ::File.file? helper_path
| 38.272727 | 117 | 0.738717 |
e2b6128c4664ce32f513cec8bc695217717eecc3 | 1,288 | class Application < TestApp
get '/slow_page' do
sleep 1
"<p>Loaded!</p>"
end
get '/slow_ajax_load' do
<<-HTML
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8"/>
<title>with_js</title>
<script src="/jquery.js" type="text/javascript" charset="utf-8"></script>
<script type="text/javascript">
$(function() {
$('#ajax_load').click(function() {
$('body').load('/slow_page');
return false;
});
});
</script>
</head>
<body>
<a href="#" id="ajax_load">Click me</a>
</body>
HTML
end
get '/user_agent_detection' do
request.user_agent
end
get '/app_domain_detection' do
"http://#{request.host_with_port}/app_domain_detection"
end
get '/page_with_javascript_error' do
<<-HTML
<head>
<script type="text/javascript">
$() // is not defined
</script>
</head>
<body>
</body>
HTML
end
get '/ie_test' do
<<-HTML
<body>
<!--[if IE 6]>
This is for InternetExplorer6
<![endif]-->
<!--[if IE 7]>
This is for InternetExplorer7
<![endif]-->
<!--[if IE 8]>
This is for InternetExplorer8
<![endif]-->
</body>
HTML
end
end
if $0 == __FILE__
Rack::Handler::Mongrel.run Application, :Port => 8070
end
| 18.666667 | 77 | 0.578416 |
b9dbf05b87438fa0a23f79ab1d3c3487d531d34e | 456 | name 'maven'
maintainer       'Chef Software, Inc.'
maintainer_email '[email protected]'
license          'Apache 2.0'
description      'Application cookbook which installs and configures Maven.'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version          '2.0.0'

# Cookbook dependencies with their version constraints.
{
  'libarchive' => '~> 0.6',
  'poise'      => '~> 2.2',
  'rc'         => '~> 1.1'
}.each { |cookbook, constraint| depends cookbook, constraint }

# Platforms this cookbook supports.
%w[centos debian fedora redhat ubuntu windows].each { |platform| supports platform }
| 24 | 72 | 0.739035 |
91ca0e017d90e075488709c033b002450029ad70 | 499 | module CampusSolutions
module FinancialAidExpiry
# Expires every Campus Solutions financial-aid related cache entry for the
# given user id (or for all users when uid is nil).
def self.expire(uid=nil)
  cached_feeds = [
    MyFinancialAidData,
    MyFinancialAidFundingSources,
    MyFinancialAidFundingSourcesTerm,
    FinancialAid::MyAidYears,
    FinancialAid::MyFinaidProfile,
    FinancialAid::MyFinancialAidSummary,
    FinancialAid::MyHousing,
    FinancialAid::MyTermsAndConditions,
    FinancialAid::MyTitle4
  ]
  cached_feeds.each { |feed| feed.expire uid }
end
end
| 24.95 | 44 | 0.669339 |
edef3731e45117ac53db63147b4198590b4bf23d | 67 | class PostController < ApplicationController
# Renders the implicit post/index view; no instance data is prepared here.
def index
end
end
| 13.4 | 44 | 0.80597 |
33a9b0debf3e4ce2e184b9c602f134ad2115cb98 | 846 | # Ensure we require the local version and not one we might have installed already
require File.join([File.dirname(__FILE__),'lib','uyirmei','version.rb'])
spec = Gem::Specification.new do |s|
s.name = 'uyirmei'
s.version = Uyirmei::VERSION
s.author = 'RC'
s.email = '[email protected]'
s.homepage = 'https://rcdexta.github.io/agaram'
s.platform = Gem::Platform::RUBY
s.summary = 'Experimental programming language in Tamizh'
s.files = `git ls-files`.split("
")
s.require_paths << 'lib'
s.rdoc_options << '--title' << 'uyirmei' << '--main' << 'README.rdoc' << '-ri'
s.bindir = 'bin'
s.executables << 'uyirmei'
s.add_development_dependency('rake')
s.add_development_dependency('rdoc')
s.add_development_dependency('pry-byebug')
s.add_development_dependency('aruba')
s.add_runtime_dependency('gli','2.16.0')
end
| 36.782609 | 81 | 0.695035 |
11604e83f0d6f0fea4acb24807ca0f056bb76df3 | 1,586 | require 'spec_helper'
require 'support/requests/request_helpers'
describe 'api' do
before(:each) do
@user = create(:user, :with_company)
@company = @user.companies.first
end
context 'invite two users' do
let(:email1) { '[email protected]' }
let(:email2) { '[email protected]' }
subject do
do_post invites_path,
params: { invite: { invitable_id: @company.id,
invitable_type: @company.class.name,
emails: [email1, email2] } },
**json_headers()
end
it 'returns json' do
sign_in_with @user
subject
expect(response.content_type).to eq('application/json')
end
it 'responds with success' do
sign_in_with @user
subject
expect(response).to have_http_status(201) # 201 is http status 'created'
end
it 'responds with json invites' do
sign_in_with @user
subject
invites = JSON.parse(response.body)
expect(invites.count).to be 2
expect(invites[0]['email']).to eq email1
expect(invites[1]['email']).to eq email2
expect(invites[0]['recipient_id']).to be nil
expect(invites[1]['recipient_id']).to be nil
expect(invites[0]['sender_id']).to eq @user.id
expect(invites[1]['sender_id']).to eq @user.id
expect(invites[0]['invitable_id']).to eq @company.id
expect(invites[1]['invitable_id']).to eq @company.id
expect(invites[0]['invitable_type']).to eq @company.class.name
expect(invites[1]['invitable_type']).to eq @company.class.name
end
end
end
| 31.098039 | 78 | 0.620429 |
03c73aaa074166ea4973c61cedfa1486995a0fbb | 250 | module Madmin
# Base controller all Madmin admin controllers inherit from.
class BaseController < ActionController::Base
# Pagination helpers used by resource index actions.
include Pagy::Backend

protect_from_forgery with: :exception

# Loads all the models for the sidebar
before_action do
Rails.application.eager_load!
end
end
end
| 19.230769 | 47 | 0.728 |
0377e50011a285fd73b4fbd1c96572d45bc7dcf9 | 114 | class Toy < ActiveRecord::Base
# Table uses a non-conventional primary key column.
self.primary_key = :toy_id
belongs_to :pet
# NOTE(review): eager (non-lambda) scope is Rails 3 style; Rails 4+ requires
# `scope :with_pet, -> { joins(:pet) }` -- confirm the target Rails version.
scope :with_pet, joins(:pet)
end
| 16.285714 | 30 | 0.719298 |
18b1e07ea0a1df5d701c61fdd4e28b2dcdb2cc84 | 10,428 | require 'miner/event/events'
# Triggers module
module Triggers
# Module logger
@@logger = nil
# ID -> instance maps
@@triggers = {}
# An event trigger
# Base class for all event triggers. Doubles as a factory: use
# Trigger.createFromJSON to build the concrete subclass named by a JSON
# definition's :trigger_id field.
class Trigger
  # id        - unique id of this trigger instance (from config)
  # triggerId - type name, e.g. "startup" or "timer"
  # filters   - raw filter hash from the JSON definition
  def initialize(id, triggerId, filters)
    @id = id
    @triggerId = triggerId
    @filters = filters
  end

  attr_reader :id, :triggerId, :filters

  # Builds the trigger subclass matching json[:trigger_id] (case-insensitive).
  # Returns nil -- after logging a warning -- when the id is missing or unknown.
  def self.createFromJSON(id, json)
    if (json.include? :trigger_id)
      triggerId = json[:trigger_id].downcase
      case triggerId
      when "startup"
        return StartupTrigger.new(id, triggerId, json[:filters])
      when "shutdown"
        return ShutdownTrigger.new(id, triggerId, json[:filters])
      when "switch_coin"
        return CoinSwitchTrigger.new(id, triggerId, json[:filters])
      when "start_mining"
        return StartMiningTrigger.new(id, triggerId, json[:filters])
      when "stop_mining"
        return StopMiningTrigger.new(id, triggerId, json[:filters])
      when "switch_algorithm"
        return AlgoSwitchTrigger.new(id, triggerId, json[:filters])
      when "timer"
        return TimerTrigger.new(id, triggerId, json[:filters])
      when "timeout"
        return TimeoutTrigger.new(id, triggerId, json[:filters])
      else
        # Typo fix: "Unkown" -> "Unknown".
        Triggers.logger.warn "Unknown trigger id '#{triggerId}' for trigger '#{id}'. It will not be created."
      end
    else
      Triggers.logger.warn "No trigger id for trigger '#{id}'. It will not be created."
    end
    return nil
  end

  # Hook: attach this trigger to a worker so +event+ fires when it activates.
  def addToWorker(worker, event)
  end

  # Hook: detach this trigger from a worker.
  def removeFromWorker(worker)
  end
end
# Mid-class for triggers activated by worker signals
# Mid-class for triggers activated by a named worker signal: subclasses pick
# the signal symbol and may massage the event variables via prepareVars.
class WorkerTrigger < Trigger
def initialize(id, triggerId, filters, signal)
super(id, triggerId, filters)
# The signal to expect
@signal = signal
end
# Add this trigger to a worker: when the worker emits @signal, the event
# fires with the (possibly adjusted) variable hash.
def addToWorker(worker, event)
worker.addListener(@signal, self) { |wkr, vars|
prepareVars(wkr, vars)
event.fire(worker, vars)
}
end
# Remove this trigger from a worker
def removeFromWorker(worker)
worker.removeListener(self)
end
# Override in subclasses to tweak vars before passing on
def prepareVars(worker, vars)
end
end
# Trigger that activates when worker starts
# Each subclass below binds WorkerTrigger to one worker signal and only adds
# a debug log line in prepareVars.
class StartupTrigger < WorkerTrigger
def initialize(id, triggerId, filters)
super(id, triggerId, filters, :startup)
end
# Override
def prepareVars(worker, vars)
Triggers.logger.debug {"Activating startup trigger on '#{worker.id}'"}
end
end
# Trigger that activates when worker stops
class ShutdownTrigger < WorkerTrigger
def initialize(id, triggerId, filters)
super(id, triggerId, filters, :shutdown)
end
# Override
def prepareVars(worker, vars)
Triggers.logger.debug {"Activating shutdown trigger on '#{worker.id}'"}
end
end
# Trigger that activates when worker switches coins
class CoinSwitchTrigger < WorkerTrigger
def initialize(id, triggerId, filters)
super(id, triggerId, filters, :switch_coin)
end
# Override
def prepareVars(worker, vars)
Triggers.logger.debug {"Activating coin switch trigger on '#{worker.id}'"}
end
end
# Trigger that activates when worker starts mining
class StartMiningTrigger < WorkerTrigger
def initialize(id, triggerId, filters)
super(id, triggerId, filters, :start_mining)
end
# Override
def prepareVars(worker, vars)
Triggers.logger.debug {"Activating start mining trigger on '#{worker.id}'"}
end
end
# Trigger that activates when worker stops mining
class StopMiningTrigger < WorkerTrigger
def initialize(id, triggerId, filters)
super(id, triggerId, filters, :stop_mining)
end
# Override
def prepareVars(worker, vars)
Triggers.logger.debug {"Activating stop mining trigger on '#{worker.id}'"}
end
end
# Trigger that activates when worker switches algorithm
class AlgoSwitchTrigger < WorkerTrigger
def initialize(id, triggerId, filters)
super(id, triggerId, filters, :switch_algo)
end
# Override
def prepareVars(worker, vars)
Triggers.logger.debug {"Activating algo switch trigger on '#{worker.id}'"}
end
end
# Trigger that activates on a timer
# Trigger that fires its event repeatedly, every @interval seconds, once the
# worker has started up.
class TimerTrigger < Trigger
  def initialize(id, triggerId, filters)
    super(id, triggerId, filters)

    # Parse the firing interval (seconds). Fix: Kernel#Float raises on bad
    # input rather than returning nil, so the original nil-check was dead and
    # a malformed interval crashed trigger loading. Rescue and warn instead.
    if (filters.include? :interval)
      @interval = begin
        Float(filters[:interval])
      rescue ArgumentError, TypeError
        nil
      end
      if (@interval == nil)
        Triggers.logger.warn "Invalid timer interval: #{filters[:interval]}"
      end
    else
      @interval = nil
      Triggers.logger.warn "Timer triggers require an interval"
    end
  end

  # Starts a background timer thread when the worker starts. Does nothing if
  # the interval failed to parse.
  def addToWorker(worker, event)
    return if @interval == nil
    # Typo fix: "Attching" -> "Attaching".
    Triggers.logger.debug {"Attaching worker '#{worker.id}' to timer '#{id}'"}
    # Wait for start event
    worker.addListener(:startup, self) {
      Triggers.logger.debug {"Timer '#{@id}' activated."}
      # Create timer thread
      Thread.new {
        while (true)
          begin
            # Wait for interval
            sleep @interval
            # Create variable data
            vars = {'TIMER.ID' => @id}
            worker.injectGlobalVars(vars)
            # fire event
            event.fire(worker, vars)
          rescue Exception => e
            Triggers.logger.error "Exception in timer thread"
            Triggers.logger.error e
            Triggers.logger.error e.backtrace.join("\n\t")
          end
        end
      }
    }
  end

  # Remove this trigger from a worker.
  # NOTE(review): this detaches the startup listener but does not stop an
  # already-running timer thread -- confirm whether that is intended.
  def removeFromWorker(worker)
    Triggers.logger.debug {"Removing worker '#{worker.id}' from timer '#{id}'"}
    worker.removeListener(self)
  end
end
# Trigger that activates on a timeout
# Trigger that fires its event once, @delay seconds after the worker starts.
class TimeoutTrigger < Trigger
  def initialize(id, triggerId, filters)
    super(id, triggerId, filters)

    # Parse the delay (seconds). Fix: Kernel#Float raises on bad input rather
    # than returning nil, so the original nil-check was dead and a malformed
    # delay crashed trigger loading. Rescue and warn instead.
    if (filters.include? :delay)
      @delay = begin
        Float(filters[:delay])
      rescue ArgumentError, TypeError
        nil
      end
      if (@delay == nil)
        Triggers.logger.warn "Invalid timer delay: #{filters[:delay]}"
      end
    else
      @delay = nil
      Triggers.logger.warn "Timer triggers require an delay"
    end
  end

  # Schedules a one-shot background thread when the worker starts. Does
  # nothing if the delay failed to parse.
  def addToWorker(worker, event)
    return if @delay == nil
    # Typo fix: "Attching" -> "Attaching".
    Triggers.logger.debug {"Attaching worker '#{worker.id}' to timeout '#{id}'"}
    # Wait for start event
    worker.addListener(:startup, self) {
      Triggers.logger.debug {"Timeout '#{@id}' activated."}
      # Create timeout thread
      Thread.new {
        begin
          # Wait for delay
          sleep @delay
          # Create variable data
          vars = {'TIMEOUT.ID' => @id}
          worker.injectGlobalVars(vars)
          # fire event
          event.fire(worker, vars)
        rescue Exception => e
          Triggers.logger.error "Exception in timeout thread"
          Triggers.logger.error e
          Triggers.logger.error e.backtrace.join("\n\t")
        end
      }
    }
  end

  # Remove this trigger from a worker (pending timeout threads still run).
  def removeFromWorker(worker)
    Triggers.logger.debug {"Removing worker '#{worker.id}' from timeout '#{id}'"}
    worker.removeListener(self)
  end
end
# Gets the events logger
# Lazily creates and memoizes the module-wide logger on first use.
def self.logger()
  @@logger ||= Log.createLogger("Event/Triggers")
end
# Gets a trigger instance by ID
# Gets a previously loaded trigger instance by id (String or Symbol).
def self.getTrigger(id)
return @@triggers[id.to_sym]
end
# Parses one trigger definition and registers it in the id -> instance map.
# Definitions with a bad/missing trigger_id yield nil and are skipped.
def self.loadTrigger(id, json)
  trigger = Trigger.createFromJSON(id, json)
  if (trigger != nil)
    # Fix: store under the symbolized id so lookups via getTrigger (which
    # calls id.to_sym) also succeed when a String id is supplied here.
    @@triggers[id.to_sym] = trigger
  end
end
# Loads every trigger definition found in the application config.
def self.loadTriggers()
Config.triggers.each {|id, json| self.loadTrigger(id, json)}
end
end | 34.876254 | 122 | 0.482643 |
61d7a4c0689be58bd1853c36ec3ccc456f0f69b7 | 2,561 | # frozen_string_literal: true
# Redmine - project management software
# Copyright (C) 2006-2019 Jean-Philippe Lang
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
require File.expand_path('../../../test_helper', __FILE__)
class Redmine::ApiTest::JsonpTest < Redmine::ApiTest::Base
fixtures :trackers
# With the setting disabled, a ?jsonp= callback must be ignored and a plain
# JSON body (with the JSON content type) returned.
def test_should_ignore_jsonp_callback_with_jsonp_disabled
with_settings :jsonp_enabled => '0' do
get '/trackers.json?jsonp=handler'
end
assert_response :success
assert_match %r{^\{"trackers":.+\}$}, response.body
assert_equal 'application/json; charset=utf-8', response.headers['Content-Type']
end
def test_jsonp_should_accept_callback_param
with_settings :jsonp_enabled => '1' do
get '/trackers.json?callback=handler'
end
assert_response :success
assert_match %r{^handler\(\{"trackers":.+\}\)$}, response.body
assert_equal 'application/javascript; charset=utf-8', response.headers['Content-Type']
end
def test_jsonp_should_accept_jsonp_param
with_settings :jsonp_enabled => '1' do
get '/trackers.json?jsonp=handler'
end
assert_response :success
assert_match %r{^handler\(\{"trackers":.+\}\)$}, response.body
assert_equal 'application/javascript; charset=utf-8', response.headers['Content-Type']
end
def test_jsonp_should_strip_invalid_characters_from_callback
with_settings :jsonp_enabled => '1' do
get '/trackers.json?callback=+-aA$1_.'
end
assert_response :success
assert_match %r{^aA1_.\(\{"trackers":.+\}\)$}, response.body
assert_equal 'application/javascript; charset=utf-8', response.headers['Content-Type']
end
def test_jsonp_without_callback_should_return_json
with_settings :jsonp_enabled => '1' do
get '/trackers.json?callback='
end
assert_response :success
assert_match %r{^\{"trackers":.+\}$}, response.body
assert_equal 'application/json; charset=utf-8', response.headers['Content-Type']
end
end
| 34.146667 | 88 | 0.75166 |
4a957d7bd5c82553a17c67470befe740d4883332 | 1,075 | require 'shellwords'
module SimCtl
  class Command
    module Create
      # Creates a device
      #
      # @param name [String] name of the new device
      # @param devicetype [SimCtl::DeviceType] device type of the new device
      # @param runtime [SimCtl::Runtime] runtime of the new device
      # @return [SimCtl::Device] the device that was created
      def create_device(name, devicetype, runtime)
        # Accept either model objects or lookup names for runtime/devicetype.
        runtime = runtime(name: runtime) unless runtime.is_a?(Runtime)
        devicetype = devicetype(name: devicetype) unless devicetype.is_a?(DeviceType)
        unless runtime.is_a?(Runtime)
          raise "Invalid runtime: #{runtime}"
        end
        unless devicetype.is_a?(DeviceType)
          raise "Invalid devicetype: #{devicetype}"
        end
        create_command = command_for('create', Shellwords.shellescape(name), devicetype.identifier, runtime.identifier)
        new_device = Executor.execute(create_command) { |udid| device(udid: udid) }
        # Creation is asynchronous: wait for the device to reach the shutdown
        # state and for its plist to exist on disk before returning it.
        new_device.wait { |candidate| candidate.state == :shutdown && File.exist?(candidate.path.device_plist) }
        new_device
      end
    end
  end
end
| 39.814815 | 112 | 0.671628 |
ac6cc053b6dc8d943ee99904c75800fa51c3edf7 | 3,077 | require 'test_helper'
# Unit tests for Geode::Measure: unit tagging via Numeric helpers, unit
# validation, arithmetic coercion rules, and unit conversions.
class MeasureTest < Minitest::Test
  def setup
    @subject = Geode::Measure
  end
  # Numeric#degree/#kilometer/... tag the value with the singular unit symbol.
  def test_numerics_return_new_instance_with_unit
    assert_equal :degree, 1.degree.unit
    assert_equal :kilometer, 1.kilometer.unit
    assert_equal :mile, 1.mile.unit
    assert_equal :radian, 1.radian.unit
  end
  def test_numerics_return_new_instance_with_given_value
    assert_equal 1.1, 1.1.degrees.value
    assert_equal 2.2, 2.2.kilometers.value
    assert_equal 3.3, 3.3.miles.value
    assert_equal 4.4, 4.4.radians.value
  end
  # Singular and plural unit names must construct equal measures.
  def test_it_accepts_both_singular_and_plural_units
    assert_equal @subject.new(1, :degree), @subject.new(1, :degrees)
    assert_equal @subject.new(1, :kilometer), @subject.new(1, :kilometers)
    assert_equal @subject.new(1, :mile), @subject.new(1, :miles)
    assert_equal @subject.new(1, :radian), @subject.new(1, :radians)
  end
  def test_it_rases_error_for_bad_units
    assert_raises(Geode::UnitError) { @subject.new(1, :not_a_unit) }
  end
  # to_s pluralizes the unit when the value is not 1.
  def test_it_outputs_string_with_unit
    assert_equal '1 degree', 1.degree.to_s
    assert_equal '2 kilometers', 2.kilometers.to_s
    assert_equal '3 miles', 3.miles.to_s
    assert_equal '4 radians', 4.radians.to_s
  end
  # Arithmetic converts the right operand to the left operand's unit;
  # the result keeps the left operand's unit.
  def test_it_casts_values_for_math_operations
    assert_equal (1.degree + 2.kilometers).unit, :degree
    assert_equal (1.kilometer - 2.miles).unit, :kilometer
    assert_equal (1.mile * 2.radians).unit, :mile
    assert_equal (1.radian / 2.degrees).unit, :radian
    assert_equal (3.degrees % 4.miles).unit, :degree
  end
  def test_it_rases_error_for_incompatible_types
    assert_raises(TypeError) { 1.degree + '1' }
    assert_raises(TypeError) { 1.kilometer - true }
    assert_raises(TypeError) { 1.mile * [2.radians] }
    assert_raises(TypeError) { 1.radian / { degrees: 2 } }
    assert_raises(TypeError) { 3.degrees % :four_miles }
    assert_raises(TypeError) { 2.kilometers == '9.942 furlongs' }
  end
  def test_it_converts_to_degrees_correctly
    assert_equal 1.degree, 1.degree.degrees
    assert_equal 0.008998244115858544.degrees, 1.kilometer.degrees
    assert_equal 0.014481270178392253.degrees, 1.mile.degrees
    assert_equal 57.29577951308232.degrees, 1.radian.degrees
  end
  def test_it_converts_to_kilometers_correctly
    assert_equal 111.13279292318774.kilometers, 1.degree.kilometers
    assert_equal 1.kilometer, 1.kilometer.kilometers
    assert_equal 1.609344.kilometers, 1.mile.kilometers
    assert_equal 6367.44.kilometers, 1.radian.kilometers
  end
  def test_it_converts_to_miles_correctly
    assert_equal 69.0547160353459.miles, 1.degree.miles
    assert_equal 0.621371192237334.miles, 1.kilometer.miles
    assert_equal 1.mile, 1.mile.miles
    assert_equal 3956.5437842996894.miles, 1.radian.miles
  end
  def test_it_converts_to_radians_correctly
    assert_equal 0.017453292519943295.radians, 1.degree.radians
    assert_equal 0.00015704898671993768.radians, 1.kilometer.radians
    assert_equal 0.0002527458444838114.radians, 1.mile.radians
    assert_equal 1.radian, 1.radian.radians
  end
end
| 36.2 | 74 | 0.753656 |
87e4df415124afac9387ff1fe6d6d9c87b7cc3d6 | 10,325 | class Pool < ApplicationRecord
class RevertError < Exception;
end
array_attribute :post_ids, parse: /\d+/, cast: :to_i
belongs_to_creator
validates :name, uniqueness: { case_sensitive: false, if: :name_changed? }
validates :name, length: { minimum: 1, maximum: 250 }
validates :description, length: { maximum: 10_000 }
validate :user_not_create_limited, on: :create
validate :user_not_limited, on: :update, if: :limited_attribute_changed?
validate :user_not_posts_limited, on: :update, if: :post_ids_changed?
validate :validate_name, if: :name_changed?
validates :category, inclusion: { :in => %w(series collection) }
validate :updater_can_change_category
validate :updater_can_remove_posts
validate :updater_can_edit_deleted
validate :validate_number_of_posts
before_validation :normalize_post_ids
before_validation :normalize_name
after_save :create_version
after_save :synchronize, if: :saved_change_to_post_ids?
after_create :synchronize!
before_destroy :remove_all_posts
attr_accessor :skip_sync
def limited_attribute_changed?
name_changed? || description_changed? || category_changed? || is_active_changed?
end
module SearchMethods
def for_user(id)
where("pools.creator_id = ?", id)
end
def deleted
where("pools.is_deleted = true")
end
def undeleted
where("pools.is_deleted = false")
end
def series
where("pools.category = ?", "series")
end
def collection
where("pools.category = ?", "collection")
end
def series_first
order(Arel.sql("(case pools.category when 'series' then 0 else 1 end), pools.name"))
end
def selected_first(current_pool_id)
return where("true") if current_pool_id.blank?
current_pool_id = current_pool_id.to_i
reorder(Arel.sql("(case pools.id when #{current_pool_id} then 0 else 1 end), pools.name"))
end
def name_matches(name)
name = normalize_name_for_search(name)
name = "*#{name}*" unless name =~ /\*/
where("lower(pools.name) like ? escape E'\\\\'", name.to_escaped_for_sql_like)
end
def default_order
order(updated_at: :desc)
end
def search(params)
q = super
if params[:name_matches].present?
q = q.name_matches(params[:name_matches])
end
q = q.attribute_matches(:description, params[:description_matches])
if params[:creator_name].present?
q = q.where("pools.creator_id = (select _.id from users _ where lower(_.name) = ?)", params[:creator_name].tr(" ", "_").mb_chars.downcase)
end
if params[:creator_id].present?
q = q.where(creator_id: params[:creator_id].split(",").map(&:to_i))
end
if params[:category] == "series"
q = q.series
elsif params[:category] == "collection"
q = q.collection
end
q = q.attribute_matches(:is_active, params[:is_active])
q = q.attribute_matches(:is_deleted, params[:is_deleted])
params[:order] ||= params.delete(:sort)
case params[:order]
when "name"
q = q.order("pools.name")
when "created_at"
q = q.order("pools.created_at desc")
when "post_count"
q = q.order(Arel.sql("cardinality(post_ids) desc")).default_order
else
q = q.apply_default_order(params)
end
q
end
end
extend SearchMethods
def user_not_create_limited
allowed = creator.can_pool_with_reason
if allowed != true
errors.add(:creator, User.throttle_reason(allowed))
return false
end
true
end
def user_not_limited
allowed = CurrentUser.can_pool_edit_with_reason
if allowed != true
errors.add(:updater, User.throttle_reason(allowed))
return false
end
true
end
def user_not_posts_limited
allowed = CurrentUser.can_pool_post_edit_with_reason
if allowed != true
errors.add(:updater, User.throttle_reason(allowed) + ": updating unique pools posts")
return false
end
true
end
def self.name_to_id(name)
if name =~ /\A\d+\z/
name.to_i
else
select_value_sql("SELECT id FROM pools WHERE lower(name) = ?", name.downcase.tr(" ", "_")).to_i
end
end
def self.normalize_name(name)
name.gsub(/[_[:space:]]+/, "_").gsub(/\A_|_\z/, "")
end
def self.normalize_name_for_search(name)
normalize_name(name).mb_chars.downcase
end
def self.find_by_name(name)
if name =~ /\A\d+\z/
where("pools.id = ?", name.to_i).first
elsif name
where("lower(pools.name) = ?", normalize_name_for_search(name)).first
else
nil
end
end
def versions
PoolArchive.where("pool_id = ?", id).order("id asc")
end
def is_series?
category == "series"
end
def is_collection?
category == "collection"
end
def normalize_name
self.name = Pool.normalize_name(name)
end
def pretty_name
name.tr("_", " ")
end
def pretty_category
category.titleize
end
def normalize_post_ids
self.post_ids = post_ids.uniq if is_collection?
end
def revert_to!(version)
if id != version.pool_id
raise RevertError.new("You cannot revert to a previous version of another pool.")
end
self.post_ids = version.post_ids
self.name = version.name
self.description = version.description
save
end
def contains?(post_id)
post_ids.include?(post_id)
end
def page_number(post_id)
post_ids.find_index(post_id).to_i + 1
end
def deletable_by?(user)
user.is_janitor?
end
def updater_can_edit_deleted
if is_deleted? && !deletable_by?(CurrentUser.user)
errors.add(:base, "You cannot update pools that are deleted")
end
end
def create_mod_action_for_delete
ModAction.log(:pool_delete, {pool_id: id, pool_name: name, user_id: creator_id})
end
def create_mod_action_for_undelete
ModAction.log(:pool_undelete, {pool_id: id, pool_name: name, user_id: creator_id})
end
def validate_number_of_posts
post_ids_before = post_ids_before_last_save || post_ids_was
added = post_ids - post_ids_before
return unless added.size > 0
if post_ids.size > 1_000
errors.add(:base, "Pools can have up to 1,000 posts each")
false
else
true
end
end
def add!(post)
return if post.nil?
return if post.id.nil?
return if contains?(post.id)
return if is_deleted?
with_lock do
reload
self.skip_sync = true
update(post_ids: post_ids + [post.id])
self.skip_sync = false
post.add_pool!(self, true)
post.save
end
end
def add(id)
return if id.nil?
return if contains?(id)
return if is_deleted?
self.post_ids << id
end
def remove!(post)
return unless contains?(post.id)
return unless CurrentUser.user.can_remove_from_pools?
with_lock do
reload
self.skip_sync = true
update(post_ids: post_ids - [post.id])
self.skip_sync = false
post.remove_pool!(self)
post.save
end
end
def posts(options = {})
offset = options[:offset] || 0
limit = options[:limit] || Danbooru.config.posts_per_page
slice = post_ids.slice(offset, limit)
if slice && slice.any?
# This hack is here to work around posts that are not found but present in the pool id list.
# Previously there was an N+1 post lookup loop.
posts = Hash[Post.where(id: slice).map {|p| [p.id, p]}]
slice.map {|id| posts[id]}.compact
else
[]
end
end
def synchronize
return if skip_sync == true
post_ids_before = post_ids_before_last_save || post_ids_was
added = post_ids - post_ids_before
removed = post_ids_before - post_ids
Post.where(id: added).find_each do |post|
post.add_pool!(self, true)
post.save
end
Post.where(id: removed).find_each do |post|
post.remove_pool!(self)
post.save
end
end
def synchronize!
synchronize
save if will_save_change_to_post_ids?
end
def remove_all_posts
with_lock do
transaction do
Post.where(id: post_ids).find_each do |post|
post.remove_pool!(self)
post.save
end
end
end
end
def post_count
post_ids.size
end
def first_post?(post_id)
post_id == post_ids.first
end
def last_post?(post_id)
post_id == post_ids.last
end
# XXX finds wrong post when the pool contains multiple copies of the same post (#2042).
def previous_post_id(post_id)
return nil if first_post?(post_id) || !contains?(post_id)
n = post_ids.index(post_id) - 1
post_ids[n]
end
def next_post_id(post_id)
return nil if last_post?(post_id) || !contains?(post_id)
n = post_ids.index(post_id) + 1
post_ids[n]
end
def cover_post_id
post_ids.first
end
def create_version(updater: CurrentUser.user, updater_ip_addr: CurrentUser.ip_addr)
PoolArchive.queue(self, updater, updater_ip_addr)
end
def last_page
(post_count / CurrentUser.user.per_page.to_f).ceil
end
def method_attributes
super + [:creator_name, :post_count]
end
def category_changeable_by?(user)
user.is_janitor? || (user.is_member? && post_count <= Danbooru.config.pool_category_change_limit)
end
def updater_can_change_category
if category_changed? && !category_changeable_by?(CurrentUser.user)
errors.add(:base, "You cannot change the category of pools with greater than #{Danbooru.config.pool_category_change_limit} posts")
end
end
def validate_name
case name
when /\A(any|none|series|collection)\z/i
errors.add(:name, "cannot be any of the following names: any, none, series, collection")
when /\*/
errors.add(:name, "cannot contain asterisks")
when ""
errors.add(:name, "cannot be blank")
when /\A[0-9]+\z/
errors.add(:name, "cannot contain only digits")
when /,/
errors.add(:name, "cannot contain commas")
when /(__|\-\-| )/
errors.add(:name, "cannot contain consecutive underscores, hyphens or spaces")
end
end
def updater_can_remove_posts
removed = post_ids_was - post_ids
if removed.any? && !CurrentUser.user.can_remove_from_pools?
errors.add(:base, "You cannot removes posts from pools within the first week of sign up")
end
end
end
| 25.182927 | 146 | 0.669249 |
ff86b0e83332a2794a53efae1187de6ff35628db | 148 | module MiqAeMethodService
class MiqAeServiceManageIQ_Providers_Openstack_CloudManager_CloudVolumeBackup < MiqAeServiceCloudVolumeBackup
end
end
| 29.6 | 111 | 0.912162 |
ab432486909db991a5703645057b59a6e895f0ff | 5,108 | # encoding: utf-8
require 'ostruct'
require_relative '../../lib/importer/loader'
require_relative '../../lib/importer/source_file'
require_relative '../../lib/importer/exceptions'
require_relative '../doubles/job'
require_relative '../doubles/ogr2ogr'
require_relative '../doubles/georeferencer'
require_relative '../../spec/doubles/importer_stats'
require_relative '../../../../spec/rspec_configuration.rb'
# Specs for the importer Loader: logging, ogr2ogr invocation, encoding
# fallback behavior, accessor defaults, and stats instrumentation.
describe CartoDB::Importer2::Loader do
  before do
    CartoDB::Stats::Aggregator.stubs(:read_config).returns({})
    # Fake DB whose row-count query reports 10 imported rows.
    resultset = OpenStruct.new(:first => {:num_rows => 10})
    db = Object.new
    db.stubs(:fetch).returns(resultset)
    @job = CartoDB::Importer2::Doubles::Job.new(db)
    @source_file = CartoDB::Importer2::SourceFile.new('/var/tmp/foo')
    @ogr2ogr = CartoDB::Importer2::Doubles::Ogr2ogr.new
    @georeferencer = CartoDB::Importer2::Doubles::Georeferencer.new
    # NOTE: `layer=nil` is a positional argument written as a local assignment
    # purely for readability; it simply passes nil.
    @loader = CartoDB::Importer2::Loader.new(@job, @source_file, layer=nil, @ogr2ogr, @georeferencer)
  end
  before(:each) do
    CartoDB::Stats::Aggregator.stubs(:read_config).returns({})
  end
  describe '#run' do
    it 'logs the database connection options used' do
      @loader.run
      (@job.logger.to_s. =~ /#{@job.pg_options.keys.first}/).should_not be nil
    end
    it 'runs the ogr2ogr command to load the file' do
      # Fully-stubbed ogr2ogr double: all error predicates are false and the
      # expectation is that #run is invoked at least once.
      ogr2ogr_mock = mock
      ogr2ogr_mock.stubs(:generic_error?).returns(false).twice
      ogr2ogr_mock.stubs(:command).returns('')
      ogr2ogr_mock.stubs(:command_output).returns('')
      ogr2ogr_mock.stubs(:encoding_error?).returns(false)
      ogr2ogr_mock.stubs(:invalid_dates?).returns(false)
      ogr2ogr_mock.stubs(:duplicate_column?).returns(false)
      ogr2ogr_mock.stubs(:invalid_geojson?).returns(false)
      ogr2ogr_mock.stubs(:too_many_columns?).returns(false)
      ogr2ogr_mock.stubs(:unsupported_format?).returns(false)
      ogr2ogr_mock.stubs(:file_too_big?).returns(false)
      ogr2ogr_mock.stubs(:statement_timeout?).returns(false)
      ogr2ogr_mock.stubs(:duplicate_column?).returns(false)
      ogr2ogr_mock.stubs(:segfault_error?).returns(false)
      ogr2ogr_mock.stubs(:kml_style_missing?).returns(false)
      ogr2ogr_mock.stubs(:exit_code).returns(0)
      ogr2ogr_mock.stubs(:run).returns(Object.new).at_least_once
      loader = CartoDB::Importer2::Loader.new(@job, @source_file, layer=nil, ogr2ogr_mock, @georeferencer)
      loader.run
    end
    it 'logs the exit code from ogr2ogr' do
      @loader.run
      (@job.logger.to_s =~ /ogr2ogr exit code:\s+\d+/).should_not be nil
    end
    it 'logs any output from ogr2ogr' do
      @loader.run
      (@job.logger.to_s =~ /ogr2ogr output: \w*/).should_not be nil
    end
    it 'encoding problem importing but return 0, should try fallback and then raise an error' do
      # Zero rows imported + generic error triggers the encoding fallback;
      # a persistent encoding error must then surface as an exception.
      resultset = OpenStruct.new(:first => {:num_rows => 0})
      db = Object.new
      db.stubs(:fetch).returns(resultset)
      @job = CartoDB::Importer2::Doubles::Job.new(db)
      # Enter fallback
      @ogr2ogr.stubs(:generic_error?).returns(true)
      # Fails after fallback
      @ogr2ogr.stubs(:encoding_error?).returns(true)
      @ogr2ogr.stubs(:exit_code).returns(0)
      loader = CartoDB::Importer2::Loader.new(@job, @source_file, layer=nil, @ogr2ogr, @georeferencer)
      loader.expects(:try_fallback).once
      expect { loader.run }.to raise_error(CartoDB::Importer2::RowsEncodingColumnError)
    end
  end
  describe '#ogr2ogr' do
    it 'returns the passed ogr2ogr instance' do
      ogr2ogr = Object.new
      loader = CartoDB::Importer2::Loader.new(@job, @source_file, layer=nil, ogr2ogr, @georeferencer)
      loader.ogr2ogr.should eq ogr2ogr
    end
    it 'initializes an ogr2ogr command wrapper if none passed' do
      loader = CartoDB::Importer2::Loader.new(@job, @source_file)
      loader.ogr2ogr.class.name.should eq 'CartoDB::Importer2::Ogr2ogr'
    end
  end
  describe 'stats logger' do
    before do
      resultset = OpenStruct.new(:first => {:num_rows => 10})
      db = Object.new
      db.stubs(:fetch).returns(resultset)
      @job = CartoDB::Importer2::Doubles::Job.new(db)
      @source_file = CartoDB::Importer2::SourceFile.new('/var/tmp/foo')
      @ogr2ogr = CartoDB::Importer2::Doubles::Ogr2ogr.new
      @georeferencer = CartoDB::Importer2::Doubles::Georeferencer.new
      @loader = CartoDB::Importer2::Loader.new(@job, @source_file, layer=nil, @ogr2ogr, @georeferencer)
      @importer_stats_spy = CartoDB::Doubles::Stats::Importer.instance
    end
    it 'logs stats' do
      # Each phase of the load should be timed exactly once.
      loader = CartoDB::Importer2::Loader.new(@job, @source_file, layer=nil, @ogr2ogr, @georeferencer)
      loader.set_importer_stats(@importer_stats_spy)
      loader.run
      @importer_stats_spy.timed_block_suffix_count('loader').should eq 1
      @importer_stats_spy.timed_block_suffix_count('loader.normalize').should eq 1
      @importer_stats_spy.timed_block_suffix_count('loader.ogr2ogr').should eq 1
      @importer_stats_spy.timed_block_suffix_count('loader.post_ogr2ogr_tasks').should eq 1
    end
  end
end
| 39.596899 | 111 | 0.690681 |
03a1e2a4b851f2bba0ee5022566205034ae63327 | 916 | #
# Copyright:: Copyright (c) 2018 Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
require "chef_apply/version"
# Sanity check: the gem's VERSION constant must be a well-formed Gem version.
RSpec.describe ChefApply::VERSION do
  subject(:version) do
    ChefApply::VERSION
  end
  context "VERSION" do
    it "returns the version" do
      expect(Gem::Version.correct?(version)).to be_truthy
    end
  end
end
| 28.625 | 74 | 0.740175 |
5d66a8db2858d9bbfe1ffe836e27881fcc2b8718 | 115 | module Log
module_function
def log(msg, indent = 3)
puts "- #{Time.now} #{'-' * indent}> #{msg}"
end
end | 16.428571 | 48 | 0.573913 |
7af333d623ffa512defb1c45e1b21bd4b688179c | 570 | cask :v1 => 'geppetto' do
version '4.2.0'
if Hardware::CPU.is_32_bit?
sha256 '78f578ff4cf0a9eadf85cc5a821e55125ee98ab4a8e1d4f0f5d1607487314804'
url "https://downloads.puppetlabs.com/geppetto/4.x/geppetto-macosx.cocoa.x86-#{version}-R201407250959.zip"
else
sha256 '7a09c823cea9900cb51d009f47fab69569e1d8115c6326f3e91db62714480d69'
url "https://downloads.puppetlabs.com/geppetto/4.x/geppetto-macosx.cocoa.x86_64-#{version}-R201407250959.zip"
end
homepage 'http://puppetlabs.github.io/geppetto/'
license :oss
app 'geppetto/Geppetto.app'
end
| 35.625 | 113 | 0.773684 |
7a6a75c3185397f8e0f85ae14a848c2fb0a3b30d | 2,183 | # typed: false
# frozen_string_literal: true
require "macho"
require "os/mac/architecture_list"
# {Pathname} extension for dealing with Mach-O files.
#
# @api private
# {Pathname} extension for dealing with Mach-O files.
#
# @api private
module MachOShim
  extend Forwardable

  delegate [:dylib_id, :rpaths, :delete_rpath] => :macho

  # Lazily opens and memoizes the Mach-O (or fat/universal) binary at this path.
  #
  # @return [MachO::MachOFile, MachO::FatFile]
  def macho
    # Idiom fix: removed a redundant begin/end wrapper around the memoized call.
    @macho ||= MachO.open(to_s)
  end
  private :macho

  # Parses the binary once and caches a summary of each contained slice as
  # { arch: Symbol, type: Symbol }, mapping unrecognized values to :dunno.
  # Returns [] when the file is not a Mach-O binary.
  #
  # @return [Array<Hash{Symbol=>Symbol}>]
  def mach_data
    @mach_data ||= begin
      machos = []
      mach_data = []

      # A fat (universal) file contains several Mach-O slices; a thin file
      # is treated as a single slice.
      if MachO::Utils.fat_magic?(macho.magic)
        machos = macho.machos
      else
        machos << macho
      end

      machos.each do |m|
        arch = case m.cputype
        when :x86_64, :i386, :ppc64 then m.cputype
        when :ppc then :ppc7400
        else :dunno
        end

        type = case m.filetype
        when :dylib, :bundle then m.filetype
        when :execute then :executable
        else :dunno
        end

        mach_data << { arch: arch, type: type }
      end

      mach_data
    rescue MachO::NotAMachOError
      # Silently ignore errors that indicate the file is not a Mach-O binary ...
      []
    rescue
      # ... but complain about other (parse) errors for further investigation.
      onoe "Failed to read Mach-O binary: #{self}"
      raise if Homebrew::EnvConfig.developer?

      []
    end
  end
  private :mach_data

  # Names of all dynamically linked libraries, excluding load commands whose
  # type equals +except+.
  def dynamically_linked_libraries(except: :none)
    lcs = macho.dylib_load_commands.reject { |lc| lc.type == except }

    lcs.map(&:name).map(&:to_s).uniq
  end

  # Architectures of all slices, extended with ArchitectureListExtension helpers.
  def archs
    mach_data.map { |m| m.fetch :arch }.extend(ArchitectureListExtension)
  end

  # Single-architecture summary: the slice's arch, :universal for multi-arch
  # (fat) binaries, or :dunno when no slice could be read.
  def arch
    case archs.length
    when 0 then :dunno
    when 1 then archs.first
    else :universal
    end
  end

  def universal?
    arch == :universal
  end

  def i386?
    arch == :i386
  end

  def x86_64?
    arch == :x86_64
  end

  def ppc7400?
    arch == :ppc7400
  end

  def ppc64?
    arch == :ppc64
  end

  # True if any slice is a dynamic library.
  def dylib?
    mach_data.any? { |m| m.fetch(:type) == :dylib }
  end

  # True if any slice is an executable image.
  def mach_o_executable?
    mach_data.any? { |m| m.fetch(:type) == :executable }
  end
  alias binary_executable? mach_o_executable?

  # True if any slice is a loadable bundle.
  def mach_o_bundle?
    mach_data.any? { |m| m.fetch(:type) == :bundle }
  end
end
| 18.982609 | 80 | 0.613376 |
1c881f854c38480d2d7d8324dfd64487265feeaa | 1,278 | module AbAdmin
module Models
class TypeModel
include ::EnumField::DefineEnum
attr_reader :code
class_attribute :codes, :i18n_scope, instance_writer: false
self.codes = []
self.i18n_scope = [:admin, :type_model]
def initialize(code)
@code = code.to_sym
end
class << self
def define_enum_by_codes
define_enum do |builder|
codes.each do |kind|
builder.member kind, object: new(kind.to_s)
end
end
define_question_methods
end
def define_question_methods
codes.each do |code_check|
define_method "#{code_check}?" do
self.code == code_check
end
end
end
def legal?(value)
ActiveSupport::Deprecation.warn('legal? is deprecated, use valid? instead')
valid?(value)
end
def valid?(c_id)
all.map(&:id).include?(c_id.to_i)
end
def valid_code?(code)
return unless code
codes.include?(code.to_sym)
end
end
def title
I18n.t!(@code, scope: i18n_scope)
rescue I18n::MissingTranslationData
@code.to_s.humanize
end
end
end
end | 22.821429 | 85 | 0.556338 |
5dc4085d333b2840ff745e7fc19e678912055ae6 | 2,020 | require 'spec_helper'
describe Puppet::Type.type(:iis_site).provider(:webadministration) do
subject(:webadministration) { described_class.new }
let(:resource) do
result = Puppet::Type.type(:iis_site).new(name: 'iis_site')
result.provider = webadministration
result
end
context 'verify provider' do
it { is_expected.to be_an_instance_of Puppet::Type::Iis_site::ProviderWebadministration }
it { is_expected.to respond_to(:create) }
it { is_expected.to respond_to(:exists?) }
it { is_expected.to respond_to(:destroy) }
it { is_expected.to respond_to(:start) }
it { is_expected.to respond_to(:stop) }
context 'verify ssl? function' do
it { is_expected.to respond_to(:ssl?) }
it 'returns true protocol == https' do
resource[:bindings] = {
'protocol' => 'https',
'bindinginformation' => '*:443:',
'sslflags' => 0,
'certificatehash' => 'D69B5C3315FF0DA09AF640784622CF20DC51F03E',
'certificatestorename' => 'My',
}
expect(webadministration.ssl?).to be true
end
it 'returns true bindings is an array' do
resource[:bindings] = [{
'protocol' => 'https',
'bindinginformation' => '*:443:',
'sslflags' => 0,
'certificatehash' => 'D69B5C3315FF0DA09AF640784622CF20DC51F03E',
'certificatestorename' => 'My',
},
{
'protocol' => 'http',
'bindinginformation' => '*:8080:',
}]
expect(webadministration.ssl?).to be true
end
it 'returns false if no https bindings are specified' do
resource[:bindings] = {
'protocol' => 'http',
'bindinginformation' => '*:8080:',
}
expect(webadministration.ssl?).to be false
end
end
end
end
| 34.237288 | 93 | 0.544554 |
edfd66f0c1dc40749e585fd4eaf4d3395f8980f2 | 86 | # frozen_string_literal: true
require './server'

# Unbuffer stdout so each log line is flushed as soon as it is written.
$stdout.sync = true

# Rack entry point: hand the request cycle to the Server application.
run Server.app
8779577258b3b39d56d8dbc1ae26c78c0f9c209d | 2,317 | # LDAP authorization model
#
# * Check if we are allowed access (not blocked)
#
module Gitlab
  module LDAP
    # Checks whether a GitLab user backed by an LDAP identity is still
    # allowed access, blocking/unblocking the local account so it mirrors
    # the state of the LDAP / Active Directory entry.
    class Access
      attr_reader :provider, :user

      # Opens an LDAP connection for the user's provider and yields an
      # Access instance bound to it.
      def self.open(user, &block)
        Gitlab::LDAP::Adapter.open(user.ldap_identity.provider) do |adapter|
          block.call(self.new(user, adapter))
        end
      end

      # Returns true when the user may sign in, stamping the time of the
      # successful check on the user record.
      def self.allowed?(user)
        self.open(user) do |access|
          if access.allowed?
            # Bug fix: the keyword was previously `last_credential_check_a`,
            # which silently failed to update the `last_credential_check_at`
            # timestamp on the user.
            Users::UpdateService.new(user, last_credential_check_at: Time.now).execute

            true
          else
            false
          end
        end
      end

      def initialize(user, adapter = nil)
        @adapter = adapter
        @user = user
        @provider = user.ldap_identity.provider
      end

      # Core policy check. As a side effect, blocks or unblocks the GitLab
      # account to keep it in sync with LDAP/AD state.
      def allowed?
        if ldap_user
          # Plain LDAP servers have no "disabled" flag; existence is enough.
          unless ldap_config.active_directory
            unblock_user(user, 'is available again') if user.ldap_blocked?
            return true
          end

          # Block the user in GitLab if the account was disabled in Active Directory
          if Gitlab::LDAP::Person.disabled_via_active_directory?(user.ldap_identity.extern_uid, adapter)
            block_user(user, 'is disabled in Active Directory')
            false
          else
            unblock_user(user, 'is not disabled anymore') if user.ldap_blocked?
            true
          end
        else
          # Block the user if they no longer exist in LDAP/AD
          block_user(user, 'does not exist anymore')
          false
        end
      end

      def adapter
        @adapter ||= Gitlab::LDAP::Adapter.new(provider)
      end

      def ldap_config
        Gitlab::LDAP::Config.new(provider)
      end

      def ldap_user
        @ldap_user ||= Gitlab::LDAP::Person.find_by_dn(user.ldap_identity.extern_uid, adapter)
      end

      # Blocks the GitLab account (LDAP-blocked state) and logs the reason.
      def block_user(user, reason)
        user.ldap_block

        Gitlab::AppLogger.info(
          "LDAP account \"#{user.ldap_identity.extern_uid}\" #{reason}, " \
          "blocking Gitlab user \"#{user.name}\" (#{user.email})"
        )
      end

      # Reactivates the GitLab account and logs the reason.
      def unblock_user(user, reason)
        user.activate

        Gitlab::AppLogger.info(
          "LDAP account \"#{user.ldap_identity.extern_uid}\" #{reason}, " \
          "unblocking Gitlab user \"#{user.name}\" (#{user.email})"
        )
      end
    end
  end
end
| 26.329545 | 104 | 0.578334 |
38c36cc38712a9d5400f0035e4eaa7408ea8b234 | 769 | module Museum
class Loupe < ::ApplicationRecord
extend ::FriendlyId
friendly_id :title, use: :slugged
validates_presence_of :title
def self.id_by_slug(name)
( find_by_slug(name) || abort(name) ).id
end
def magnify(gem)
parse_as(data_format, response(gem))
end
def url(gem)
uri_template % { :name => gem.name, :user => Museum.configuration.username }
end
def response(gem)
url(gem).fetch
end
def parse_as(format, string)
begin
case format
when 'json'
JSON.parse string.to_s
when 'yaml'
YAML.parse string.to_s
when 'xml'
Hash.from_xml(string.to_s)
end
rescue
{}
end
end
end
end | 19.717949 | 82 | 0.572172 |
39cbd01aa46a030cba06767a253fcc9bb5d130ce | 193 | class AddStatusIndexToSubmissions < ActiveRecord::Migration[5.0]
def change
add_index :submissions, [:exercise_id, :user_id, :status, :created_at], :name => 'ex_us_st_cr_index'
end
end
| 32.166667 | 104 | 0.751295 |
6a63b894875a8e0c0b6b95620c6d971e3b9e569d | 5,049 | require 'spec_helper_integration'
# End-to-end coverage of the OAuth2 authorization-code grant: authorization,
# callback parameters, token exchange, and scope handling.
feature 'Authorization Code Flow' do
  background do
    # Resource owners must be signed in; unauthenticated users are redirected.
    config_is_set(:authenticate_resource_owner) { User.first || redirect_to('/sign_in') }
    client_exists
    create_resource_owner
    sign_in
  end
  scenario 'resource owner authorizes the client' do
    visit authorization_endpoint_url(client: @client)
    click_on 'Authorize'
    access_grant_should_exist_for(@client, @resource_owner)
    i_should_be_on_client_callback(@client)
    url_should_have_param('code', Doorkeeper::AccessGrant.first.token)
    url_should_not_have_param('state')
    url_should_not_have_param('error')
  end
  # With the native (out-of-band) redirect URI, the code is displayed instead
  # of redirected.
  scenario 'resource owner authorizes using test url' do
    @client.redirect_uri = Doorkeeper.configuration.native_redirect_uri
    @client.save!
    visit authorization_endpoint_url(client: @client)
    click_on 'Authorize'
    access_grant_should_exist_for(@client, @resource_owner)
    url_should_have_param('code', Doorkeeper::AccessGrant.first.token)
    i_should_see 'Authorization code:'
    i_should_see Doorkeeper::AccessGrant.first.token
  end
  # The opaque state parameter must be echoed back on the callback.
  scenario 'resource owner authorizes the client with state parameter set' do
    visit authorization_endpoint_url(client: @client, state: 'return-me')
    click_on 'Authorize'
    url_should_have_param('code', Doorkeeper::AccessGrant.first.token)
    url_should_have_param('state', 'return-me')
  end
  scenario 'resource owner requests an access token with authorization code' do
    visit authorization_endpoint_url(client: @client)
    click_on 'Authorize'
    authorization_code = Doorkeeper::AccessGrant.first.token
    create_access_token authorization_code, @client
    access_token_should_exist_for(@client, @resource_owner)
    should_not_have_json 'error'
    should_have_json 'access_token', Doorkeeper::AccessToken.first.token
    should_have_json 'token_type', 'bearer'
    should_have_json_within 'expires_in', Doorkeeper::AccessToken.first.expires_in, 1
  end
  context 'with scopes' do
    background do
      default_scopes_exist :public
      optional_scopes_exist :write
    end
    # Omitting ?scope= falls back to the default scopes.
    scenario 'resource owner authorizes the client with default scopes' do
      visit authorization_endpoint_url(client: @client)
      click_on 'Authorize'
      access_grant_should_exist_for(@client, @resource_owner)
      access_grant_should_have_scopes :public
    end
    scenario 'resource owner authorizes the client with required scopes' do
      visit authorization_endpoint_url(client: @client, scope: 'public write')
      click_on 'Authorize'
      access_grant_should_have_scopes :public, :write
    end
    scenario 'resource owner authorizes the client with required scopes (without defaults)' do
      visit authorization_endpoint_url(client: @client, scope: 'write')
      click_on 'Authorize'
      access_grant_should_have_scopes :write
    end
    scenario 'new access token matches required scopes' do
      visit authorization_endpoint_url(client: @client, scope: 'public write')
      click_on 'Authorize'
      authorization_code = Doorkeeper::AccessGrant.first.token
      create_access_token authorization_code, @client
      access_token_should_exist_for(@client, @resource_owner)
      access_token_should_have_scopes :public, :write
    end
    # A scope change invalidates token reuse: a second, distinct token is issued.
    scenario 'returns new token if scopes have changed' do
      client_is_authorized(@client, @resource_owner, scopes: 'public write')
      visit authorization_endpoint_url(client: @client, scope: 'public')
      click_on 'Authorize'
      authorization_code = Doorkeeper::AccessGrant.first.token
      create_access_token authorization_code, @client
      expect(Doorkeeper::AccessToken.count).to be(2)
      should_have_json 'access_token', Doorkeeper::AccessToken.last.token
    end
    scenario 'resource owner authorizes the client with extra scopes' do
      client_is_authorized(@client, @resource_owner, scopes: 'public')
      visit authorization_endpoint_url(client: @client, scope: 'public write')
      click_on 'Authorize'
      authorization_code = Doorkeeper::AccessGrant.first.token
      create_access_token authorization_code, @client
      expect(Doorkeeper::AccessToken.count).to be(2)
      should_have_json 'access_token', Doorkeeper::AccessToken.last.token
      access_token_should_have_scopes :public, :write
    end
  end
end
# Regression coverage for concurrent token exchange when refresh tokens are
# enabled: a grant revoked mid-flight must yield invalid_grant, not a token.
describe 'Authorization Code Flow' do
  before do
    Doorkeeper.configure do
      orm DOORKEEPER_ORM
      use_refresh_token
    end
    client_exists
  end
  context 'issuing a refresh token' do
    before do
      authorization_code_exists application: @client
    end
    # Fixed typo in the example description ("acccess" -> "access").
    it 'second of simultaneous client requests get an error for revoked access token' do
      authorization_code = Doorkeeper::AccessGrant.first.token
      # First revoked? check (request A) sees false; second (request B) sees true.
      allow_any_instance_of(Doorkeeper::AccessGrant).to receive(:revoked?).and_return(false, true)
      post token_endpoint_url(code: authorization_code, client: @client)
      should_not_have_json 'access_token'
      should_have_json 'error', 'invalid_grant'
    end
  end
end
| 33.66 | 98 | 0.752822 |
385d943680ff73897a3066616d08e6ff93dc426e | 3,233 | # frozen_string_literal: true
require 'rails_helper'
# Request specs for the v1 roles endpoints: listing, showing, and sideloading
# of events, subjects, and role types.
RSpec.describe 'v1/roles', type: :request do
  let!(:role1) { create(:role) }
  let!(:role2) { create(:role) }
  describe '#index' do
    before { get '/api/v1/roles', params: params }
    context 'with no options' do
      let(:params) { {} }
      it 'lists roles' do
        expect(json_ids(true)).to eq([role1.id, role2.id])
        assert_payload(:role, role1, json_items[0])
      end
    end
    context 'when sideloading events' do
      let(:params) { { include: 'event' } }
      let(:event1) { role1.event }
      let(:event2) { role2.event }
      it 'returns relevant events in response' do
        json_events = json_includes('events')
        expect(json_events.length).to eq(2)
        assert_payload(:event, event1, json_events[0])
        assert_payload(:event, event2, json_events[1])
      end
    end
    context 'when sideloading subjects' do
      let(:subject1) { role1.subject }
      let(:subject2) { role2.subject }
      let(:params) { { include: 'subject' } }
      it 'returns relevant subjects in response' do
        json_subjects = json_includes('subjects')
        expect(json_subjects.length).to eq(2)
        assert_payload(:subject, subject1, json_subjects[0])
        assert_payload(:subject, subject2, json_subjects[1])
      end
    end
    context 'when sideloading role types' do
      let(:role_type1) { role1.role_type }
      let(:role_type2) { role2.role_type }
      let(:params) { { include: 'role_type' } }
      it 'returns relevant roles in response' do
        json_role_types = json_includes('role_types')
        expect(json_role_types.length).to eq(2)
        assert_payload(:role_type, role_type1, json_role_types[0])
        assert_payload(:role_type, role_type2, json_role_types[1])
      end
    end
  end
  describe '#show' do
    it 'returns relevant role' do
      get "/api/v1/roles/#{role1.id}"
      assert_payload(:role, role1, json_item)
    end
    context 'when sideloading events' do
      let(:event1) { role1.event }
      it 'returns relevant events in response' do
        get "/api/v1/roles/#{role1.id}", params: {
          include: 'event'
        }
        json_events = json_includes('events')
        expect(json_events.length).to eq(1)
        assert_payload(:event, event1, json_events[0])
      end
    end
    context 'when sideloading subjects' do
      let(:subject1) { role1.subject }
      it 'returns relevant subjects in response' do
        get "/api/v1/roles/#{role1.id}", params: {
          include: 'subject'
        }
        json_subjects = json_includes('subjects')
        expect(json_subjects.length).to eq(1)
        assert_payload(:subject, subject1, json_subjects[0])
      end
    end
    context 'when sideloading role types' do
      # Removed an unused, duplicate `let(:role_type2) { role1.role_type }` —
      # it was lazy and never referenced in this context (copy-paste from #index).
      let(:role_type1) { role1.role_type }
      it 'returns relevant roles in response' do
        get "/api/v1/roles/#{role1.id}", params: {
          include: 'role_type'
        }
        json_role_types = json_includes('role_types')
        expect(json_role_types.length).to eq(1)
        assert_payload(:role_type, role_type1, json_role_types[0])
      end
    end
  end
end
| 29.935185 | 66 | 0.622642 |
5df9f3a3ee6dd14c6778d0c62e2c226cb7ead739 | 287 | class RemoveExtendingOrganisationFromActivities < ActiveRecord::Migration[6.0]
def change
remove_column :activities, :extending_organisation_name
remove_column :activities, :extending_organisation_reference
remove_column :activities, :extending_organisation_type
end
end
| 35.875 | 78 | 0.832753 |
26ba649a512211d09c832b91d776d56b80f03ad2 | 475 | Pod::Spec.new do |s|
s.name = "react-native-beacons-manager"
s.version = "1.1.0"
s.summary = "React-Native library for detecting beacons (iOS and Android)"
s.homepage = "https://github.com/MacKentoch/react-native-beacons-manager#readme"
s.license = { :type => "MIT" }
s.authors = { "" => "" }
s.platform = :ios, "8.0"
s.source = { :path => "." }
s.source_files = "ios", "ios/**/*.{h,m}"
s.dependency 'React'
end
| 33.928571 | 86 | 0.555789 |
ab273908e7d0bc0e894cbb8f9c3667efcf00c5af | 376 | require 'rails_helper'
RSpec.describe Like, type: :model do
subject { Like.new(author_id: 7, post_id: 7) }
before { subject.save }
it 'checks that "author_id" is an integer' do
subject.author_id = 7.5
expect(subject).to_not be_valid
end
it 'checks that "post_id" is an integer' do
subject.author_id = 7.5
expect(subject).to_not be_valid
end
end
| 22.117647 | 48 | 0.694149 |
1124413b5fd42dd65524918f327d77fbdcc34176 | 4,434 | # frozen_string_literal: true
require 'pathname'
require './lib/auxiliary/downloader'
require './lib/auxiliary/json_helper'
require './lib/auxiliary/string_helper'
require './lib/auxiliary/extra_software/exceptions'
require './lib/engines/hckinstall/setup_scripts_helper'
# AutoHCK module
module AutoHCK
  # Downloads, validates, and installs optional third-party software
  # packages required alongside the driver / HLK kit during a test run.
  class ExtraSoftwareManager
    include Helper

    # Valid values for the per-package 'install_time' config keys.
    ALLOWED_INSTALL_TIME_VALUE = %w[before after].freeze

    def initialize(project)
      @logger = project.logger
      @ext_path = project.config['extra_software']
      @sw_names = []    # packages selected for installation, in order
      @sw_configs = {}  # package name => parsed config hash
    end

    # Fetches the package installer unless it is already cached on disk.
    def download_software(name, config)
      path = Pathname.new(@ext_path).join(name).join(config['file_name'])
      if File.exist?(path)
        @logger.info("#{config['file_name']} already exist, download skipped")
        return
      end
      dw = Downloader.new(@logger)
      dw.download(config['download_url'],
                  Pathname.new(@ext_path).join(name).join(config['file_name']))
    end

    # Loads the package config, preferring a kit-specific file
    # (e.g. hlk-config.json) over the generic config.json.
    # Raises ExtraSoftwareMissingConfig when neither exists.
    def read_config(name, kit)
      paths = [
        Pathname.new(@ext_path).join(name).join("#{kit.downcase}-config.json"),
        Pathname.new(@ext_path).join(name).join('config.json')
      ]
      paths.each do |path|
        return Json.read_json(path, @logger) if File.exist?(path)
      end
      # Bug fix: paths.join without a separator mashed both paths into one
      # unreadable string in the error message.
      raise(ExtraSoftwareMissingConfig,
            "Failed to find any config files: #{paths.join(', ')}")
    end

    # Rejects configs with unknown install_time values or the impossible
    # combination "kit: before, driver: after".
    def validate_software(name, config)
      unless ALLOWED_INSTALL_TIME_VALUE.include?(config['install_time']['kit']) &&
             ALLOWED_INSTALL_TIME_VALUE.include?(config['install_time']['driver'])
        raise(ExtraSoftwareBrokenConfig,
              "#{name}: unknown install time value")
      end
      if config['install_time']['kit'] == 'before' &&
         config['install_time']['driver'] == 'after'
        raise(ExtraSoftwareBrokenConfig,
              "#{name}: kit install time is before, but the driver - after")
      end
    end

    # Decides whether the package is applicable for the current engine mode;
    # logs and returns false for combinations that would never take effect.
    def check_install_needed(name, config, engine_mode)
      if engine_mode == 'install'
        if config['install_time']['driver'] == 'after'
          @logger.warn("SW #{name}: Skip installation in install mode, because any driver will not be installed")
          return false
        end
      elsif config['install_time']['kit'] == 'before'
        @logger.warn("SW #{name}: Skip installation in test mode, because HLK kit already installed")
        return false
      end
      true
    end

    # Reads, validates, registers, and downloads each requested package,
    # skipping duplicates and packages not needed in this engine mode.
    def prepare_software_packages(sw_names, kit, engine_mode)
      sw_names.each do |name|
        next if @sw_names.include?(name)

        config = read_config(name, kit)
        validate_software(name, config)

        next unless check_install_needed(name, config, engine_mode)

        @sw_names += [name]
        @sw_configs[name] = config
        download_software(name, config)
      end
    end

    # Copies the selected packages into the machine setup-scripts directory.
    def copy_to_setup_scripts(setup_scripts_path)
      copy_extra_software(setup_scripts_path, @ext_path, @sw_names)
    end

    # Uploads the package to the target machine and runs its install command
    # with @placeholders@ substituted (Windows-style path separators).
    def install_software_on_computer(sw_name, sw_config, tools, machine_name)
      @logger.info("Installing #{sw_name} on #{machine_name}")
      path = tools.upload_to_machine(machine_name, Pathname.new(@ext_path).join(sw_name))
      path = path.tr('/', '\\')
      replacement_list = {
        '@sw_path@' => path,
        '@file_name@' => sw_config['file_name'],
        '@temp@' => '${env:TEMP}'
      }
      cmd = "#{sw_config['install_cmd']} #{sw_config['install_args']}"
      full_cmd = replace_string(cmd, replacement_list)

      @logger.debug("cmd #{machine_name}:\n - path = #{path}\n - cmd = #{cmd}\n - full_cmd = #{full_cmd}\n")

      tools.run_on_machine(machine_name, "Installing #{sw_name}", full_cmd)
    end

    # Installs all packages flagged for installation before the driver.
    def install_software_before_driver(tools, machine_name)
      @sw_names.each do |name|
        sw_config = @sw_configs[name]
        if sw_config['install_time']['driver'] == 'before'
          install_software_on_computer(name, sw_config, tools,
                                       machine_name)
        end
      end
    end

    # Installs all packages flagged for installation after the driver.
    def install_software_after_driver(tools, machine_name)
      @sw_names.each do |name|
        sw_config = @sw_configs[name]
        if sw_config['install_time']['driver'] == 'after'
          install_software_on_computer(name, sw_config, tools,
                                       machine_name)
        end
      end
    end
  end
end
| 32.130435 | 113 | 0.633514 |
e2c495475467d92341f8724a205eab2272cebe34 | 1,711 | require 'spec_helper'
describe Tugboat::Middleware::SSHDroplet do
include_context "spec"
before do
allow(Kernel).to receive(:exec)
end
describe ".call" do
it "exec ssh with correct options" do
expect(Kernel).to receive(:exec).with("ssh",
"-o", "IdentitiesOnly=yes",
"-o", "LogLevel=ERROR",
"-o", "StrictHostKeyChecking=no",
"-o", "UserKnownHostsFile=/dev/null",
"-i", ssh_key_path,
"-p", ssh_port,
"#{ssh_user}@#{droplet_ip_private}")
env["droplet_ip"] = droplet_ip
env["droplet_ip_private"] = droplet_ip_private
env["config"] = config
described_class.new(app).call(env)
end
it "executes ssh with custom options" do
expect(Kernel).to receive(:exec).with("ssh",
"-o", "IdentitiesOnly=yes",
"-o", "LogLevel=ERROR",
"-o", "StrictHostKeyChecking=no",
"-o", "UserKnownHostsFile=/dev/null",
"-i", ssh_key_path,
"-p", ssh_port,
"-e",
"-q",
"-X",
"#{ssh_user}@#{droplet_ip}",
"echo hello")
env["droplet_ip"] = droplet_ip
env["droplet_ip_private"] = droplet_ip_private
env["config"] = config
env["user_droplet_ssh_command"] = "echo hello"
env["user_droplet_use_public_ip"] = true
env["user_droplet_ssh_opts"] = "-e -q -X"
described_class.new(app).call(env)
end
end
end
| 30.553571 | 61 | 0.485096 |
281aee99ec0ff728bba32ee6d9526acaf08bb3d8 | 616 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "google/apis/firestore_v1beta2"
| 38.5 | 74 | 0.766234 |
798f3fc86e1357be23259e34d3b5964b5fa5de0d | 4,027 | default['android-sdk']['name'] = 'android-sdk'
default['android-sdk']['owner'] = 'root'
default['android-sdk']['group'] = 'root'
default['android-sdk']['setup_root'] = nil # ark defaults (/usr/local) is used if this attribute is not defined
default['android-sdk']['with_symlink'] = true # use ark's :install action when true; use ark's :put action when false
default['android-sdk']['set_environment_variables'] = true
default['android-sdk']['version'] = '24.4'
default['android-sdk']['checksum'] = 'f2bb546534d16e2004665257ee530060338c684adad14a49cd4bbde08098d8a4'
default['android-sdk']['download_url'] = "http://dl.google.com/android/android-sdk_r#{node['android-sdk']['version']}-linux.tgz"
#
# List of Android SDK components to preinstall:
# Selection based on
# - Platform usage statistics (see http://developer.android.com/about/dashboards/index.html)
# - Build Tools releases: http://developer.android.com/tools/revisions/build-tools.html
#
# Hint:
# Add 'tools' to the list below if you wish to get the latest version,
# without having to adapt 'version' and 'checksum' attributes of this cookbook.
# Note that it will require (waste) some extra download effort.
#
default['android-sdk']['components'] = %w( platform-tools
build-tools-23.0.1
android-23
sys-img-armeabi-v7a-android-23
sys-img-armeabi-v7a-android-tv-23
android-22
sys-img-armeabi-v7a-android-22
android-21
sys-img-armeabi-v7a-android-21
android-20
sys-img-armeabi-v7a-android-wear-20
android-19
sys-img-armeabi-v7a-android-19
android-18
sys-img-armeabi-v7a-android-18
android-17
sys-img-armeabi-v7a-android-17
android-16
sys-img-armeabi-v7a-android-16
android-15
sys-img-armeabi-v7a-android-15
android-10
extra-android-support
extra-google-google_play_services
extra-google-m2repository
extra-android-m2repository )
default['android-sdk']['license']['white_list'] = %w(.+)
default['android-sdk']['license']['black_list'] = [] # e.g. ['intel-.+', 'mips-.+', 'android-wear-sdk-license-.+']
default['android-sdk']['license']['default_answer'] = 'n' # 'y' or 'n' ('yes' or 'no')
default['android-sdk']['scripts']['path'] = '/usr/local/bin'
default['android-sdk']['scripts']['owner'] = node['android-sdk']['owner']
default['android-sdk']['scripts']['group'] = node['android-sdk']['group']
default['android-sdk']['java_from_system'] = false
default['android-sdk']['maven-rescue'] = false
| 66.016393 | 141 | 0.427862 |
ff0e8296f29cf7aa6ad43719a9d6bdda2a913d57 | 458 | require 'test/unit'
require File.dirname(__FILE__) + '/../lib/die'
require 'shoulda'
class TestDie < Test::Unit::TestCase
  # Fixed seed so the PRNG sequence is reproducible within each test.
  SEED = 1234567890

  context 'Die.roll' do
    setup do
      # Capture the first value the PRNG will yield, then rewind the
      # generator so Die.roll consumes that exact same value.
      srand(SEED)
      @first_random_value = rand
      srand(SEED)
    end

    should 'return a value between 1 and the number of sides' do
      side_count = 1000
      predicted_roll = (@first_random_value * side_count + 1).to_i
      assert_equal predicted_roll, Die.roll(side_count)
    end
  end
end
| 19.913043 | 64 | 0.637555 |
e2758f9e100acd89770bfaa38cb6c663a7b659f0 | 741 | module Tori
class Controls
CONTROLS = {
'a' => :left,
'd' => :right
}
def initialize(window, player)
@window = window
@player = player
end
def button_down(key)
case CONTROLS[key]
when :left, :right then @player.walking!
end
end
def button_up(key)
@player.idle! unless CONTROLS.keys.map { |control| @window.button_down?(control) }.any?
end
def update
case maching_action
when :left then @player.move_left
when :right then @player.move_right
end
end
private
def maching_action
CONTROLS.each do |key, action|
if @window.button_down? key
return action
end
end
end
end
end
| 17.642857 | 93 | 0.580297 |
e247ebdcd4c872979c72bbd60aa19a7615efcdc0 | 944 | class DropContactsTriggers < ActiveRecord::Migration[5.0]
def up
if respond_to?(:drop_trigger)
drop_trigger("not_ready_contacts_before_insert_update_row_tr", "contacts", :generated => true)
drop_trigger("contacts_before_insert_update_row_tr", "contacts", :generated => true)
end
end
def down
if respond_to?(:create_trigger)
create_trigger("not_ready_contacts_before_insert_update_row_tr", :generated => true, :compatibility => 1).
on("contacts").
before(:insert, :update) do
"new.not_ready_content_tsearch := to_tsvector('pg_catalog.simple', coalesce(new.not_ready_content,''));"
end
create_trigger("contacts_before_insert_update_row_tr", :generated => true, :compatibility => 1).
on("contacts").
before(:insert, :update) do
"new.content_tsearch := to_tsvector('pg_catalog.simple', coalesce(new.content,''));"
end
end
end
end
| 37.76 | 112 | 0.6875 |
267eff645a61d6b13fc992144a4babf552da31d2 | 116 | RSpec.describe HerokuTool do
it "has a version number" do
expect(HerokuTool::VERSION).not_to be nil
end
end
| 19.333333 | 45 | 0.741379 |
ed200344439e6b828464551b35e5bdd00907940a | 5,153 | module Steep
module AST
module Annotation
class Collection
attr_reader :annotations
attr_reader :builder
attr_reader :current_module
attr_reader :var_type_annotations
attr_reader :const_type_annotations
attr_reader :ivar_type_annotations
attr_reader :method_type_annotations
attr_reader :block_type_annotation
attr_reader :return_type_annotation
attr_reader :self_type_annotation
attr_reader :instance_type_annotation
attr_reader :module_type_annotation
attr_reader :implement_module_annotation
attr_reader :dynamic_annotations
attr_reader :break_type_annotation
def initialize(annotations:, builder:, current_module:)
@annotations = annotations
@builder = builder
@current_module = current_module
@var_type_annotations = {}
@method_type_annotations = {}
@const_type_annotations = {}
@ivar_type_annotations = {}
@dynamic_annotations = []
annotations.each do |annotation|
case annotation
when VarType
var_type_annotations[annotation.name] = annotation
when MethodType
method_type_annotations[annotation.name] = annotation
when BlockType
@block_type_annotation = annotation
when ReturnType
@return_type_annotation = annotation
when SelfType
@self_type_annotation = annotation
when ConstType
@const_type_annotations[annotation.name] = annotation
when InstanceType
@instance_type_annotation = annotation
when ModuleType
@module_type_annotation = annotation
when Implements
@implement_module_annotation = annotation
when IvarType
@ivar_type_annotations[annotation.name] = annotation
when Dynamic
@dynamic_annotations << annotation
when BreakType
@break_type_annotation = annotation
else
raise "Unexpected annotation: #{annotation.inspect}"
end
end
end
def absolute_type(type)
if type
builder.absolute_type(type, current: current_module)
end
end
def var_type(lvar: nil, ivar: nil, const: nil)
case
when lvar
absolute_type(var_type_annotations[lvar]&.type)
when ivar
absolute_type(ivar_type_annotations[ivar]&.type)
when const
absolute_type(const_type_annotations[const]&.type)
end
end
def method_type(name)
if (a = method_type_annotations[name])
builder.method_type_to_method_type(a.type, current: current_module)
end
end
def block_type
absolute_type(block_type_annotation&.type)
end
def return_type
absolute_type(return_type_annotation&.type)
end
def self_type
absolute_type(self_type_annotation&.type)
end
def instance_type
absolute_type(instance_type_annotation&.type)
end
def module_type
absolute_type(module_type_annotation&.type)
end
def break_type
absolute_type(break_type_annotation&.type)
end
def lvar_types
var_type_annotations.each_key.with_object({}) do |name, hash|
hash[name] = var_type(lvar: name)
end
end
def ivar_types
ivar_type_annotations.each_key.with_object({}) do |name, hash|
hash[name] = var_type(ivar: name)
end
end
def const_types
const_type_annotations.each_key.with_object({}) do |name, hash|
hash[name] = var_type(const: name)
end
end
def instance_dynamics
dynamic_annotations.flat_map do |annot|
annot.names.select(&:instance_method?).map(&:name)
end
end
def module_dynamics
dynamic_annotations.flat_map do |annot|
annot.names.select(&:module_method?).map(&:name)
end
end
def merge_block_annotations(annotations)
if annotations.current_module != current_module || annotations.builder != builder
raise "Cannot merge another annotation: self=#{self}, other=#{annotations}"
end
retained_annotations = self.annotations.reject do |annotation|
annotation.is_a?(BlockType) || annotation.is_a?(BreakType)
end
self.class.new(annotations: retained_annotations + annotations.annotations,
builder: builder,
current_module: current_module)
end
def any?(&block)
annotations.any?(&block)
end
def size
annotations.size
end
def include?(obj)
annotations.include?(obj)
end
end
end
end
end
| 30.134503 | 91 | 0.599069 |
4a4dc0767b02f5bd3840a37083fcb7c6b5714cfd | 1,352 | require 'plain_model/querying/base'
require 'plain_model/querying/with_model'
require_relative 'querying/select'
require_relative 'querying/from'
require_relative 'querying/where'
require_relative 'querying/order_by'
require_relative 'querying/group_by'
require_relative 'querying/having'
require_relative 'querying/limit'
require_relative 'querying/union'
require_relative 'querying/array_join'
require_relative 'querying/page'
require_relative 'querying/scope'
require_relative 'querying/except'
require_relative 'querying/collect'
module ActiveHouse
  # Composable ClickHouse query builder. Each included module contributes
  # one clause or capability of the query DSL (select, from, where, union,
  # pagination, scoping, ...). NOTE: include order determines method
  # resolution for overlapping helpers, so it must not be reordered casually.
  class QueryBuilder
    include PlainModel::Querying::Base
    include PlainModel::Querying::WithModel
    include ActiveHouse::Querying::Select
    include ActiveHouse::Querying::From
    include ActiveHouse::Querying::Where
    include ActiveHouse::Querying::OrderBy
    include ActiveHouse::Querying::GroupBy
    include ActiveHouse::Querying::Having
    include ActiveHouse::Querying::Limit
    include ActiveHouse::Querying::Union
    include ActiveHouse::Querying::ArrayJoin
    include ActiveHouse::Querying::Scope
    include ActiveHouse::Querying::Page
    include ActiveHouse::Querying::Except
    include ActiveHouse::Querying::Collect
    # allows using query without model_class
    def initialize(model_class = nil)
      super(model_class || ActiveHouse::Model)
    end
  end
end
| 32.190476 | 46 | 0.79142 |
873b599f84d4a4fc4161a8592df84f5ef48350e1 | 4,780 | # frozen_string_literal: true
require 'spec_helper'
# Specs for the load balancer's rotating list of replica database hosts.
RSpec.describe Gitlab::Database::LoadBalancing::HostList do
  # Asserts the db_load_balancing_hosts gauge reports `hosts`.
  def expect_metrics(hosts)
    expect(Gitlab::Metrics.registry.get(:db_load_balancing_hosts).get({})).to eq(hosts)
  end
  before do
    # Avoid creating real connection pools; reuse the test pool instead.
    allow(Gitlab::Database)
      .to receive(:create_connection_pool)
      .and_return(ActiveRecord::Base.connection_pool)
  end
  let(:load_balancer) { double(:load_balancer) }
  let(:host_count) { 2 }
  let(:host_list) do
    hosts = Array.new(host_count) do
      Gitlab::Database::LoadBalancing::Host.new('localhost', load_balancer, port: 5432)
    end
    described_class.new(hosts)
  end
  describe '#initialize' do
    it 'sets metrics for current number of hosts and current index' do
      host_list
      expect_metrics(2)
    end
  end
  describe '#length' do
    it 'returns the number of hosts in the list' do
      expect(host_list.length).to eq(2)
    end
  end
  describe '#host_names_and_ports' do
    context 'with ports' do
      it 'returns the host names of all hosts' do
        hosts = [
          ['localhost', 5432],
          ['localhost', 5432]
        ]
        expect(host_list.host_names_and_ports).to eq(hosts)
      end
    end
    context 'without ports' do
      let(:host_list) do
        hosts = Array.new(2) do
          Gitlab::Database::LoadBalancing::Host.new('localhost', load_balancer)
        end
        described_class.new(hosts)
      end
      it 'returns the host names of all hosts' do
        hosts = [
          ['localhost', nil],
          ['localhost', nil]
        ]
        expect(host_list.host_names_and_ports).to eq(hosts)
      end
    end
  end
  # manage_pool? should track pool ownership as hosts are swapped in/out.
  describe '#manage_pool?' do
    before do
      allow(Gitlab::Database).to receive(:create_connection_pool) { double(:connection) }
    end
    context 'when the testing pool belongs to one host of the host list' do
      it 'returns true' do
        pool = host_list.hosts.first.pool
        expect(host_list.manage_pool?(pool)).to be(true)
      end
    end
    context 'when the testing pool belongs to a former host of the host list' do
      it 'returns false' do
        pool = host_list.hosts.first.pool
        host_list.hosts = [
          Gitlab::Database::LoadBalancing::Host.new('foo', load_balancer)
        ]
        expect(host_list.manage_pool?(pool)).to be(false)
      end
    end
    context 'when the testing pool belongs to a new host of the host list' do
      it 'returns true' do
        host = Gitlab::Database::LoadBalancing::Host.new('foo', load_balancer)
        host_list.hosts = [host]
        expect(host_list.manage_pool?(host.pool)).to be(true)
      end
    end
    context 'when the testing pool does not have any relation with the host list' do
      it 'returns false' do
        host = Gitlab::Database::LoadBalancing::Host.new('foo', load_balancer)
        expect(host_list.manage_pool?(host.pool)).to be(false)
      end
    end
  end
  describe '#hosts' do
    # Callers must not be able to mutate the internal list via the reader.
    it 'returns a copy of the host' do
      first = host_list.hosts
      expect(host_list.hosts).to eq(first)
      expect(host_list.hosts.object_id).not_to eq(first.object_id)
    end
  end
  describe '#hosts=' do
    it 'updates the list of hosts to use' do
      host_list.hosts = [
        Gitlab::Database::LoadBalancing::Host.new('foo', load_balancer)
      ]
      expect(host_list.length).to eq(1)
      expect(host_list.hosts[0].host).to eq('foo')
      expect_metrics(1)
    end
  end
  # #next is a round-robin cursor that skips offline hosts.
  describe '#next' do
    it 'returns a host' do
      expect(host_list.next)
        .to be_an_instance_of(Gitlab::Database::LoadBalancing::Host)
    end
    it 'cycles through all available hosts' do
      expect(host_list.next).to eq(host_list.hosts[0])
      expect_metrics(2)
      expect(host_list.next).to eq(host_list.hosts[1])
      expect_metrics(2)
      expect(host_list.next).to eq(host_list.hosts[0])
      expect_metrics(2)
    end
    it 'skips hosts that are offline' do
      allow(host_list.hosts[0]).to receive(:online?).and_return(false)
      expect(host_list.next).to eq(host_list.hosts[1])
      expect_metrics(2)
    end
    it 'returns nil if no hosts are online' do
      host_list.hosts.each do |host|
        allow(host).to receive(:online?).and_return(false)
      end
      expect(host_list.next).to be_nil
      expect_metrics(2)
    end
    it 'returns nil if no hosts are available' do
      expect(described_class.new.next).to be_nil
    end
  end
  describe '#shuffle' do
    let(:host_count) { 3 }
    it 'randomizes the list' do
      # Shuffling must preserve membership and length, whatever the order.
      2.times do
        all_hosts = host_list.hosts
        host_list.shuffle
        expect(host_list.length).to eq(host_count)
        expect(host_list.hosts).to contain_exactly(*all_hosts)
      end
    end
  end
end
| 25.291005 | 89 | 0.642887 |
e8a777af2a4c725e33ab43ede4eb31238475806a | 22,180 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
module Monitoring
module V3
# A single strongly-typed value.
# @!attribute [rw] bool_value
# @return [true, false]
# A Boolean value: `true` or `false`.
# @!attribute [rw] int64_value
# @return [Integer]
# A 64-bit integer. Its range is approximately ±9.2x10<sup>18</sup>.
# @!attribute [rw] double_value
# @return [Float]
# A 64-bit double-precision floating-point number. Its magnitude
# is approximately ±10<sup>±300</sup> and it has 16
# significant digits of precision.
# @!attribute [rw] string_value
# @return [String]
# A variable-length string value.
# @!attribute [rw] distribution_value
# @return [Google::Api::Distribution]
# A distribution value.
class TypedValue; end
# A closed time interval. It extends from the start time to the end time, and includes both: `[startTime, endTime]`. Valid time intervals depend on the [`MetricKind`](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors#MetricKind) of the metric value. In no case can the end time be earlier than the start time.
#
# * For a `GAUGE` metric, the `startTime` value is technically optional; if
# no value is specified, the start time defaults to the value of the
# end time, and the interval represents a single point in time. If both
# start and end times are specified, they must be identical. Such an
# interval is valid only for `GAUGE` metrics, which are point-in-time
# measurements.
#
# * For `DELTA` and `CUMULATIVE` metrics, the start time must be earlier
# than the end time.
#
# * In all cases, the start time of the next interval must be
# at least a microsecond after the end time of the previous interval.
# Because the interval is closed, if the start time of a new interval
# is the same as the end time of the previous interval, data written
# at the new start time could overwrite data written at the previous
# end time.
# @!attribute [rw] end_time
# @return [Google::Protobuf::Timestamp]
# Required. The end of the time interval.
# @!attribute [rw] start_time
# @return [Google::Protobuf::Timestamp]
# Optional. The beginning of the time interval. The default value
# for the start time is the end time. The start time must not be
# later than the end time.
class TimeInterval; end
# Describes how to combine multiple time series to provide different views of
# the data. Aggregation consists of an alignment step on individual time
# series (`alignment_period` and `per_series_aligner`) followed by an optional
# reduction step of the data across the aligned time series
# (`cross_series_reducer` and `group_by_fields`). For more details, see
# [Aggregation](https://cloud.google.com/monitoring/api/learn_more#aggregation).
# @!attribute [rw] alignment_period
# @return [Google::Protobuf::Duration]
# The alignment period for per-{Google::Monitoring::V3::TimeSeries time series}
# alignment. If present, `alignmentPeriod` must be at least 60
# seconds. After per-time series alignment, each time series will
# contain data points only on the period boundaries. If
# `perSeriesAligner` is not specified or equals `ALIGN_NONE`, then
# this field is ignored. If `perSeriesAligner` is specified and
# does not equal `ALIGN_NONE`, then this field must be defined;
# otherwise an error is returned.
# @!attribute [rw] per_series_aligner
# @return [Google::Monitoring::V3::Aggregation::Aligner]
# The approach to be used to align individual time series. Not all
# alignment functions may be applied to all time series, depending
# on the metric type and value type of the original time
# series. Alignment may change the metric type or the value type of
# the time series.
#
# Time series data must be aligned in order to perform cross-time
# series reduction. If `crossSeriesReducer` is specified, then
# `perSeriesAligner` must be specified and not equal `ALIGN_NONE`
# and `alignmentPeriod` must be specified; otherwise, an error is
# returned.
# @!attribute [rw] cross_series_reducer
# @return [Google::Monitoring::V3::Aggregation::Reducer]
# The approach to be used to combine time series. Not all reducer
# functions may be applied to all time series, depending on the
# metric type and the value type of the original time
# series. Reduction may change the metric type of value type of the
# time series.
#
# Time series data must be aligned in order to perform cross-time
# series reduction. If `crossSeriesReducer` is specified, then
# `perSeriesAligner` must be specified and not equal `ALIGN_NONE`
# and `alignmentPeriod` must be specified; otherwise, an error is
# returned.
# @!attribute [rw] group_by_fields
# @return [Array<String>]
# The set of fields to preserve when `crossSeriesReducer` is
# specified. The `groupByFields` determine how the time series are
# partitioned into subsets prior to applying the aggregation
# function. Each subset contains time series that have the same
# value for each of the grouping fields. Each individual time
# series is a member of exactly one subset. The
# `crossSeriesReducer` is applied to each subset of time series.
# It is not possible to reduce across different resource types, so
# this field implicitly contains `resource.type`. Fields not
# specified in `groupByFields` are aggregated away. If
# `groupByFields` is not specified and all the time series have
# the same resource type, then the time series are aggregated into
# a single output time series. If `crossSeriesReducer` is not
# defined, this field is ignored.
class Aggregation
# The Aligner describes how to bring the data points in a single
# time series into temporal alignment.
module Aligner
# No alignment. Raw data is returned. Not valid if cross-time
# series reduction is requested. The value type of the result is
# the same as the value type of the input.
ALIGN_NONE = 0
# Align and convert to delta metric type. This alignment is valid
# for cumulative metrics and delta metrics. Aligning an existing
# delta metric to a delta metric requires that the alignment
# period be increased. The value type of the result is the same
# as the value type of the input.
#
# One can think of this aligner as a rate but without time units; that
# is, the output is conceptually (second_point - first_point).
ALIGN_DELTA = 1
# Align and convert to a rate. This alignment is valid for
# cumulative metrics and delta metrics with numeric values. The output is a
# gauge metric with value type
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
#
# One can think of this aligner as conceptually providing the slope of
# the line that passes through the value at the start and end of the
# window. In other words, this is conceptually ((y1 - y0)/(t1 - t0)),
# and the output unit is one that has a "/time" dimension.
#
# If, by rate, you are looking for percentage change, see the
# `ALIGN_PERCENT_CHANGE` aligner option.
ALIGN_RATE = 2
# Align by interpolating between adjacent points around the
# period boundary. This alignment is valid for gauge
# metrics with numeric values. The value type of the result is the same
# as the value type of the input.
ALIGN_INTERPOLATE = 3
# Align by shifting the oldest data point before the period
# boundary to the boundary. This alignment is valid for gauge
# metrics. The value type of the result is the same as the
# value type of the input.
ALIGN_NEXT_OLDER = 4
# Align time series via aggregation. The resulting data point in
# the alignment period is the minimum of all data points in the
# period. This alignment is valid for gauge and delta metrics with numeric
# values. The value type of the result is the same as the value
# type of the input.
ALIGN_MIN = 10
# Align time series via aggregation. The resulting data point in
# the alignment period is the maximum of all data points in the
# period. This alignment is valid for gauge and delta metrics with numeric
# values. The value type of the result is the same as the value
# type of the input.
ALIGN_MAX = 11
# Align time series via aggregation. The resulting data point in
# the alignment period is the average or arithmetic mean of all
# data points in the period. This alignment is valid for gauge and delta
# metrics with numeric values. The value type of the output is
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
ALIGN_MEAN = 12
# Align time series via aggregation. The resulting data point in
# the alignment period is the count of all data points in the
# period. This alignment is valid for gauge and delta metrics with numeric
# or Boolean values. The value type of the output is
# {Google::Api::MetricDescriptor::ValueType::INT64 INT64}.
ALIGN_COUNT = 13
# Align time series via aggregation. The resulting data point in
# the alignment period is the sum of all data points in the
# period. This alignment is valid for gauge and delta metrics with numeric
# and distribution values. The value type of the output is the
# same as the value type of the input.
ALIGN_SUM = 14
# Align time series via aggregation. The resulting data point in
# the alignment period is the standard deviation of all data
# points in the period. This alignment is valid for gauge and delta metrics
# with numeric values. The value type of the output is
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
ALIGN_STDDEV = 15
# Align time series via aggregation. The resulting data point in
# the alignment period is the count of True-valued data points in the
# period. This alignment is valid for gauge metrics with
# Boolean values. The value type of the output is
# {Google::Api::MetricDescriptor::ValueType::INT64 INT64}.
ALIGN_COUNT_TRUE = 16
# Align time series via aggregation. The resulting data point in
# the alignment period is the count of False-valued data points in the
# period. This alignment is valid for gauge metrics with
# Boolean values. The value type of the output is
# {Google::Api::MetricDescriptor::ValueType::INT64 INT64}.
ALIGN_COUNT_FALSE = 24
# Align time series via aggregation. The resulting data point in
# the alignment period is the fraction of True-valued data points in the
# period. This alignment is valid for gauge metrics with Boolean values.
# The output value is in the range [0, 1] and has value type
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
ALIGN_FRACTION_TRUE = 17
# Align time series via aggregation. The resulting data point in
# the alignment period is the 99th percentile of all data
# points in the period. This alignment is valid for gauge and delta metrics
# with distribution values. The output is a gauge metric with value type
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
ALIGN_PERCENTILE_99 = 18
# Align time series via aggregation. The resulting data point in
# the alignment period is the 95th percentile of all data
# points in the period. This alignment is valid for gauge and delta metrics
# with distribution values. The output is a gauge metric with value type
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
ALIGN_PERCENTILE_95 = 19
# Align time series via aggregation. The resulting data point in
# the alignment period is the 50th percentile of all data
# points in the period. This alignment is valid for gauge and delta metrics
# with distribution values. The output is a gauge metric with value type
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
ALIGN_PERCENTILE_50 = 20
# Align time series via aggregation. The resulting data point in
# the alignment period is the 5th percentile of all data
# points in the period. This alignment is valid for gauge and delta metrics
# with distribution values. The output is a gauge metric with value type
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
ALIGN_PERCENTILE_05 = 21
# Align and convert to a percentage change. This alignment is valid for
# gauge and delta metrics with numeric values. This alignment conceptually
# computes the equivalent of "((current - previous)/previous)*100"
# where previous value is determined based on the alignmentPeriod.
# In the event that previous is 0 the calculated value is infinity with the
# exception that if both (current - previous) and previous are 0 the
# calculated value is 0.
# A 10 minute moving mean is computed at each point of the time window
# prior to the above calculation to smooth the metric and prevent false
# positives from very short lived spikes.
# Only applicable for data that is >= 0. Any values < 0 are treated as
# no data. While delta metrics are accepted by this alignment special care
# should be taken that the values for the metric will always be positive.
# The output is a gauge metric with value type
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
ALIGN_PERCENT_CHANGE = 23
end
# A Reducer describes how to aggregate data points from multiple
# time series into a single time series.
module Reducer
# No cross-time series reduction. The output of the aligner is
# returned.
REDUCE_NONE = 0
# Reduce by computing the mean across time series for each
# alignment period. This reducer is valid for delta and
# gauge metrics with numeric or distribution values. The value type of the
# output is {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
REDUCE_MEAN = 1
# Reduce by computing the minimum across time series for each
# alignment period. This reducer is valid for delta and
# gauge metrics with numeric values. The value type of the output
# is the same as the value type of the input.
REDUCE_MIN = 2
# Reduce by computing the maximum across time series for each
# alignment period. This reducer is valid for delta and
# gauge metrics with numeric values. The value type of the output
# is the same as the value type of the input.
REDUCE_MAX = 3
# Reduce by computing the sum across time series for each
# alignment period. This reducer is valid for delta and
# gauge metrics with numeric and distribution values. The value type of
# the output is the same as the value type of the input.
REDUCE_SUM = 4
# Reduce by computing the standard deviation across time series
# for each alignment period. This reducer is valid for delta
# and gauge metrics with numeric or distribution values. The value type of
# the output is {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
REDUCE_STDDEV = 5
# Reduce by computing the count of data points across time series
# for each alignment period. This reducer is valid for delta
# and gauge metrics of numeric, Boolean, distribution, and string value
# type. The value type of the output is
# {Google::Api::MetricDescriptor::ValueType::INT64 INT64}.
REDUCE_COUNT = 6
# Reduce by computing the count of True-valued data points across time
# series for each alignment period. This reducer is valid for delta
# and gauge metrics of Boolean value type. The value type of
# the output is {Google::Api::MetricDescriptor::ValueType::INT64 INT64}.
REDUCE_COUNT_TRUE = 7
# Reduce by computing the count of False-valued data points across time
# series for each alignment period. This reducer is valid for delta
# and gauge metrics of Boolean value type. The value type of
# the output is {Google::Api::MetricDescriptor::ValueType::INT64 INT64}.
REDUCE_COUNT_FALSE = 15
# Reduce by computing the fraction of True-valued data points across time
# series for each alignment period. This reducer is valid for delta
# and gauge metrics of Boolean value type. The output value is in the
# range [0, 1] and has value type
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}.
REDUCE_FRACTION_TRUE = 8
# Reduce by computing 99th percentile of data points across time series
# for each alignment period. This reducer is valid for gauge and delta
# metrics of numeric and distribution type. The value of the output is
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}
REDUCE_PERCENTILE_99 = 9
# Reduce by computing 95th percentile of data points across time series
# for each alignment period. This reducer is valid for gauge and delta
# metrics of numeric and distribution type. The value of the output is
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}
REDUCE_PERCENTILE_95 = 10
# Reduce by computing 50th percentile of data points across time series
# for each alignment period. This reducer is valid for gauge and delta
# metrics of numeric and distribution type. The value of the output is
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}
REDUCE_PERCENTILE_50 = 11
# Reduce by computing 5th percentile of data points across time series
# for each alignment period. This reducer is valid for gauge and delta
# metrics of numeric and distribution type. The value of the output is
# {Google::Api::MetricDescriptor::ValueType::DOUBLE DOUBLE}
REDUCE_PERCENTILE_05 = 12
end
end
# Specifies an ordering relationship on two arguments, here called left and
# right.
module ComparisonType
# No ordering relationship is specified.
COMPARISON_UNSPECIFIED = 0
# The left argument is greater than the right argument.
COMPARISON_GT = 1
# The left argument is greater than or equal to the right argument.
COMPARISON_GE = 2
# The left argument is less than the right argument.
COMPARISON_LT = 3
# The left argument is less than or equal to the right argument.
COMPARISON_LE = 4
# The left argument is equal to the right argument.
COMPARISON_EQ = 5
# The left argument is not equal to the right argument.
COMPARISON_NE = 6
end
# The tier of service for a Workspace. Please see the
# [service tiers
# documentation](https://cloud.google.com/monitoring/workspaces/tiers) for more
# details.
module ServiceTier
# An invalid sentinel value, used to indicate that a tier has not
# been provided explicitly.
SERVICE_TIER_UNSPECIFIED = 0
# The Stackdriver Basic tier, a free tier of service that provides basic
# features, a moderate allotment of logs, and access to built-in metrics.
# A number of features are not available in this tier. For more details,
# see [the service tiers
# documentation](https://cloud.google.com/monitoring/workspaces/tiers).
SERVICE_TIER_BASIC = 1
# The Stackdriver Premium tier, a higher, more expensive tier of service
# that provides access to all Stackdriver features, lets you use Stackdriver
# with AWS accounts, and has a larger allotments for logs and metrics. For
# more details, see [the service tiers
# documentation](https://cloud.google.com/monitoring/workspaces/tiers).
SERVICE_TIER_PREMIUM = 2
end
end
end
end | 53.062201 | 347 | 0.660009 |
b9a3c34605bf1ae44a13e5f1f45d061d9409ea3f | 1,943 | # frozen_string_literal: true
require "test_helper"
require "project_types/extension/extension_test_helpers"
module Extension
module Tasks
module Converters
class VersionConverterTest < MiniTest::Test
include TestHelpers::FakeUI
def setup
super
ShopifyCLI::ProjectType.load_type(:extension)
@api_key = "FAKE_API_KEY"
@registration_id = 42
@config = {}
@extension_context = "fake#context"
@location = "https://www.fakeurl.com"
@last_user_interaction_at = Time.now.to_s
end
def test_from_hash_aborts_with_a_parse_error_if_the_hash_is_nil
io = capture_io_and_assert_raises(ShopifyCLI::Abort) do
Converters::VersionConverter.from_hash(@context, nil)
end
assert_message_output(io: io, expected_content: @context.message("tasks.errors.parse_error"))
end
def test_from_hash_parses_a_version_from_a_hash
hash = {
Converters::VersionConverter::REGISTRATION_ID_FIELD => @registration_id,
Converters::VersionConverter::LAST_USER_INTERACTION_AT_FIELD => @last_user_interaction_at,
Converters::VersionConverter::CONTEXT_FIELD => @extension_context,
Converters::VersionConverter::LOCATION_FIELD => @location,
Converters::VersionConverter::VALIDATION_ERRORS_FIELD => [],
}
parsed_version = Tasks::Converters::VersionConverter.from_hash(@context, hash)
assert_kind_of(Models::Version, parsed_version)
assert_equal @registration_id, parsed_version.registration_id
assert_kind_of(Time, parsed_version.last_user_interaction_at)
assert_equal @extension_context, parsed_version.context
assert_equal @location, parsed_version.location
assert_equal [], parsed_version.validation_errors
end
end
end
end
end
| 36.660377 | 103 | 0.686052 |
010ba809f24ff990d2ad2f22ddea03244ca63e52 | 507 | # app/lib/message.rb
class Message
def self.not_found(record = 'record')
"Sorry, #{record} not found."
end
def self.invalid_credentials
'Invalid credentials'
end
def self.invalid_token
'Invalid token'
end
def self.missing_token
'Missing token'
end
def self.unauthorized
'Unauthorized request'
end
def self.account_created
'Account created successfully'
end
def self.expired_token
'Sorry, your token has expired. Please login to continue'
end
end | 16.9 | 61 | 0.704142 |
e9face6a2bec3e96360402448c8a6089f919488a | 1,137 | require File.join(File.dirname(__FILE__), '..', '..', 'spec_helper')
describe Restful::Access::RuleSet do
describe "allow" do
before(:each) do
@ruleset = Restful::Access::RuleSet.new
@controller = ActionController::Base.new
@action = "index"
end
it "should return false if there are no rules" do
@ruleset.allow(@controller, @action).should == false
end
it "should return false if no rules match" do
@ruleset << mock(:access_rule, :type => :allow, :matches => false)
@ruleset.allow(@controller, @action).should == false
end
it "should return false if the last rule is a deny" do
@ruleset << mock(:access_rule, :type => :allow, :matches => true)
@ruleset << mock(:access_rule, :type => :deny, :matches => true)
@ruleset.allow(@controller, @action).should == false
end
it "should return true if the last rule is an allow" do
@ruleset << mock(:access_rule, :type => :deny, :matches => true)
@ruleset << mock(:access_rule, :type => :allow, :matches => true)
@ruleset.allow(@controller, @action).should == true
end
end
end | 35.53125 | 72 | 0.630607 |
01e52c55baea23a95f9bf15239872d5d2d3fc2e5 | 37 | class Team < ActiveRecord::Base
end
| 9.25 | 31 | 0.756757 |
33d8cd311b80bd5ecc043dd245f373d75e920507 | 222 | class CreateAccessTokens < ActiveRecord::Migration[5.1]
def change
create_table :access_tokens do |t|
t.references :user, foreign_key: true
t.string :password_digest
t.timestamps
end
end
end
| 20.181818 | 55 | 0.698198 |
fff30a9c1e772be7da46fc70b876df00e49392e5 | 133 | #encoding: utf-8
module GeeePay
class Railtie < Rails::Railtie
rake_tasks do
load 'rake/geee_pay.rake'
end
end
end
| 14.777778 | 32 | 0.676692 |
285a08500203fd1eefc6a873d5f3c0aeb778432e | 993 | require "spec_helper"
RSpec.describe Bruv do
it { expect(Bruv::VERSION).not_to be_nil }
class DummyClass
include Bruv
attribute :a
attribute :d, ->(d) { d.upcase }
attributes :b, :c
end
let(:a) { 1 }
let(:d) { "abc" }
let(:b) { 2 }
let(:c) { "3" }
context "correct number of params provided" do
subject { DummyClass.new(a, d, b, c) }
it "generates readers and assigns variables properly", :aggregate_failures do
expect(subject.a).to eq(a)
expect(subject.d).to eq(d.upcase)
expect(subject.b).to eq(b)
expect(subject.c).to eq(c)
end
end
context "number of params exceeds number of defined variables" do
subject { DummyClass.new(a, d, b, c, "1") }
it "generates readers and assigns variables properly", :aggregate_failures do
message = "Number of arguments exceeds number of instance variables for: DummyClass"
expect { subject }.to raise_error(Bruv::BruvArgumentError, message)
end
end
end
| 27.583333 | 90 | 0.654582 |
1de8e62195f161f04dee3e5a78c684b29c41f59d | 304 | module MemberHelper
def member
{
"object": "Member",
"id": "1",
"email": "[email protected]",
"quality_score": 0.86,
"credit": "$5.00",
"credit_cents": 500,
"created": "2015-05-22T14:56:29.000Z",
"updated": "2015-05-22T14:56:28.000Z"
}
end
end
| 20.266667 | 44 | 0.526316 |
03091eb871465026e2b00f84c93406f171870887 | 741 | require 'calculate_reverse_polish/core'
module CalculateReversePolish
module REPL
class << self
def run(client: nil)
@core = Core.new
catch_signals
loop do
repl('>')
end
end
private
def catch_signals
# Trap ^C
Signal.trap("INT") { stop }
# Trap `Kill`
Signal.trap("TERM") { stop }
end
def stop
puts 'Stoped...'
exit
end
def repl(prompt)
print prompt
handle_input(gets.chomp!)
end
def handle_input(input)
result = @client.process(input)
puts "#{result}"
rescue StandardError => err
pust "ERROR: #{err}"
end
end
end
end | 16.466667 | 39 | 0.516869 |
03b444d77eeaafa41bcce506b401284b5bbff69d | 1,465 | #
# Be sure to run `pod lib lint AttributedString.swift.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'AttributedString.swift'
s.version = ENV["LIB_VERSION"] || '1.0.0'
s.summary = 'A simple swifty extension wrapper for NSAttributedString or NSMutableAttributedString'
s.description = <<-DESC
A simple swifty extension wrapper for NSAttributedString or NSMutableAttributedString to make them much easier to use.
DESC
s.homepage = 'https://github.com/michaelhenry/AttributedString.swift'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'michaelhenry' => '[email protected]' }
s.source = { :git => 'https://github.com/michaelhenry/AttributedString.swift.git', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
s.source_files = 'AttributedString.swift/Classes/**/*'
# s.swift_version = '5.0'
# s.resource_bundles = {
# 'AttributedString.swift' => ['AttributedString.swift/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
| 40.694444 | 119 | 0.656655 |
abed2e09ac8266d7be0aa752d23560fd9cf32fb7 | 172 | # frozen_string_literal: true
require 'rspec/cloud/core/matchers/be_equal_to'
require 'rspec/cloud/core/matchers/have_digits'
require 'rspec/cloud/core/matchers/have_key'
| 28.666667 | 47 | 0.825581 |
5da873290f2a654abcfc24ef9c7b1080c4968e83 | 1,459 | require 'vertx-web/template_engine'
require 'vertx/util/utils.rb'
# Generated from io.vertx.ext.web.templ.ThymeleafTemplateEngine
module VertxWeb
# A template engine that uses the Thymeleaf library.
class ThymeleafTemplateEngine < ::VertxWeb::TemplateEngine
# @private
# @param j_del [::VertxWeb::ThymeleafTemplateEngine] the java delegate
def initialize(j_del)
super(j_del)
@j_del = j_del
end
# @private
# @return [::VertxWeb::ThymeleafTemplateEngine] the underlying java delegate
def j_del
@j_del
end
# Create a template engine using defaults
# @return [::VertxWeb::ThymeleafTemplateEngine] the engine
def self.create
if !block_given?
return ::Vertx::Util::Utils.safe_create(Java::IoVertxExtWebTempl::ThymeleafTemplateEngine.java_method(:create, []).call(),::VertxWeb::ThymeleafTemplateEngine)
end
raise ArgumentError, "Invalid arguments when calling create()"
end
# Set the mode for the engine
# @param [String] mode the mode
# @return [::VertxWeb::ThymeleafTemplateEngine] a reference to this for fluency
def set_mode(mode=nil)
if mode.class == String && !block_given?
return ::Vertx::Util::Utils.safe_create(@j_del.java_method(:setMode, [Java::java.lang.String.java_class]).call(mode),::VertxWeb::ThymeleafTemplateEngine)
end
raise ArgumentError, "Invalid arguments when calling set_mode(mode)"
end
end
end
| 39.432432 | 166 | 0.710075 |
1a6ac658b92b9b58c0e6c2440d2436001bd1fa51 | 387 | ENV['RAILS_ENV'] ||= 'test'
require File.expand_path('../../config/environment', __FILE__)
require 'rails/test_help'
require 'mocha/mini_test'
class ActiveSupport::TestCase
self.use_transactional_fixtures = true
# Setup all fixtures in test/fixtures/*.yml for all tests in alphabetical order.
fixtures :all
# Add more helper methods to be used by all tests here...
end
| 25.8 | 82 | 0.73385 |
bf7f51572a6eefea96bd092cd6d579da1bffcc7a | 3,763 | # Encoding: utf-8
# Cookbook Name:: dmg
# Provider:: package
#
# Copyright 2011, Joshua Timberman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Chef::Mixin::ShellOut
use_inline_resources if defined?(use_inline_resources)
def load_current_resource
@dmgpkg = Chef::Resource::DmgPackage.new(new_resource.name)
@dmgpkg.app(new_resource.app)
Chef::Log.debug("Checking for application #{new_resource.app}")
@dmgpkg.installed(installed?)
end
action :install do
unless @dmgpkg.installed
volumes_dir = new_resource.volumes_dir ? new_resource.volumes_dir : new_resource.app
dmg_name = new_resource.dmg_name ? new_resource.dmg_name : new_resource.app
dmg_file = if new_resource.file.nil?
"#{Chef::Config[:file_cache_path]}/#{dmg_name}.dmg"
else
new_resource.file
end
remote_file "#{dmg_file} - #{@dmgpkg.name}" do
path dmg_file
source new_resource.source
headers new_resource.headers if new_resource.headers
checksum new_resource.checksum if new_resource.checksum
end if new_resource.source
passphrase_cmd = new_resource.dmg_passphrase ? "-passphrase #{new_resource.dmg_passphrase}" : ''
ruby_block "attach #{dmg_file}" do
block do
cmd = shell_out("hdiutil imageinfo #{passphrase_cmd} '#{dmg_file}' | grep -q 'Software License Agreement: true'")
software_license_agreement = (cmd.exitstatus == 0)
raise "Requires EULA Acceptance; add 'accept_eula true' to package resource" if software_license_agreement && !new_resource.accept_eula
accept_eula_cmd = new_resource.accept_eula ? 'echo Y | PAGER=true' : ''
shell_out!("#{accept_eula_cmd} hdiutil attach #{passphrase_cmd} '#{dmg_file}' -quiet")
end
not_if "hdiutil info #{passphrase_cmd} | grep -q 'image-path.*#{dmg_file}'"
end
case new_resource.type
when 'app'
execute "rsync --force --recursive --links --perms --executability --owner --group --times '/Volumes/#{volumes_dir}/#{new_resource.app}.app' '#{new_resource.destination}'" do
user new_resource.owner if new_resource.owner
end
file "#{new_resource.destination}/#{new_resource.app}.app/Contents/MacOS/#{new_resource.app}" do
mode 0755
ignore_failure true
end
when 'mpkg', 'pkg'
execute "sudo installer -pkg '/Volumes/#{volumes_dir}/#{new_resource.app}.#{new_resource.type}' -target /" do
# Prevent cfprefsd from holding up hdiutil detach for certain disk images
environment('__CFPREFERENCES_AVOID_DAEMON' => '1') if Gem::Version.new(node['platform_version']) >= Gem::Version.new('10.8')
end
end
execute "hdiutil detach '/Volumes/#{volumes_dir}' || hdiutil detach '/Volumes/#{volumes_dir}' -force"
end
end
private
def installed?
if ::File.directory?("#{new_resource.destination}/#{new_resource.app}.app")
Chef::Log.info "Already installed; to upgrade, remove \"#{new_resource.destination}/#{new_resource.app}.app\""
true
elsif shell_out("pkgutil --pkgs='#{new_resource.package_id}'").exitstatus == 0
Chef::Log.info "Already installed; to upgrade, try \"sudo pkgutil --forget '#{new_resource.package_id}'\""
true
else
false
end
end
| 39.197917 | 180 | 0.701834 |
f7562acdcaa21f51bf188063c4d989c1497d097a | 398 | Spree::Api::V1::StockLocationsController.class_eval do
before_filter :artist_locations, only: [:index]
before_filter :artist_transfers, only: [:index]
private
def artist_locations
params[:q] ||= {}
params[:q][:artist_id_eq] = spree_current_user.artist_id
end
def artist_transfers
params[:q] ||= {}
params[:q][:artist_id_eq] = spree_current_user.artist_id
end
end
| 20.947368 | 60 | 0.708543 |
91e21eedf03b59493cd17e15ac9aeb3419a52339 | 1,377 | class Class
def inherited s
p Object.constants.include?(s.name)
if s.name == "YAML::Syck::Resolver" then
raise IOError
end
if $raise then
$sub = s
puts "raise #{$raise}"
raise $raise
end
puts "#{s} < #{self}"
end
end
class B
end
class A < B
end
class << self
class << self
class << self
end
end
end
puts '-'*25
puts 'dup'
puts '-'*25
C = A.dup # no event
puts '-'*25
puts 'Struct'
puts '-'*25
S = Struct.new :foo, :bar do
puts self.new.foo
puts 'bar'
end
puts '-'*25
$raise = 'xxx'
begin
S = Struct.new :foo, :bar do
puts self.new.foo
puts 'bar'
end
rescue
p $!
end
p $sub.public_instance_methods(false)
p $sub.private_instance_methods(false)
p $sub.protected_instance_methods(false)
p $sub.singleton_methods(false)
$raise = nil
puts '-'*25
puts 'Class.new'
puts '-'*25
X = Class.new B do
def bar
end
puts 'bar'
end
puts '-'*25
$raise = 'hello'
begin
X = Class.new B do
def bar
end
puts 'bar'
end
rescue
p $!
end
p $sub.instance_methods(false)
$raise = nil
puts '-'*25
puts 'class E < B'
puts '-'*25
$raise = 'hello'
begin
class E < B
def bar
end
puts 'bar'
end
rescue
p $!
end
p $sub.instance_methods(false)
$raise = nil
puts '-'*25
puts 'yaml'
puts '-'*25
begin
require 'yaml'
rescue
p $!
end
| 11.87069 | 45 | 0.586783 |
21bfa82e7b744bc06972f2c16448843fd7958842 | 2,850 | require File.expand_path(File.dirname(__FILE__) + "/../../spec_helper")
module Polonium
module ServerRunners
describe ExternalServerRunner do
attr_reader :configuration, :rails_env, :rails_root, :runner, :start_server_command, :stop_server_command, :original_start_server_command, :original_stop_server_command
before do
@configuration = Configuration.new
@rails_env = configuration.rails_env = 'test'
@rails_root = configuration.rails_root = File.dirname(__FILE__)
@start_server_command = "cd #{rails_root}; script/server -e #{rails_env} -p #{configuration.internal_app_server_port} -c #{rails_root}"
@stop_server_command = "ps ax | grep 'script/server -e #{rails_env}' | sed /grep/d | awk '{print $1}' | xargs kill -9 2>/dev/null"
@runner = ExternalServerRunner.new(configuration)
end
after do
ExternalServerRunner.start_server_command(&ExternalServerRunner::DEFAULT_START_SERVER_COMMAND)
ExternalServerRunner.stop_server_command(&ExternalServerRunner::DEFAULT_STOP_SERVER_COMMAND)
end
describe "#start" do
it "stops the server, then starts an external rails server" do
mock(runner).system(stop_server_command).ordered
mock(runner).system(start_server_command).ordered
runner.start
end
context "with a custom start_server_command" do
it "stops the server, then starts an external rails server with the custom command" do
ExternalServerRunner.start_server_command do
"custom start server command"
end
mock(runner).system(stop_server_command).ordered
mock(runner).system("custom start server command").ordered
runner.start
end
end
context "with a custom stop_server_command" do
it "stops the server with the custom command, then starts an external rails server" do
ExternalServerRunner.stop_server_command do
"custom stop server command"
end
mock(runner).system("custom stop server command").ordered
mock(runner).system(start_server_command).ordered
runner.start
end
end
end
describe "#stop" do
it "stops the server" do
mock(runner).system(stop_server_command)
runner.stop
end
context "with a custom stop_server_command" do
it "stops the server with the custom command" do
ExternalServerRunner.stop_server_command do
"custom stop server command"
end
mock(runner).system("custom stop server command")
runner.stop
end
end
end
end
end
end | 38.513514 | 175 | 0.639298 |
28bf9f4ae4bf8890f8d60f7d07212c338e271536 | 6,510 | class CLI
#Array of all pokemon for which pictures are available
@@picture_array = [
"Bulbasaur",
"Ivysaur",
"Venusaur",
"Charmander",
"Charmeleon","Charizard","Squirtle","Wartortle","Blastoise","Caterpie",
"Metapod","Butterfree","Weedle","Kakuna","Beedrill","Pidgey","Pidgeotto","Pidgeot",
"Rattata","Raticate","Spearow","Fearow","Ekans","Arbok","Pikachu","Raichu","Sandshrew",
"Sandslash","Nidoran♀","Nidorina","Nidoqueen","Nidoran♂","Nidorino","Nidoking","Clefairy",
"Clefable","Vulpix","Ninetales","Jigglypuff","Wigglytuff","Zubat","Golbat","Oddish","Gloom",
"Vileplume","Paras","Parasect","Venonat","Venomoth","Diglett","Dugtrio","Meowth","Persian","Psyduck",
"Golduck","Mankey","Primeape","Growlithe","Arcanine","Poliwag","Poliwhirl","Poliwrath","Abra","Kadabra",
"Alakazam","Machop","Machoke","Machamp","Bellsprout","Weepinbell","Victreebel","Tentacool","Tentacruel",
"Geodude","Graveler","Golem","Ponyta","Rapidash","Slowpoke","Slowbro","Magnemite","Magneton","Farfetch'd",
"Doduo","Dodrio","Seel","Dewgong","Grimer","Muk","Shellder","Cloyster","Gastly","Haunter","Gengar","Onix",
"Drowzee","Hypno","Krabby","Kingler","Voltorb","Electrode","Exeggcute","Exeggutor","Cubone","Marowak","Hitmonlee",
"Hitmonchan","Lickitung","Koffing","Weezing","Rhyhorn","Rhydon","Chansey","Tangela","Kangaskhan","Horsea","Seadra",
"Goldeen","Seaking","Staryu","Starmie","Mr. Mime","Scyther","Jynx","Electabuzz","Magmar","Pinsir","Tauros","Magikarp",
"Gyarados","Lapras","Ditto","Eevee","Vaporeon","Jolteon","Flareon","Porygon","Omanyte","Omastar","Kabuto","Kabutops",
"Aerodactyl","Snorlax","Articuno","Zapdos","Moltres","Dratini","Dragonair","Dragonite","Mewtwo",
"Mew"].map {|word| word.downcase}
def initialize
greeting
main_menu
run
end
def slow_print(sentence)
sentence.each_char {|letter| print letter; sleep(0.03)}
print "\n"
end
def greeting
slow_print("...")
slow_print("Hello there! Welcome to the world of")
dot_dot_dot(95)
puts logo.to_s.yellow
dot_dot_dot(95)
slow_print("My name is OAK! People call me the POKEMON PROF! This world is inhabited by creatures called ")
slow_print("POKEMON! For some people, POKEMON are pets. Others use them for fights. Myself...I study ")
slow_print("POKEMON as a profession. On the desk there is my invention, POKEDEX! It automatically records ")
slow_print("data on POKEMON you've seen or caught! It's a hi-tech encyclopedia!")
end
def main_menu
puts "\t\t\tEnter 'list' to list PokeDexes\t\t\t\t\t\t".yellow.on_blue
sleep(0.25)
puts "\t\t\tEnter 'pics' to list all Pokemon with images available\t\t\t".white.on_red
sleep(0.25)
puts "\t\t\tEnter 'exit' to exit\t\t\t\t\t\t\t".blue.on_yellow
sleep(0.25)
end
def list_pokedex
#Lists all pokedexes
Pokedex.list
end
def exit_animation
#animation for exiting program
dot_dot_dot(95)
puts ash
dot_dot_dot(95)
slow_print("\t\tGotta\t\tCatch\t\t'Em\t\tAll!'")
dot_dot_dot(95)
puts ""
sleep(1)
puts logo.to_s.yellow
dot_dot_dot(95)
end
def list_and_select_pokemon
list_pokemon
pokemon_selection
end
def pokedex_selection
selection = gets.strip
if selection.to_i <= 21 && selection.to_i != 0
Scrape.populate_pokemon(selection.to_i)
list_and_select_pokemon
elsif selection == 'exit'
exit_animation
else
not_valid
enter_selection
pokedex_selection
end
end
def enter_selection
puts "Enter your selection".black.on_yellow
end
def not_valid
puts "Not a valid selection try again!".red.on_blue
end
def gathering_info
puts "Gathering information. Please wait".red.on_white
print "\n"
dot_dot_dot(100)
print "\n"
puts "Here is your info!".red.on_white
print "\n"
dot_dot_dot(100)
sleep(0.5)
end
def run
Pokemon.clear
Pokedex.clear
Scrape.pokedex
input = gets.strip
if input == "exit"
exit_animation
elsif input == "list"
list_pokedex
enter_selection
pokedex_selection
elsif input == "pics"
Scrape.populate_pokemon(17)
list_and_select_pokemon
else
not_valid
puts "You can type 'pics', 'list or 'exit'".black.on_white
run
end
end
def list_pokemon
Pokemon.all.each.with_index(1) do |pokemon, idx|
puts "#{idx}. #{pokemon.name}"
pokemon.number = idx
sleep(0.02)
end
end
def pick_a_pokemon
print "Enter the name of a Pokemon or it's index for more information:\nor type 'exit' to go back to main menu:".yellow
print "\n"
end
def pokemon_selection
pick_a_pokemon
input = gets.strip.downcase
if input.to_i > 0 && input.to_i < (Pokemon.all.size + 1)
gathering_info
if Pokemon.all.size == 151
print_pokemon(input.to_i)
else
name = Pokemon.name_by_number(input.to_i).downcase
if picture_array.include?(name)
print_pokemon(picture_array.index(name)+1)
end
end
dot_dot_dot(100)
Scrape.by_number(input.to_i)
dot_dot_dot(100)
pokemon_selection
elsif Pokemon.all_names.include?(input)
gathering_info
if picture_array.include?(input)
print_pokemon(picture_array.index(input)+1)
dot_dot_dot(100)
end
Scrape.pokemon(input)
dot_dot_dot(100)
pokemon_selection
elsif input == 'exit'
main_menu
run
else
pick_a_pokemon
puts "Please select from the following list!"
sleep(1)
dot_dot_dot(100)
list_pokemon
pokemon_selection
end
end
def picture_array
@@picture_array
end
end
| 36.166667 | 127 | 0.583717 |
4a0ca32775651d9b60d7935c41ee67a41da50aac | 1,289 | class Rdup < Formula
desc "Utility to create a file list suitable for making backups"
homepage "https://github.com/miekg/rdup"
url "https://github.com/miekg/rdup/archive/1.1.15.tar.gz"
sha256 "787b8c37e88be810a710210a9d9f6966b544b1389a738aadba3903c71e0c29cb"
revision 1
head "https://github.com/miekg/rdup.git"
bottle do
cellar :any
sha256 "10160aeeb73f78719894f7d95e0286975e77b7778acebb0150256fd0e83d0931" => :mojave
sha256 "2bc9ea46a7792c1c3f4d0b8d220e7712876e9847973a32dc948079c72045a0e3" => :high_sierra
sha256 "bb7077f739d9ba32ff6b1017987ebffc9b9e4081c6d3dd36e56f0193c9e9e4e7" => :sierra
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "pkg-config" => :build
depends_on "glib"
depends_on "libarchive"
depends_on "mcrypt"
depends_on "nettle"
depends_on "pcre"
def install
system "autoreconf", "-fiv"
system "./configure", "--prefix=#{prefix}"
system "make"
system "make", "install"
end
test do
# tell rdup to archive itself, then let rdup-tr make a tar archive of it,
# and test with tar and grep whether the resulting tar archive actually
# contains rdup
system "#{bin}/rdup /dev/null #{bin}/rdup | #{bin}/rdup-tr -O tar | tar tvf - | grep #{bin}/rdup"
end
end
| 33.051282 | 101 | 0.723817 |
382f7529a919df4925cc0a071d4893fdc8e86bc1 | 301 | class CreateSites < ActiveRecord::Migration[5.0]
def change
create_table :sites do |t|
t.string :code, index: true
t.string :name
t.string :status
t.string :url
t.text :description
t.text :topics, array: true, default: []
t.timestamps
end
end
end
| 21.5 | 48 | 0.614618 |
e26fd1595fec1e40c4cca8c93e23d29cb2853717 | 1,508 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ServiceFabric::V6_5_0_36
module Models
#
# Safety check that ensures that a quorum of replicas are not lost for a
# partition.
#
class EnsurePartitionQuorumSafetyCheck < PartitionSafetyCheck
include MsRestAzure
def initialize
@Kind = "EnsurePartitionQuorum"
end
attr_accessor :Kind
#
# Mapper for EnsurePartitionQuorumSafetyCheck class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'EnsurePartitionQuorum',
type: {
name: 'Composite',
class_name: 'EnsurePartitionQuorumSafetyCheck',
model_properties: {
Kind: {
client_side_validation: true,
required: true,
serialized_name: 'Kind',
type: {
name: 'String'
}
},
partition_id: {
client_side_validation: true,
required: false,
serialized_name: 'PartitionId',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 25.133333 | 76 | 0.537798 |
7a2d7d68ce208c66e1924132128296dde33fda53 | 4,044 | require_relative '../../features/support/element_helper.rb'
# Accessors defines class-level macros that generate page-object style
# helper methods for UI elements. Each macro takes a logical +name+ and a
# +locator+ string and uses +define_method+ to create instance methods
# (click, existence check, text/value readers, ...) each backed by a fresh
# ElementHelper wrapping the locator.
module Accessors
  # Generates button helpers:
  #   <name>           -- clicks the button (return value not specified)
  #   <name>_enabled?  -- whether the button is enabled
  #   <name>?          -- whether the button exists
  #
  # @example
  #   button(:login_button, "xpath~//UIButtonField")
  #   page.login_button   # clicks the button
  def self.button(name, locator)
    # Click the button.
    define_method("#{name}") do
      ElementHelper.new(locator).click
    end
    # True when the button is enabled.
    define_method("#{name}_enabled?") do
      ElementHelper.new(locator).enabled?
    end
    # True when the button is present on the screen.
    define_method("#{name}?") do
      ElementHelper.new(locator).exists?
    end
  end

  # Generates image helpers:
  #   <name>?  -- whether the image exists
  #   <name>   -- clicks the image
  def self.image(name, locator)
    define_method("#{name}?") do
      ElementHelper.new(locator).exists?
    end
    define_method("#{name}") do
      ElementHelper.new(locator).click
    end
  end

  # Generates static-text helpers:
  #   <name>?              -- whether the text element exists
  #   <name>               -- clicks the text element
  #   <name>_text          -- the element's text property
  #   <name>_value         -- the element's value property
  #   <name>_dynamic_text  -- whether the given dynamic text exists
  def self.text(name, locator)
    define_method("#{name}?") do
      ElementHelper.new(locator).exists?
    end
    define_method("#{name}") do
      ElementHelper.new(locator).click
    end
    define_method("#{name}_text") do
      ElementHelper.new(locator).text
    end
    define_method("#{name}_value") do
      ElementHelper.new(locator).value
    end
    define_method("#{name}_dynamic_text") do |text|
      ElementHelper.new(locator).dynamic_text_exists?(text)
    end
  end

  # Generates text-field helpers:
  #   <name>=          -- types the given text into the field
  #   <name>_text      -- the field's text property
  #   clear_<name>     -- clears the field
  #   <name>?          -- whether the field exists
  #   <name>_value     -- the field's value property
  #   <name>_enabled?  -- whether the field is enabled
  def self.text_field(name, locator)
    define_method("#{name}=") do |text|
      ElementHelper.new(locator).text = text
    end
    define_method("#{name}_text") do
      ElementHelper.new(locator).text
    end
    define_method("clear_#{name}") do
      ElementHelper.new(locator).clear
    end
    # BUG FIX: previously called ElementHelper#exist?; every other accessor
    # in this module uses #exists?, so use the same predicate here.
    define_method("#{name}?") do
      ElementHelper.new(locator).exists?
    end
    define_method("#{name}_value") do
      ElementHelper.new(locator).value
    end
    define_method("#{name}_enabled?") do
      ElementHelper.new(locator).enabled?
    end
  end

  # Generates table helpers:
  #   <name>_cell_count  -- total number of cells in the table
  def self.table(name, locator)
    define_method("#{name}_cell_count") do
      ElementHelper.new(locator).cell_count
    end
  end

  # Generates a raw element accessor:
  #   <name>  -- returns the ElementHelper wrapping the locator
  def self.element(name, locator)
    define_method("#{name}") do
      ElementHelper.new(locator)
    end
  end
end
e2e2453ca75b44e24a87f1dae14cc425192d38b9 | 1,411 | # encoding: utf-8
require 'spec_helper'
# Spec for Github::Gists::Comments#create: stubs the POST endpoint and
# verifies parameter filtering, required-parameter validation, and the
# shape of the returned resource.
describe Github::Gists::Comments, '#create' do
  let(:gist_id)      { 1 }
  let(:request_path) { "/gists/#{gist_id}/comments" }
  let(:inputs) {
    { "body" => "Just commenting for the sake of commenting",
      "unrelated" => true }
  }

  before {
    # Only permitted params reach the API; 'unrelated' must be stripped.
    stub_post(request_path).with(inputs.except('unrelated')).
      to_return(:body => body, :status => status,
        :headers => {:content_type => "application/json; charset=utf-8"})
  }

  after { reset_authentication_for(subject) }

  # Fixed typo in description: was "resouce created".
  context "resource created" do
    let(:body)   { fixture('gists/comment.json') }
    let(:status) { 201 }

    # Fixed description: the required input removed below is 'body', not 'content'.
    it "should fail to create resource if 'body' input is missing" do
      expect {
        subject.create gist_id, inputs.except('body')
      }.to raise_error(Github::Error::RequiredParams)
    end

    it "should create resource successfully" do
      subject.create gist_id, inputs
      a_post(request_path).with(inputs).should have_been_made
    end

    it "should return the resource" do
      comment = subject.create gist_id, inputs
      comment.should be_a Github::ResponseWrapper
    end

    it "should get the comment information" do
      comment = subject.create gist_id, inputs
      comment.user.login.should == 'octocat'
    end
  end

  it_should_behave_like 'request failure' do
    let(:requestable) { subject.create gist_id, inputs }
  end
end # create
| 27.134615 | 72 | 0.666903 |
bb6741c9b4e02dc7308c57c908d31fd6baca881d | 1,263 | require File.expand_path("#{File.dirname(__FILE__)}/../../spec_helper")
module TrackerGit
  # Spec for Command::Deploy.call: it shells out to Capistrano and, on
  # success, marks all finished Tracker stories as delivered.
  # NOTE(review): `mock`, `mock.strong`, and `dont_allow` look like the rr
  # mocking library's API — confirm against spec_helper; `tracker` is
  # presumably a helper/double provided by the surrounding spec setup.
  describe Command::Deploy do
    describe "#call" do
      it "calls system command `cap demo deploy`" do
        # The deploy is delegated to the shell via Kernel#system.
        mock(Command::Deploy).system("cap demo deploy")
        Command::Deploy.call(tracker)
      end

      context "when system command passes" do
        it "delivers all finished stories" do
          finished_stories = [
            {'id' => 1, 'current_state' => 'finished'},
            {'id' => 3, 'current_state' => 'finished'},
          ]
          # Strong mock: the tracker must be queried for finished stories only.
          mock.strong(tracker).find({"current_state" => "finished"}) do
            finished_stories
          end
          # Each finished story is expected to be pushed back as 'delivered'.
          finished_stories.each do |finished_story|
            mock.strong(tracker).update_story(finished_story.merge('current_state' => 'delivered'))
          end
          # The block return value {true} makes the stubbed deploy succeed.
          mock(Command::Deploy).system("cap demo deploy") {true}
          Command::Deploy.call(tracker)
        end
      end

      context "when system command fails" do
        it "does not deliver stories" do
          # When the deploy fails, the tracker must not be queried at all.
          dont_allow(tracker).find({"current_state" => "finished"})
          mock(Command::Deploy).system("cap demo deploy") {false}
          Command::Deploy.call(tracker)
        end
      end
    end
  end
end | 30.071429 | 99 | 0.593032 |
01f608426bc7aa1b97162b1cae7f5691ec21e6b8 | 728 | # Returns a new instance of the Hash class being benchmarked. Define before
# Helpers for benchmarking a Hash implementation. To benchmark a class
# other than Hash, define hash_class/new_hash before loading this file.
unless Object.public_method_defined?(:new_hash)
  # The class under benchmark.
  def hash_class
    Hash
  end

  # A fresh, empty instance of the class under benchmark.
  def new_hash
    {}
  end
end

# Seed the RNG so every run sees the same "random" key material.
srand(1)

STRING = "rdnqsp uxq\nhnokjirs\nb c6rlh|4c@jcb av8\nPvunszwijhy lz kdgy7hlKlR nzqxg\ndqldeg nm-yg vmnb mk gdrn x"
N = STRING.size

# 10,000 symbol keys :key0..:key9999.
SYM_KEYS = (0...10000).map { |idx| :"key#{idx}" }
# 10,000 pseudo-random string keys: a random slice of STRING plus four
# random bytes (rand call order matches the seeded sequence above).
STR_KEYS = (0...10000).map do
  key = STRING[rand(N/4)..rand(N/2)]
  4.times { key << rand(128) }
  key
end

# Builds a hash mapping the first +n+ symbol keys to their indices.
def new_sym_hash(n)
  (0...n).each_with_object(new_hash) { |idx, acc| acc[SYM_KEYS[idx]] = idx }
end

# Builds a hash mapping the first +n+ string keys to their indices.
def new_str_hash(n)
  (0...n).each_with_object(new_hash) { |idx, acc| acc[STR_KEYS[idx]] = idx }
end
| 22.060606 | 115 | 0.677198 |
e95eba94b6ce7043dc098e6a098074f3417b43ee | 50 | require "ted_talks/version"
# Top-level namespace for the ted_talks gem; intentionally empty here —
# constants such as VERSION are defined in files required alongside it.
module TedTalks
end
| 8.333333 | 27 | 0.8 |
edd5eec4d698a8a882a8f81ae4f57b65fe9f277e | 811 | # frozen_string_literal: true
# Session-backed helpers for the ContentItemSelectionParams kept around
# while a Canvas deployment is being configured.
module SelectionParams
  extend ActiveSupport::Concern

  included do
    helper_method :selection_params
  end

  private

  # Reads the selection params stashed in the session (nil when unset).
  def selection_params
    session[:content_item_selection_params]
  end

  # Stores the given selection params in the session.
  def selection_params=(data)
    session[:content_item_selection_params] = data
  end

  # The LTI user id carried inside the selection params, if any.
  def lti_uid
    current_params = selection_params
    current_params.try(:[], 'lti_uid')
  end

  # Builds the Pundit user context pairing the signed-in user with the
  # current selection params.
  def pundit_user
    DeploymentPolicy::UserContext.new(current_user, selection_params)
  end

  # Before-action guard: bounce to the root URL when no params are set.
  def ensure_content_item_selection_params_set!
    redirect_to(root_url) if selection_params.blank?
  end

  # Drops the selection params from the session.
  def clear_content_item_selection_params
    session[:content_item_selection_params] = nil
  end
end
| 21.342105 | 79 | 0.787916 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.