hexsha stringlengths 40-40 | size int64 2-1.01M | content stringlengths 2-1.01M | avg_line_length float64 1.5-100 | max_line_length int64 2-1k | alphanum_fraction float64 0.25-1 |
---|---|---|---|---|---|
618da79b9559ee9c7c9b826d084d7fd647e38599 | 124 | require 'test_helper'
class UsersBadgeTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15.5 | 46 | 0.709677 |
7a4b541aadfc7ae3a2846074e5f80db27da425a9 | 2,560 | # frozen_string_literal: true
class RemovePaperclipColumns < ActiveRecord::Migration[6.1]
def change
remove_paperclip_metadata
remove_other_obsolete_columns
end
def remove_other_obsolete_columns
# This is now saved in the DB instead of being set dynamically.
remove_column :operations, :attachment_download_name, :string
# This has been obsolete for a long time.
remove_column :responses, :odk_xml, :text
end
# Not all servers have "legacy_url" (only if they were using cloud storage during the original migration).
def remove_paperclip_metadata
remove_column :operations, :attachment_content_type, :string
remove_column :operations, :attachment_file_name, :string
remove_column :operations, :attachment_file_size, :integer
remove_column :operations, :attachment_legacy_url, :string if ActiveRecord::Base.connection.column_exists?(:operations, :attachment_legacy_url)
remove_column :operations, :attachment_updated_at, :datetime
remove_column :saved_uploads, :file_content_type, :string
remove_column :saved_uploads, :file_file_name, :string
remove_column :saved_uploads, :file_file_size, :integer
remove_column :saved_uploads, :file_legacy_url, :string if ActiveRecord::Base.connection.column_exists?(:saved_uploads, :file_legacy_url)
remove_column :saved_uploads, :file_updated_at, :datetime
remove_column :questions, :media_prompt_content_type, :string
remove_column :questions, :media_prompt_file_name, :string
remove_column :questions, :media_prompt_file_size, :integer
remove_column :questions, :media_prompt_legacy_url, :string if ActiveRecord::Base.connection.column_exists?(:questions, :media_prompt_legacy_url)
remove_column :questions, :media_prompt_updated_at, :datetime
remove_column :media_objects, :item_content_type, :string
remove_column :media_objects, :item_file_name, :string
remove_column :media_objects, :item_file_size, :integer
remove_column :media_objects, :item_legacy_url, :string if ActiveRecord::Base.connection.column_exists?(:media_objects, :item_legacy_url)
remove_column :media_objects, :item_updated_at, :datetime
remove_column :responses, :odk_xml_content_type, :string
remove_column :responses, :odk_xml_file_name, :string
remove_column :responses, :odk_xml_file_size, :integer
remove_column :responses, :odk_xml_legacy_url, :string if ActiveRecord::Base.connection.column_exists?(:responses, :odk_xml_legacy_url)
remove_column :responses, :odk_xml_updated_at, :datetime
end
end
| 52.244898 | 149 | 0.793359 |
611eb056042df9f844d853a51579a1b54ef678e0 | 136 | # frozen_string_literal: true
module Types
class BaseField < GraphQL::Schema::Field
argument_class Types::BaseArgument
end
end
| 17 | 42 | 0.772059 |
d5d1fabc84bdf6cb83a3a99a2cf627acca45c8c9 | 33,500 | # Comprehensively test a formula or pull request.
#
# Usage: brew test-bot [options...] <pull-request|formula>
#
# Options:
# --keep-logs: Write and keep log files under ./brewbot/
# --cleanup: Clean the Homebrew directory. Very dangerous. Use with care.
# --clean-cache: Remove all cached downloads. Use with care.
# --skip-setup: Don't check that the local system is set up correctly.
# --skip-homebrew: Don't check Homebrew's files and tests are all valid.
# --junit: Generate a JUnit XML test results file.
# --email: Generate an email subject file.
# --no-bottle: Run brew install without --build-bottle
# --keep-old: Run brew bottle --keep-old to build new bottles for a single platform.
# --HEAD: Run brew install with --HEAD
# --local: Ask Homebrew to write verbose logs under ./logs/ and set HOME to ./home/
# --tap=<tap>: Use the git repository of the given tap
# --dry-run: Just print commands, don't run them.
# --fail-fast: Immediately exit on a failing step.
# --verbose: Print out all logs in realtime
#
# --ci-master: Shortcut for Homebrew master branch CI options.
# --ci-pr: Shortcut for Homebrew pull request CI options.
# --ci-testing: Shortcut for Homebrew testing CI options.
# --ci-upload: Homebrew CI bottle upload.
# --ci-reset-and-update: Homebrew CI repository and tap reset and update.
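#
# Example invocations (illustrative):
#   brew test-bot --keep-logs wget
#   brew test-bot --junit https://github.com/Homebrew/homebrew/pull/44293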
require "formula"
require "utils"
require "date"
require "rexml/document"
require "rexml/xmldecl"
require "rexml/cdata"
require "cmd/tap"
module Homebrew
EMAIL_SUBJECT_FILE = "brew-test-bot.#{MacOS.cat}.email.txt"
BYTES_IN_1_MEGABYTE = 1024*1024
def resolve_test_tap
tap = ARGV.value("tap")
return Tap.new(*tap_args(tap)) if tap
if ENV["UPSTREAM_BOT_PARAMS"]
bot_argv = ENV["UPSTREAM_BOT_PARAMS"].split " "
bot_argv.extend HomebrewArgvExtension
tap = bot_argv.value("tap")
return Tap.new(*tap_args(tap)) if tap
end
if git_url = ENV["UPSTREAM_GIT_URL"] || ENV["GIT_URL"]
# Also can get tap from Jenkins GIT_URL.
url_path = git_url.sub(%r{^https?://github\.com/}, "").chomp("/")
HOMEBREW_TAP_ARGS_REGEX =~ url_path
return Tap.new($1, $3) if $1 && $3 && $3 != "homebrew"
end
# return nil means we are testing core repo.
end
def homebrew_git_repo(tap = nil)
if tap
tap.path
else
HOMEBREW_REPOSITORY
end
end
class Step
attr_reader :command, :name, :status, :output, :time
def initialize(test, command, options = {})
@test = test
@category = test.category
@command = command
@puts_output_on_success = options[:puts_output_on_success]
@name = command[1].delete("-")
@status = :running
@repository = options[:repository] || HOMEBREW_REPOSITORY
@time = 0
end
def log_file_path
file = "#{@category}.#{@name}.txt"
root = @test.log_root
root ? root + file : file
end
def command_short
(@command - %w[brew --force --retry --verbose --build-bottle --rb]).join(" ")
end
def passed?
@status == :passed
end
def failed?
@status == :failed
end
def puts_command
if ENV["TRAVIS"]
@@travis_step_num ||= 0
@travis_fold_id = @command.first(2).join(".") + ".#{@@travis_step_num += 1}"
@travis_timer_id = rand(2**32).to_s(16)
puts "travis_fold:start:#{@travis_fold_id}"
puts "travis_time:start:#{@travis_timer_id}"
end
puts "#{Tty.blue}==>#{Tty.white} #{@command.join(" ")}#{Tty.reset}"
end
def puts_result
if ENV["TRAVIS"]
travis_start_time = @start_time.to_i*1000000000
travis_end_time = @end_time.to_i*1000000000
travis_duration = travis_end_time - travis_start_time
puts "#{Tty.white}==>#{Tty.green} PASSED#{Tty.reset}" if passed?
puts "travis_time:end:#{@travis_timer_id},start=#{travis_start_time},finish=#{travis_end_time},duration=#{travis_duration}"
puts "travis_fold:end:#{@travis_fold_id}"
end
puts "#{Tty.white}==>#{Tty.red} FAILED#{Tty.reset}" if failed?
end
def has_output?
@output && !@output.empty?
end
def time
@end_time - @start_time
end
def run
@start_time = Time.now
puts_command
if ARGV.include? "--dry-run"
@end_time = Time.now
@status = :passed
puts_result
return
end
verbose = ARGV.verbose?
@output = ""
working_dir = Pathname.new(@command.first == "git" ? @repository : Dir.pwd)
read, write = IO.pipe
begin
pid = fork do
read.close
$stdout.reopen(write)
$stderr.reopen(write)
write.close
working_dir.cd { exec(*@command) }
end
write.close
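# Read the child's combined stdout/stderr one byte at a time so it can be
# echoed live (when verbose) and captured into @output.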
while buf = read.read(1)
if verbose
print buf
$stdout.flush
end
@output << buf
end
ensure
read.close
end
Process.wait(pid)
@end_time = Time.now
@status = $?.success? ? :passed : :failed
puts_result
if has_output?
@output = fix_encoding(@output)
puts @output if (failed? || @puts_output_on_success) && !verbose
File.write(log_file_path, @output) if ARGV.include? "--keep-logs"
end
exit 1 if ARGV.include?("--fail-fast") && failed?
end
private
if String.method_defined?(:force_encoding)
def fix_encoding(str)
return str if str.valid_encoding?
# Assume we are starting from a "mostly" UTF-8 string
str.force_encoding(Encoding::UTF_8)
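# Round-tripping through UTF-16 with :invalid => :replace swaps out invalid
# byte sequences, yielding a valid UTF-8 string.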
str.encode!(Encoding::UTF_16, :invalid => :replace)
str.encode!(Encoding::UTF_8)
end
elsif require "iconv"
def fix_encoding(str)
Iconv.conv("UTF-8//IGNORE", "UTF-8", str)
end
else
def fix_encoding(str)
str
end
end
end
class Test
attr_reader :log_root, :category, :name, :steps
def initialize(argument, options={})
@hash = nil
@url = nil
@formulae = []
@added_formulae = []
@modified_formula = []
@steps = []
@tap = options[:tap]
@repository = Homebrew.homebrew_git_repo @tap
@skip_homebrew = options[:skip_homebrew]
url_match = argument.match HOMEBREW_PULL_OR_COMMIT_URL_REGEX
git "rev-parse", "--verify", "-q", argument
if $?.success?
@hash = argument
elsif url_match
@url = url_match[0]
elsif safe_formulary(argument)
@formulae = [argument]
else
raise ArgumentError.new("#{argument} is not a pull request URL, commit URL or formula name.")
end
@category = __method__
@brewbot_root = Pathname.pwd + "brewbot"
FileUtils.mkdir_p @brewbot_root
end
def no_args?
@hash == "HEAD"
end
def safe_formulary(formula)
Formulary.factory formula
rescue FormulaUnavailableError, TapFormulaAmbiguityError
end
def git(*args)
rd, wr = IO.pipe
pid = fork do
rd.close
STDERR.reopen("/dev/null")
STDOUT.reopen(wr)
wr.close
Dir.chdir @repository
exec("git", *args)
end
wr.close
Process.wait(pid)
rd.read
ensure
rd.close
end
def download
def shorten_revision(revision)
git("rev-parse", "--short", revision).strip
end
def current_sha1
shorten_revision "HEAD"
end
def current_branch
git("symbolic-ref", "HEAD").gsub("refs/heads/", "").strip
end
def single_commit?(start_revision, end_revision)
git("rev-list", "--count", "#{start_revision}..#{end_revision}").to_i == 1
end
def diff_formulae(start_revision, end_revision, path, filter)
git(
"diff-tree", "-r", "--name-only", "--diff-filter=#{filter}",
start_revision, end_revision, "--", path
).lines.map do |line|
file = line.chomp
next unless File.extname(file) == ".rb"
File.basename(file, ".rb")
end.compact
end
def brew_update
return unless current_branch == "master"
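# `brew update` is attempted a second time if the first run fails.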
success = quiet_system "brew", "update"
success ||= quiet_system "brew", "update"
end
@category = __method__
@start_branch = current_branch
travis_pr = ENV["TRAVIS_PULL_REQUEST"] && ENV["TRAVIS_PULL_REQUEST"] != "false"
# Use Jenkins GitHub Pull Request Builder plugin variables for
# pull request jobs.
if ENV["ghprbPullLink"]
@url = ENV["ghprbPullLink"]
@hash = nil
test "git", "checkout", "origin/master"
# Use Travis CI pull-request variables for pull request jobs.
elsif travis_pr
@url = "https://github.com/#{ENV["TRAVIS_REPO_SLUG"]}/pull/#{ENV["TRAVIS_PULL_REQUEST"]}"
@hash = nil
end
# Use Jenkins Git plugin variables for master branch jobs.
if ENV["GIT_PREVIOUS_COMMIT"] && ENV["GIT_COMMIT"]
diff_start_sha1 = ENV["GIT_PREVIOUS_COMMIT"]
diff_end_sha1 = ENV["GIT_COMMIT"]
# Use Travis CI Git variables for master or branch jobs.
elsif ENV["TRAVIS_COMMIT_RANGE"]
diff_start_sha1, diff_end_sha1 = ENV["TRAVIS_COMMIT_RANGE"].split "..."
diff_start_sha1 = git("merge-base", diff_start_sha1, diff_end_sha1).strip
# Otherwise just use the current SHA-1 (which may be overridden later)
else
diff_end_sha1 = diff_start_sha1 = current_sha1
end
# Handle no arguments being passed on the command-line e.g. `brew test-bot`.
if no_args?
if diff_start_sha1 == diff_end_sha1 || \
single_commit?(diff_start_sha1, diff_end_sha1)
@name = diff_end_sha1
else
@name = "#{diff_start_sha1}-#{diff_end_sha1}"
end
# Handle formulae arguments being passed on the command-line e.g. `brew test-bot wget fish`.
elsif @formulae && @formulae.any?
@name = "#{@formulae.first}-#{diff_end_sha1}"
# Handle a hash being passed on the command-line e.g. `brew test-bot 1a2b3c`.
elsif @hash
test "git", "checkout", @hash
diff_start_sha1 = "#{@hash}^"
diff_end_sha1 = @hash
@name = @hash
# Handle a URL being passed on the command-line or through Jenkins/Travis
# environment variables e.g.
# `brew test-bot https://github.com/Homebrew/homebrew/pull/44293`.
elsif @url
# TODO: in future Travis CI may need to also use `brew pull` to e.g. push
# the right commit to BrewTestBot.
unless travis_pr
diff_start_sha1 = current_sha1
test "brew", "pull", "--clean", @url
diff_end_sha1 = current_sha1
end
@short_url = @url.gsub("https://github.com/", "")
if @short_url.include? "/commit/"
# 7 characters should be enough for a commit (not 40).
@short_url.gsub!(/(commit\/\w{7}).*/, '\1')
@name = @short_url
else
@name = "#{@short_url}-#{diff_end_sha1}"
end
else
raise "Cannot set @name: invalid command-line arguments!"
end
if ENV["TRAVIS"]
puts "name: #{@name}"
puts "url: #{@url}"
puts "hash: #{@hash}"
puts "diff_start_sha1: #{diff_start_sha1}"
puts "diff_end_sha1: #{diff_end_sha1}"
end
@log_root = @brewbot_root + @name
FileUtils.mkdir_p @log_root
return unless diff_start_sha1 != diff_end_sha1
return if @url && steps.last && !steps.last.passed?
if @tap
formula_path = %w[Formula HomebrewFormula].find { |dir| (@repository/dir).directory? } || ""
else
formula_path = "Library/Formula"
end
@added_formulae += diff_formulae(diff_start_sha1, diff_end_sha1, formula_path, "A")
@modified_formula += diff_formulae(diff_start_sha1, diff_end_sha1, formula_path, "M")
@formulae += @added_formulae + @modified_formula
end
def skip(formula_name)
puts "#{Tty.blue}==>#{Tty.white} SKIPPING: #{formula_name}#{Tty.reset}"
end
def satisfied_requirements?(formula, spec, dependency = nil)
requirements = formula.send(spec).requirements
unsatisfied_requirements = requirements.reject do |requirement|
satisfied = false
satisfied ||= requirement.satisfied?
satisfied ||= requirement.optional?
if !satisfied && requirement.default_formula?
default = Formula[requirement.default_formula]
satisfied = satisfied_requirements?(default, :stable, formula.full_name)
end
satisfied
end
if unsatisfied_requirements.empty?
true
else
name = formula.full_name
name += " (#{spec})" unless spec == :stable
name += " (#{dependency} dependency)" if dependency
skip name
puts unsatisfied_requirements.map(&:message)
false
end
end
def setup
@category = __method__
return if ARGV.include? "--skip-setup"
test "brew", "doctor" unless ENV["TRAVIS"]
test "brew", "--env"
test "brew", "config"
end
def formula(formula_name)
@category = "#{__method__}.#{formula_name}"
canonical_formula_name = if @tap
"#{@tap}/#{formula_name}"
else
formula_name
end
test "brew", "uses", canonical_formula_name
formula = Formulary.factory(canonical_formula_name)
formula.conflicts.map { |c| Formulary.factory(c.name) }.
select(&:installed?).each do |conflict|
test "brew", "unlink", conflict.name
end
installed_gcc = false
deps = []
reqs = []
if formula.stable
return unless satisfied_requirements?(formula, :stable)
deps |= formula.stable.deps.to_a.reject(&:optional?)
reqs |= formula.stable.requirements.to_a.reject(&:optional?)
elsif formula.devel
return unless satisfied_requirements?(formula, :devel)
end
if formula.devel && !ARGV.include?("--HEAD")
deps |= formula.devel.deps.to_a.reject(&:optional?)
reqs |= formula.devel.requirements.to_a.reject(&:optional?)
end
begin
deps.each { |d| d.to_formula.recursive_dependencies }
rescue TapFormulaUnavailableError => e
raise if e.tap.installed?
safe_system "brew", "tap", e.tap.name
retry
end
begin
deps.each do |dep|
CompilerSelector.select_for(dep.to_formula)
end
CompilerSelector.select_for(formula)
rescue CompilerSelectionError => e
unless installed_gcc
run_as_not_developer { test "brew", "install", "gcc" }
installed_gcc = true
OS::Mac.clear_version_cache
retry
end
skip canonical_formula_name
puts e.message
return
end
conflicts = formula.conflicts
formula.recursive_dependencies.each do |dependency|
conflicts += dependency.to_formula.conflicts
end
conflicts.each do |conflict|
confict_formula = Formulary.factory(conflict.name)
if confict_formula.installed? && confict_formula.linked_keg.exist?
test "brew", "unlink", "--force", conflict.name
end
end
installed = Utils.popen_read("brew", "list").split("\n")
dependencies = Utils.popen_read("brew", "deps", "--skip-optional",
canonical_formula_name).split("\n")
(installed & dependencies).each do |installed_dependency|
installed_dependency_formula = Formulary.factory(installed_dependency)
if installed_dependency_formula.installed? &&
!installed_dependency_formula.keg_only? &&
!installed_dependency_formula.linked_keg.exist?
test "brew", "link", installed_dependency
end
end
dependencies -= installed
unchanged_dependencies = dependencies - @formulae
changed_dependences = dependencies - unchanged_dependencies
runtime_dependencies = Utils.popen_read("brew", "deps",
"--skip-build", "--skip-optional",
canonical_formula_name).split("\n")
build_dependencies = dependencies - runtime_dependencies
unchanged_build_dependencies = build_dependencies - @formulae
dependents = Utils.popen_read("brew", "uses", "--skip-build", "--skip-optional", canonical_formula_name).split("\n")
dependents -= @formulae
dependents = dependents.map { |d| Formulary.factory(d) }
testable_dependents = dependents.select { |d| d.test_defined? && d.bottled? }
if (deps | reqs).any? { |d| d.name == "mercurial" && d.build? }
run_as_not_developer { test "brew", "install", "mercurial" }
end
test "brew", "fetch", "--retry", *unchanged_dependencies unless unchanged_dependencies.empty?
unless changed_dependences.empty?
test "brew", "fetch", "--retry", "--build-bottle", *changed_dependences
# Install changed dependencies as new bottles so we don't have checksum problems.
test "brew", "install", "--build-bottle", *changed_dependences
# Run postinstall on them because the tested formula might depend on
# this step
test "brew", "postinstall", *changed_dependences
end
formula_fetch_options = []
formula_fetch_options << "--build-bottle" unless ARGV.include? "--no-bottle"
formula_fetch_options << "--force" if ARGV.include? "--cleanup"
formula_fetch_options << canonical_formula_name
test "brew", "fetch", "--retry", *formula_fetch_options
test "brew", "uninstall", "--force", canonical_formula_name if formula.installed?
install_args = ["--verbose"]
install_args << "--build-bottle" unless ARGV.include? "--no-bottle"
install_args << "--HEAD" if ARGV.include? "--HEAD"
# Pass --devel or --HEAD to install in the event formulae lack stable. Supports devel-only/head-only.
# head-only should not have devel, but devel-only can have head. Stable can have all three.
if devel_only_tap? formula
install_args << "--devel"
elsif head_only_tap? formula
install_args << "--HEAD"
end
install_args << canonical_formula_name
# Don't care about e.g. bottle failures for dependencies.
run_as_not_developer do
test "brew", "install", "--only-dependencies", *install_args unless dependencies.empty?
test "brew", "install", *install_args
end
install_passed = steps.last.passed?
audit_args = [canonical_formula_name]
audit_args << "--strict" << "--online" if @added_formulae.include? formula_name
test "brew", "audit", *audit_args
if install_passed
if formula.stable? && !ARGV.include?("--no-bottle")
bottle_args = ["--verbose", "--rb", canonical_formula_name]
bottle_args << "--keep-old" if ARGV.include? "--keep-old"
test "brew", "bottle", *bottle_args
bottle_step = steps.last
if bottle_step.passed? && bottle_step.has_output?
bottle_filename =
bottle_step.output.gsub(/.*(\.\/\S+#{bottle_native_regex}).*/m, '\1')
test "brew", "uninstall", "--force", canonical_formula_name
if unchanged_build_dependencies.any?
test "brew", "uninstall", "--force", *unchanged_build_dependencies
unchanged_dependencies -= unchanged_build_dependencies
end
test "brew", "install", bottle_filename
end
end
test "brew", "test", "--verbose", canonical_formula_name if formula.test_defined?
testable_dependents.each do |dependent|
unless dependent.installed?
test "brew", "fetch", "--retry", dependent.name
next if steps.last.failed?
conflicts = dependent.conflicts.map { |c| Formulary.factory(c.name) }.select(&:installed?)
conflicts.each do |conflict|
test "brew", "unlink", conflict.name
end
run_as_not_developer { test "brew", "install", dependent.name }
next if steps.last.failed?
end
if dependent.installed?
test "brew", "test", "--verbose", dependent.name
end
end
test "brew", "uninstall", "--force", canonical_formula_name
end
if formula.devel && formula.stable? && !ARGV.include?("--HEAD") \
&& satisfied_requirements?(formula, :devel)
test "brew", "fetch", "--retry", "--devel", *formula_fetch_options
run_as_not_developer { test "brew", "install", "--devel", "--verbose", canonical_formula_name }
devel_install_passed = steps.last.passed?
test "brew", "audit", "--devel", *audit_args
if devel_install_passed
test "brew", "test", "--devel", "--verbose", canonical_formula_name if formula.test_defined?
test "brew", "uninstall", "--devel", "--force", canonical_formula_name
end
end
test "brew", "uninstall", "--force", *unchanged_dependencies if unchanged_dependencies.any?
end
def homebrew
@category = __method__
return if @skip_homebrew
test "brew", "tests" unless OS.linux?
if @tap
test "brew", "readall", @tap.name
else
test "brew", "tests", "--no-compat" unless OS.linux?
readall_args = ["--aliases"]
readall_args << "--syntax" if MacOS.version >= :mavericks
test "brew", "readall", *readall_args
test "brew", "update-test"
end
end
def cleanup_before
@category = __method__
return unless ARGV.include? "--cleanup"
git "gc", "--auto"
git "stash"
git "am", "--abort"
git "rebase", "--abort"
git "reset", "--hard"
git "checkout", "-f", "master"
git "clean", "-ffdx"
pr_locks = "#{HOMEBREW_REPOSITORY}/.git/refs/remotes/*/pr/*/*.lock"
Dir.glob(pr_locks) { |lock| FileUtils.rm_rf lock }
end
def cleanup_after
@category = __method__
checkout_args = []
if ARGV.include? "--cleanup"
test "git", "clean", "-ffdx"
checkout_args << "-f"
end
checkout_args << @start_branch
if @start_branch && !@start_branch.empty? && \
(ARGV.include?("--cleanup") || @url || @hash)
test "git", "checkout", *checkout_args
end
if ARGV.include? "--cleanup"
test "git", "reset", "--hard"
git "stash", "pop"
test "brew", "cleanup", "--prune=7"
git "gc", "--auto"
if ARGV.include? "--local"
FileUtils.rm_rf ENV["HOMEBREW_HOME"]
FileUtils.rm_rf ENV["HOMEBREW_LOGS"]
end
end
FileUtils.rm_rf @brewbot_root unless ARGV.include? "--keep-logs"
end
def test(*args)
options = Hash === args.last ? args.pop : {}
options[:repository] = @repository
step = Step.new self, args, options
step.run
steps << step
step
end
def check_results
steps.all? do |step|
case step.status
when :passed then true
when :running then raise
when :failed then false
end
end
end
def formulae
changed_formulae_dependents = {}
@formulae.each do |formula|
formula_dependencies = Utils.popen_read("brew", "deps", formula).split("\n")
unchanged_dependencies = formula_dependencies - @formulae
changed_dependences = formula_dependencies - unchanged_dependencies
changed_dependences.each do |changed_formula|
changed_formulae_dependents[changed_formula] ||= 0
changed_formulae_dependents[changed_formula] += 1
end
end
changed_formulae = changed_formulae_dependents.sort do |a1, a2|
a2[1].to_i <=> a1[1].to_i
end
changed_formulae.map!(&:first)
unchanged_formulae = @formulae - changed_formulae
changed_formulae + unchanged_formulae
end
def head_only_tap?(formula)
formula.head && formula.devel.nil? && formula.stable.nil? && formula.tap == "homebrew/homebrew-head-only"
end
def devel_only_tap?(formula)
formula.devel && formula.stable.nil? && formula.tap == "homebrew/homebrew-devel-only"
end
def run
cleanup_before
download
setup
homebrew
formulae.each do |f|
formula(f)
end
cleanup_after
check_results
end
end
def test_bot_ci_reset_and_update
Tap.each do |tap|
next unless tap.git?
cd tap.path do
quiet_system "git", "am", "--abort"
quiet_system "git", "rebase", "--abort"
safe_system "git", "checkout", "-f", "master"
safe_system "git", "reset", "--hard", "origin/master"
end
end
exec "brew", "update"
end
def test_ci_upload(tap)
jenkins = ENV["JENKINS_HOME"]
job = ENV["UPSTREAM_JOB_NAME"]
id = ENV["UPSTREAM_BUILD_ID"]
raise "Missing Jenkins variables!" if !jenkins || !job || !id
bintray_user = ENV["BINTRAY_USER"]
bintray_key = ENV["BINTRAY_KEY"]
if !bintray_user || !bintray_key
raise "Missing BINTRAY_USER or BINTRAY_KEY variables!"
end
# Don't pass keys/cookies to subprocesses.
ENV["BINTRAY_KEY"] = nil
ENV["HUDSON_SERVER_COOKIE"] = nil
ENV["JENKINS_SERVER_COOKIE"] = nil
ENV["HUDSON_COOKIE"] = nil
ARGV << "--verbose"
ARGV << "--keep-old" if ENV["UPSTREAM_BOTTLE_KEEP_OLD"]
bottles = Dir["#{jenkins}/jobs/#{job}/configurations/axis-version/*/builds/#{id}/archive/*.bottle*.*"]
return if bottles.empty?
FileUtils.cp bottles, Dir.pwd, :verbose => true
ENV["GIT_AUTHOR_NAME"] = ENV["GIT_COMMITTER_NAME"] = "BrewTestBot"
ENV["GIT_AUTHOR_EMAIL"] = ENV["GIT_COMMITTER_EMAIL"] = "[email protected]"
ENV["GIT_WORK_TREE"] = Homebrew.homebrew_git_repo(tap)
ENV["GIT_DIR"] = "#{ENV["GIT_WORK_TREE"]}/.git"
pr = ENV["UPSTREAM_PULL_REQUEST"]
number = ENV["UPSTREAM_BUILD_NUMBER"]
quiet_system "git", "am", "--abort"
quiet_system "git", "rebase", "--abort"
safe_system "git", "checkout", "-f", "master"
safe_system "git", "reset", "--hard", "origin/master"
safe_system "brew", "update"
if pr
pull_pr = if tap
"https://github.com/#{tap.user}/homebrew-#{tap.repo}/pull/#{pr}"
else
pr
end
safe_system "brew", "pull", "--clean", pull_pr
end
bottle_args = ["--merge", "--write", *Dir["*.bottle.rb"]]
bottle_args << "--tap=#{tap}" if tap
bottle_args << "--keep-old" if ARGV.include? "--keep-old"
system "brew", "bottle", *bottle_args
remote_repo = tap ? "homebrew-#{tap.repo}" : "homebrew"
remote = "[email protected]:BrewTestBot/#{remote_repo}.git"
tag = pr ? "pr-#{pr}" : "testing-#{number}"
safe_system "git", "push", "--force", remote, "master:master", ":refs/tags/#{tag}"
bintray_repo = if tap.nil?
Bintray.repository(tap)
else
Bintray.repository(tap.name)
end
bintray_repo_url = "https://api.bintray.com/packages/homebrew/#{bintray_repo}"
formula_packaged = {}
Dir.glob("*.bottle*.tar.gz") do |filename|
formula_name, canonical_formula_name = bottle_resolve_formula_names filename
formula = Formulary.factory canonical_formula_name
version = formula.pkg_version
bintray_package = Bintray.package formula_name
if system "curl", "-I", "--silent", "--fail", "--output", "/dev/null",
"#{BottleSpecification::DEFAULT_DOMAIN}/#{bintray_repo}/#{filename}"
raise <<-EOS.undent
#{filename} is already published. Please remove it manually from
https://bintray.com/homebrew/#{bintray_repo}/#{bintray_package}/view#files
EOS
end
unless formula_packaged[formula_name]
package_url = "#{bintray_repo_url}/#{bintray_package}"
unless system "curl", "--silent", "--fail", "--output", "/dev/null", package_url
curl "--silent", "--fail", "-u#{bintray_user}:#{bintray_key}",
"-H", "Content-Type: application/json",
"-d", "{\"name\":\"#{bintray_package}\"}", bintray_repo_url
puts
end
formula_packaged[formula_name] = true
end
content_url = "https://api.bintray.com/content/homebrew"
content_url += "/#{bintray_repo}/#{bintray_package}/#{version}/#{filename}"
content_url += "?override=1"
curl "--silent", "--fail", "-u#{bintray_user}:#{bintray_key}",
"-T", filename, content_url
puts
end
safe_system "git", "tag", "--force", tag
safe_system "git", "push", "--force", remote, "refs/tags/#{tag}"
end
def sanitize_ARGV_and_ENV
if Pathname.pwd == HOMEBREW_PREFIX && ARGV.include?("--cleanup")
odie "cannot use --cleanup from HOMEBREW_PREFIX as it will delete all output."
end
ENV["HOMEBREW_DEVELOPER"] = "1"
ENV["HOMEBREW_SANDBOX"] = "1"
ENV["HOMEBREW_NO_EMOJI"] = "1"
ENV["HOMEBREW_FAIL_LOG_LINES"] = "150"
if ENV["TRAVIS"]
ARGV << "--verbose"
ARGV << "--ci-master" if ENV["TRAVIS_PULL_REQUEST"] == "false"
ENV["HOMEBREW_VERBOSE_USING_DOTS"] = "1"
end
if ARGV.include?("--ci-master") || ARGV.include?("--ci-pr") \
|| ARGV.include?("--ci-testing")
ARGV << "--cleanup" if ENV["JENKINS_HOME"] || ENV["TRAVIS"]
ARGV << "--junit" << "--local"
end
if ARGV.include? "--ci-master"
ARGV << "--no-bottle" << "--email"
end
if ARGV.include? "--local"
ENV["HOMEBREW_HOME"] = ENV["HOME"] = "#{Dir.pwd}/home"
mkdir_p ENV["HOME"]
ENV["HOMEBREW_LOGS"] = "#{Dir.pwd}/logs"
end
if ARGV.include? "--email"
File.open EMAIL_SUBJECT_FILE, "w" do |file|
# The file should be written at the end but in case we don't get to that
# point ensure that we have something valid.
file.write "#{MacOS.version}: internal error."
end
end
end
def test_bot
sanitize_ARGV_and_ENV
tap = resolve_test_tap
# Tap repository if required, this is done before everything else
# because Formula parsing and/or git commit hash lookup depends on it.
if tap && !tap.installed?
safe_system "brew", "tap", tap.name
end
if ARGV.include? "--ci-reset-and-update"
return test_bot_ci_reset_and_update
elsif ARGV.include? "--ci-upload"
return test_ci_upload(tap)
end
tests = []
any_errors = false
skip_homebrew = ARGV.include?("--skip-homebrew")
if ARGV.named.empty?
# With no arguments just build the most recent commit.
head_test = Test.new("HEAD", :tap => tap, :skip_homebrew => skip_homebrew)
any_errors = !head_test.run
tests << head_test
else
ARGV.named.each do |argument|
test_error = false
begin
test = Test.new(argument, :tap => tap, :skip_homebrew => skip_homebrew)
skip_homebrew = true
rescue ArgumentError => e
test_error = true
ofail e.message
else
test_error = !test.run
tests << test
end
any_errors ||= test_error
end
end
if ARGV.include? "--junit"
xml_document = REXML::Document.new
xml_document << REXML::XMLDecl.new
testsuites = xml_document.add_element "testsuites"
tests.each do |test|
testsuite = testsuites.add_element "testsuite"
testsuite.add_attribute "name", "brew-test-bot.#{MacOS.cat}"
testsuite.add_attribute "tests", test.steps.count
test.steps.each do |step|
testcase = testsuite.add_element "testcase"
testcase.add_attribute "name", step.command_short
testcase.add_attribute "status", step.status
testcase.add_attribute "time", step.time
if step.has_output?
# Remove invalid XML CData characters from step output.
output = step.output.delete("\000\a\b\e\f\x2\x1f")
if output.bytesize > BYTES_IN_1_MEGABYTE
output = "truncated output to 1MB:\n" \
+ output.slice(-BYTES_IN_1_MEGABYTE, BYTES_IN_1_MEGABYTE)
end
cdata = REXML::CData.new output
if step.passed?
elem = testcase.add_element "system-out"
else
elem = testcase.add_element "failure"
elem.add_attribute "message", "#{step.status}: #{step.command.join(" ")}"
end
elem << cdata
end
end
end
open("brew-test-bot.xml", "w") do |xml_file|
pretty_print_indent = 2
xml_document.write(xml_file, pretty_print_indent)
end
end
if ARGV.include? "--email"
failed_steps = []
tests.each do |test|
test.steps.each do |step|
next if step.passed?
failed_steps << step.command_short
end
end
if failed_steps.empty?
email_subject = ""
else
email_subject = "#{MacOS.version}: #{failed_steps.join ", "}."
end
File.open EMAIL_SUBJECT_FILE, "w" do |file|
file.write email_subject
end
end
HOMEBREW_CACHE.children.each(&:rmtree) if ARGV.include? "--clean-cache"
Homebrew.failed = any_errors
end
end
| 33.037475 | 131 | 0.608239 |
62db692378e9a49871ad6ffd622d2e7501f126bc | 80 | # frozen_string_literal: true
require_relative "has_navigation/has_navigation"
| 20 | 48 | 0.8625 |
bf1878e3a17be2b7c1a7d07951dfcf8c13c69932 | 1,305 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'rails_script/version'
Gem::Specification.new do |spec|
spec.name = 'rails_script'
spec.version = RailsScript::VERSION
spec.authors = ['Kevin Pheasey']
spec.email = ['[email protected]']
spec.summary = %q{A Rails-centric, object oriented, featherweight framework for writing CoffeeScript}
spec.description = %q{Rails Script is a Rails-centric, object oriented, featherweight framework for writing CoffeeScript. It is optimized for the Rails Asset Pipeline and is compatible with TurboLinks. Using Rails controller names and actions to call JavaScript, it has never been easier to write clean, concise, and maintainable page specific JavaScript.}
spec.homepage = 'https://github.com/kpheasey/rails_script'
spec.license = 'MIT'
spec.files = `git ls-files -z`.split("\u0000")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
spec.add_development_dependency 'bundler', '~> 1.6'
spec.add_development_dependency 'rake', '~> 0'
spec.add_dependency 'coffee-rails', '>= 4.0.0'
end
| 50.192308 | 360 | 0.702682 |
398707671cf2bc17e46e844c90bd10db115751b7 | 233 | require 'mqtt'
require 'mqtt_pipe/version'
require 'mqtt_pipe/types'
require 'mqtt_pipe/packer'
require 'mqtt_pipe/listener'
require 'mqtt_pipe/pipe'
module MQTTPipe
extend self
def create &block
Pipe.new &block
end
end
| 14.5625 | 28 | 0.759657 |
abb5e160af635912675f4f3f6aecc792a6a84f06 | 1,024 | # frozen_string_literal: true
require "spec_helper"
require "yaml"
RSpec.describe H3m::Map do
YAML.load_file("spec/resources/resources.yml").each do |map_spec|
map_file = map_spec["file"]
map_path = "spec/resources/#{map_spec["file"]}"
map = H3m::Map.new(map_path)
it "should parse game_version for #{map_file}" do
expect(map.game_version).to eq map_spec["game_version"]
end
it "should parse map size for #{map_file}" do
expect(map.size).to eq map_spec["size"]
end
it "should parse map name for #{map_file}" do
expect(map.name).to eq map_spec["name"]
end
it "should parse map description for #{map_file}" do
expect(map.description).to eq map_spec["description"]
end
it "should parse difficulty for #{map_file}" do
expect(map.difficulty).to eq map_spec["difficulty"]
end
it "should parse subterranean_level flag for #{map_file}" do
expect(map.subterranean_level?).to eq map_spec["has_subterranean_level"]
end
end
end
| 27.675676 | 78 | 0.683594 |
38b3d3fb44660abe3b01af6f4ac510df4df52d4c | 184 | arr = [1, 2, 3, 4, 5, 6, 7]
puts 'Original array:', arr.to_s
reversed = []
arr.each do |i|
reversed.unshift i
end
puts 'New array derived from the original:', reversed.to_s
| 15.333333 | 60 | 0.652174 |
e25e24b5cd286a7862bd79c2ba411df3b6f9ff42 | 4,838 | module Rubybear
module Mixins
module ActsAsPoster
# Creates a post operation.
#
# bears = Rubybear::Chain.new(chain: :bears, account_name: 'your account name', wif: 'your wif')
# options = {
# title: 'This is my fancy post title.',
# body: 'This is my fancy post body.',
# tags: %w(these are my fancy tags)
# }
# bears.post(options)
# bears.broadcast!
#
# @param options [::Hash] options
# @option options [String] :title Title of the post.
# @option options [String] :body Body of the post.
# @option options [::Array<String>] :tags Tags of the post.
# @option options [String] :permlink (automatic) Permlink of the post, defaults to formatted title.
# @option options [String] :parent_permlink (automatic) Parent permlink of the post, defaults to first tag.
# @option options [String] :parent_author (optional) Parent author of the post (only used if reply).
# @option options [String] :max_accepted_payout (1000000.000 BSD) Maximum accepted payout, set to '0.000 BSD' to decline payout
# @option options [Integer] :percent_bears_dollars (5000) Percent BEARS Dollars is used to set 50/50 or 100% BEARS Power
# @option options [Integer] :allow_votes (true) Allow votes for this post.
# @option options [Integer] :allow_curation_rewards (true) Allow curation rewards for this post.
def post(options = {})
tags = [options[:tags] || []].flatten
title = options[:title].to_s
permlink = options[:permlink] || title.downcase.gsub(/[^a-z0-9\-]+/, '-')
parent_permlink = options[:parent_permlink] || tags[0]
raise ChainError, 'At least one tag is required or set the parent_permlink directly.' if parent_permlink.nil?
body = options[:body]
parent_author = options[:parent_author] || ''
max_accepted_payout = options[:max_accepted_payout] || default_max_acepted_payout
percent_bears_dollars = options[:percent_bears_dollars]
allow_votes = options[:allow_votes] || true
allow_curation_rewards = options[:allow_curation_rewards] || true
self_vote = options[:self_vote]
tags.insert(0, parent_permlink)
tags = tags.compact.uniq
metadata = {
app: Rubybear::AGENT_ID
}
metadata[:tags] = tags if tags.any?
@operations << {
type: :comment,
parent_permlink: parent_permlink,
author: account_name,
permlink: permlink,
title: title,
body: body,
json_metadata: metadata.to_json,
parent_author: parent_author
}
if (!!max_accepted_payout &&
max_accepted_payout != default_max_acepted_payout
) || !!percent_bears_dollars || !allow_votes || !allow_curation_rewards
@operations << {
type: :comment_options,
author: account_name,
permlink: permlink,
max_accepted_payout: max_accepted_payout,
percent_bears_dollars: percent_bears_dollars,
allow_votes: allow_votes,
allow_curation_rewards: allow_curation_rewards,
extensions: []
}
end
vote(self_vote, account_name, permlink) if !!self_vote
self
end
# Create a vote operation and broadcasts it right away.
#
# bears = Rubybear::Chain.new(chain: :bears, account_name: 'your account name', wif: 'your wif')
# options = {
# title: 'This is my fancy post title.',
# body: 'This is my fancy post body.',
# tags: %w(these are my fancy tags)
# }
# bears.post!(options)
#
# @see post
def post!(options = {}); post(options).broadcast!(true); end
# Create a delete_comment operation.
#
# Examples:
#
# bears = Rubybear::Chain.new(chain: :bears, account_name: 'your account name', wif: 'your wif')
# bears.delete_comment('permlink')
# bears.broadcast!
#
# @param permlink
def delete_comment(permlink)
@operations << {
type: :delete_comment,
author: account_name,
permlink: permlink
}
self
end
# Create a delete_comment operation and broadcasts it right away.
#
# Examples:
#
# bears = Rubybear::Chain.new(chain: :bears, account_name: 'your account name', wif: 'your wif')
# bears.delete_comment!('permlink')
#
# @see delete_comment
def delete_comment!(permlink); delete_comment(permlink).broadcast!(true); end
end
end
end
| 38.704 | 132 | 0.59198 |
18c6cfb1f5415d84814b8af40ec07b7fc5feecc3 | 6,543 | =begin
PureCloud Platform API
With the PureCloud Platform API, you can control all aspects of your PureCloud environment. With the APIs you can access the system configuration, manage conversations and more.
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
License: ININ
http://www.inin.com
Terms of Service: https://developer.mypurecloud.com/tos
=end
require 'date'
module PureCloud
class ConversationNotification
attr_accessor :id
attr_accessor :max_participants
attr_accessor :participants
attr_accessor :recording_state
attr_accessor :address
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'id' => :'id',
:'max_participants' => :'maxParticipants',
:'participants' => :'participants',
:'recording_state' => :'recordingState',
:'address' => :'address'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'id' => :'String',
:'max_participants' => :'Integer',
:'participants' => :'Array<ConversationNotificationParticipants>',
:'recording_state' => :'String',
:'address' => :'String'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'id')
self.id = attributes[:'id']
end
if attributes.has_key?(:'maxParticipants')
self.max_participants = attributes[:'maxParticipants']
end
if attributes.has_key?(:'participants')
if (value = attributes[:'participants']).is_a?(Array)
self.participants = value
end
end
if attributes.has_key?(:'recordingState')
self.recording_state = attributes[:'recordingState']
end
if attributes.has_key?(:'address')
self.address = attributes[:'address']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
return invalid_properties
end
# Check to see if all the properties in the model are valid
# @return true if the model is valid
def valid?
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
id == o.id &&
max_participants == o.max_participants &&
participants == o.participants &&
recording_state == o.recording_state &&
address == o.address
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[id, max_participants, participants, recording_state, address].hash
end
# build the object from hash
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
else
#TODO show warning in debug mode
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
else
# data not found in attributes(hash), not an issue as the data can be optional
end
end
self
end
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /^(true|t|yes|y|1)$/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
_model = Object.const_get("PureCloud").const_get(type).new
_model.build_from_hash(value)
end
end
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
def to_body
to_hash
end
# return the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Method to output non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
def _to_hash(value)
if value.is_a?(Array)
value.compact.map{ |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 21.523026 | 177 | 0.554333 |
ac8433599114b4d9f656a4f576c11fa7d8aebe56 | 834 | require "spec_helper"
describe ChildBenefitRates, type: :model do
let(:year) { 2014 }
let(:first_child_rate) { 42.42 }
let(:additional_child_rate) { 13.17 }
let(:calculator) { ChildBenefitRates.new(year) }
let(:rates) { { year => [first_child_rate, additional_child_rate] } }
before do
allow(calculator).to receive(:rates_for_year).and_return(rates[year])
end
describe "#year" do
it "returns the year passed during initialization" do
expect(calculator.year).to eq(year)
end
end
describe "#first_child_rate" do
it "returns correct rates" do
expect(calculator.first_child_rate).to eq(first_child_rate)
end
end
describe "#additional_child_rate" do
it "returns correct rates" do
expect(calculator.additional_child_rate).to eq(additional_child_rate)
end
end
end
| 26.0625 | 75 | 0.71223 |
ff81396ecd65c70884c05b51ddcd1afcd3571d1c | 1,734 | # frozen_string_literal: true
module Kenna
module Toolkit
class UploadFile < Kenna::Toolkit::BaseTask
def self.metadata
{
id: "upload_file",
name: "Upload File",
description: "This task uploads a file to a specified connector",
options: [
{ name: "kenna_api_key",
type: "api_key",
required: false,
default: nil,
description: "Kenna API Key" },
{ name: "kenna_api_host",
type: "hostname",
required: false,
default: "api.kennasecurity.com",
description: "Kenna API Hostname" },
{ name: "connector_id",
type: "integer",
required: true,
default: -1,
description: "Kenna Connector ID" },
{ name: "file",
type: "filename",
required: false,
default: "input/file.xml",
description: "Path to the data file, relative to #{$basedir}" }
]
}
end
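# Illustrative options for run (values are placeholders): a Kenna API host and
# key, connector_id: 123, and file: "input/file.xml" relative to $basedir.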
def run(options)
super
api_host = @options[:kenna_api_host]
api_token = @options[:kenna_api_key]
connector_id = @options[:connector_id]
filepath = "#{$basedir}/#{@options[:file]}"
# TODO. ... handled upstream?
# unless api_host && api_token
# print_error "Cannot proceed, missing required options"
# return
# end
api_client = Kenna::Api::Client.new(api_token, api_host)
print_good "Attempting to upload #{filepath}"
api_client.upload_to_connector(connector_id, filepath)
print_good "Done!"
end
end
end
end
| 28.9 | 77 | 0.526528 |
ff324b01dab82ad43e20f51c891e281ed830b55e | 7,438 | require 'active_record'
require 'active_record/base'
require File.expand_path(File.join(File.dirname(__FILE__), "rails_integration_proxy"))
require File.expand_path(File.join(File.dirname(__FILE__), "html_document_handler.rb"))
class Relevance::Tarantula::Crawler
extend Forwardable
include Relevance::Tarantula
class CrawlTimeout < RuntimeError; end
attr_accessor :proxy, :handlers, :skip_uri_patterns, :log_grabber,
:reporters, :links_to_crawl, :links_queued, :forms_to_crawl,
:form_signatures_queued, :max_url_length, :response_code_handler,
:times_to_crawl, :fuzzers, :test_name, :crawl_timeout
attr_reader :transform_url_patterns, :referrers, :failures, :successes, :crawl_start_times, :crawl_end_times
def initialize
@max_url_length = 1024
@successes = []
@failures = []
@handlers = [@response_code_handler = Result]
@links_queued = Set.new
@form_signatures_queued = Set.new
@links_to_crawl = []
@forms_to_crawl = []
@crawl_start_times, @crawl_end_times = [], []
@crawl_timeout = 20.minutes
@referrers = {}
@skip_uri_patterns = [
/^javascript/,
/^mailto/,
/^http/,
]
self.transform_url_patterns = [
[/#.*$/, '']
]
@reporters = [Relevance::Tarantula::IOReporter.new($stderr)]
@decoder = HTMLEntities.new
@times_to_crawl = 1
@fuzzers = [Relevance::Tarantula::FormSubmission]
end
def method_missing(meth, *args)
super unless Result::ALLOW_NNN_FOR =~ meth.to_s
@response_code_handler.send(meth, *args)
end
def transform_url_patterns=(patterns)
@transform_url_patterns = patterns.map do |pattern|
Array === pattern ? Relevance::Tarantula::Transform.new(*pattern) : pattern
end
end
def crawl(url = "/")
orig_links_queued = @links_queued.dup
orig_form_signatures_queued = @form_signatures_queued.dup
orig_links_to_crawl = @links_to_crawl.dup
orig_forms_to_crawl = @forms_to_crawl.dup
@times_to_crawl.times do |num|
queue_link url
begin
do_crawl num
rescue CrawlTimeout => e
puts e.message
end
puts "#{(num+1).ordinalize} crawl" if @times_to_crawl > 1
if num + 1 < @times_to_crawl
@links_queued = orig_links_queued
@form_signatures_queued = orig_form_signatures_queued
@links_to_crawl = orig_links_to_crawl
@forms_to_crawl = orig_forms_to_crawl
@referrers = {}
end
end
rescue Interrupt
$stderr.puts "CTRL-C"
ensure
report_results
end
def finished?
@links_to_crawl.empty? && @forms_to_crawl.empty?
end
def do_crawl(number)
while (!finished?)
@crawl_start_times << Time.now
crawl_queued_links(number)
crawl_queued_forms(number)
@crawl_end_times << Time.now
end
end
def crawl_queued_links(number = 0)
while (link = @links_to_crawl.pop)
response = proxy.send(link.method, link.href)
log "Response #{response.code} for #{link}"
handle_link_results(link, response)
blip(number)
end
end
def save_result(result)
reporters.each do |reporter|
reporter.report(result)
end
end
def handle_link_results(link, response)
handlers.each do |h|
begin
save_result h.handle(Result.new(:method => link.method,
:url => link.href,
:response => response,
:log => grab_log!,
:referrer => referrers[link],
:test_name => test_name).freeze)
rescue Exception => e
log "error handling #{link} #{e.message}"
# TODO: pass to results
end
end
end
def crawl_form(form)
response = proxy.send(form.method, form.action, form.data)
log "Response #{response.code} for #{form}"
response
rescue ActiveRecord::RecordNotFound => e
log "Skipping #{form.action}, presumed ok that record is missing"
Relevance::Tarantula::Response.new(:code => "404", :body => e.message, :content_type => "text/plain")
end
def crawl_queued_forms(number = 0)
while (form = @forms_to_crawl.pop)
response = crawl_form(form)
handle_form_results(form, response)
blip(number)
end
end
def elasped_time_for_pass(num)
Time.now - crawl_start_times[num]
end
def grab_log!
@log_grabber && @log_grabber.grab!
end
def handle_form_results(form, response)
handlers.each do |h|
save_result h.handle(Result.new(:method => form.method,
:url => form.action,
:response => response,
:log => grab_log!,
:referrer => form.action,
:data => form.data.inspect,
:test_name => test_name).freeze)
end
end
def should_skip_url?(url)
return true if url.blank?
if @skip_uri_patterns.any? {|pattern| pattern =~ url}
log "Skipping #{url}"
return true
end
if url.length > max_url_length
log "Skipping long url #{url}"
return true
end
end
def should_skip_link?(link)
should_skip_url?(link.href) || @links_queued.member?(link)
end
def should_skip_form_submission?(fs)
should_skip_url?(fs.action) || @form_signatures_queued.member?(fs.signature)
end
def transform_url(url)
return unless url
url = @decoder.decode(url)
@transform_url_patterns.each do |pattern|
url = pattern[url]
end
url
end
def queue_link(dest, referrer = nil)
dest = Link.new(dest)
dest.href = transform_url(dest.href)
return if should_skip_link?(dest)
@referrers[dest] = referrer if referrer
@links_to_crawl << dest
@links_queued << dest
dest
end
def queue_form(form, referrer = nil)
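# Each fuzzer expands the form into one or more mutated submissions,
# each of which is queued for crawling.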
fuzzers.each do |fuzzer|
fuzzer.mutate(Form.new(form)).each do |fs|
# fs = fuzzer.new(Form.new(form))
fs.action = transform_url(fs.action)
return if should_skip_form_submission?(fs)
@referrers[fs.action] = referrer if referrer
@forms_to_crawl << fs
@form_signatures_queued << fs.signature
end
end
end
def report_dir
File.join(rails_root, "tmp", "tarantula")
end
def generate_reports
errors = []
reporters.each do |reporter|
begin
reporter.finish_report(test_name)
rescue RuntimeError => e
errors << e
end
end
unless errors.empty?
raise errors.map(&:message).join("\n")
end
end
def report_results
generate_reports
end
def total_links_count
@links_queued.size + @form_signatures_queued.size
end
def links_remaining_count
@links_to_crawl.size + @forms_to_crawl.size
end
def links_completed_count
total_links_count - links_remaining_count
end
def blip(number = 0)
unless verbose
print "\r #{links_completed_count} of #{total_links_count} links completed "
timeout_if_too_long(number)
end
end
def timeout_if_too_long(number = 0)
if elasped_time_for_pass(number) > crawl_timeout
raise CrawlTimeout, "Exceeded crawl timeout of #{crawl_timeout} seconds - skipping to the next crawl..."
end
end
end
| 28.067925 | 112 | 0.630949 |
f741a1b7d108eb827db769efe403c66e38f7fa02 | 78 | json.extract! @post, :id, :title, :body, :published, :created_at, :updated_at
| 39 | 77 | 0.705128 |
abccb58f561ac69619ddef297bf3f681f995e1aa | 96 | property :extra_options, Hash,
description: 'Used for setting any HAProxy directives'
| 32 | 64 | 0.729167 |
ed9477ae67d4755e5f9bd181fdc66a55b7cdeb16 | 1,253 | require 'xmlrpc/client'
require 'gandi/zlib_parser_decorator'
require 'gandi/version'
require 'gandi/price'
require 'gandi/contact'
require 'gandi/domain'
require 'gandi/operation'
module Gandi
class Error < StandardError; end
class DataError < Error; end
class ServerError < Error; end
class ValidationError < Error; end
class << self
attr_writer :apikey
attr_writer :mode
def apikey
@apikey || ENV['GANDI_API_KEY']
end
def mode
@mode || ENV['GANDI_API_MODE']
end
def endpoint
self.mode == 'live' ? 'https://rpc.gandi.net/xmlrpc/' : 'https://rpc.ote.gandi.net/xmlrpc/'
end
def client
@client ||= begin
XMLRPC::Config.module_eval do
remove_const(:ENABLE_NIL_PARSER)
const_set(:ENABLE_NIL_PARSER, true)
end
client = XMLRPC::Client.new2(self.endpoint)
client.http_header_extra = {"Accept-Encoding" => "gzip"}
client.set_parser ZlibParserDecorator.new(client.send(:parser))
client
end
end
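# Illustrative usage (the RPC method name is an example, not verified against the API):
#   Gandi.apikey = 'your-api-key'
#   Gandi.call('domain.list')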
def call(name, *args)
client.call(name, self.apikey, *args)
rescue XMLRPC::FaultException => e
raise(e.faultCode < 500000 ? ServerError : DataError, e.faultString)
end
end
end
| 23.641509 | 97 | 0.651237 |
bb4eda67345d15901e120e052a9145c38e93c4f6 | 1,960 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Monitor::Mgmt::V2018_06_01_preview
module Models
#
# Guest diagnostic setting resource for patch operations
#
class GuestDiagnosticSettingsAssociationResourcePatch
include MsRestAzure
# @return [Hash{String => String}] Resource tags
attr_accessor :tags
# @return [String] The guest diagnostic settings name.
attr_accessor :guest_diagnostic_settings_name
#
# Mapper for GuestDiagnosticSettingsAssociationResourcePatch class as
# Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'GuestDiagnosticSettingsAssociationResourcePatch',
type: {
name: 'Composite',
class_name: 'GuestDiagnosticSettingsAssociationResourcePatch',
model_properties: {
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
guest_diagnostic_settings_name: {
client_side_validation: true,
required: true,
serialized_name: 'properties.guestDiagnosticSettingsName',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 29.69697 | 77 | 0.538776 |
26ebbaae13af12df7c9896aa4bb91b3614adf53e | 84 | module Subscribem
class ApplicationController < ::ApplicationController
end
end
| 16.8 | 55 | 0.821429 |
ac71f96a74e1549a41e2554f979ed4f2224b4b9e | 74 | # frozen_string_literal: true
module BucketStore
VERSION = "0.5.0"
end
| 12.333333 | 29 | 0.743243 |
1d521e9a998b88fad745d15d90db2fddee07f524 | 811 | # frozen_string_literal: true
require 'uploads'
# An image uploaded by a staff member
class Image < ApplicationRecord
has_one_attached :file
validates :title, presence: true
validate :attached_file_is_image
before_create :create_unique_identifier
def to_param
unique_id
end
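# Returns a resized-to-fill variant of the attached file, e.g. (illustrative):
#   image.file_variant(width: 300, height: 200)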
def file_variant(width:, height:)
spec = Uploads.resize_to_fill width: width, height: height, blob: file.blob
variation = ActiveStorage::Variation.new spec
ActiveStorage::Variant.new(file.blob, variation)
end
private
def attached_file_is_image
errors.add(:file, 'must be an image') if file.attached? && !file.image?
end
def create_unique_identifier
loop do
self.unique_id = SecureRandom.base58(6)
break unless self.class.exists?(unique_id: unique_id)
end
end
end
| 22.527778 | 79 | 0.738594 |
1c46efa8b0414dbf566b04ce64782ff5bdbf3fd5 | 236 | class Coordinate
attr_reader :x, :y
def initialize(x, y)
@x = x
@y = y
end
def distance_from(other)
# TODO: actually calculate distance between the coordinates.
# e.g. (@x - other.x).abs
0
end
end
| 14.75 | 64 | 0.59322 |
0317a6bd466f8aa1772bae840642d309a1ec5ff6 | 1,526 | module RubyCollections
class LinkedList
attr_accessor :size, :top
#TODO: implement iterator
#TODO: implement to_a
def initialize
@size = 0
@top = nil
end
def empty?
size.zero?
end
def header
@top ? @top.to_s : nil
end
def add(data, index = nil)
return nil if index and index >= size
if index
get(index-1).setNext(data)
else
node = Node.new(data, top)
@top = node
end
@size += 1
end
def get(index)
node = top
index.times {node = node.getNext}
return node
end
def remove(index)
node = get(index-1)
to_be_removed = node.getNext
node.setNext(to_be_removed.getNext)
@size -= 1
end
def to_s
return "" if empty?
data = []
data << (node = top).data
(size-1).times {data << (node = node.getNext).data}
return data.to_s
end
class Node
attr_accessor :data
def initialize(data, next_node)
@data = data
@next = next_node.object_id
end
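# The next node is stored by object_id; getNext dereferences it back into
# a Node (or nil) via ObjectSpace._id2ref.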
def getNext
ObjectSpace._id2ref(@next)
end
def setNext(data)
node = Node.new(data, nil)
next_node_id = instance_variable_get(:@next)
@next = node.object_id
node.instance_variable_set(:@next, next_node_id)
return node
end
def to_s
"#{data}"
end
end
end
end | 19.316456 | 58 | 0.521625 |
ff9417e952292db06689e4d876e2f83955c9b1c4 | 2,353 | require 'test_helper'
class UsersControllerTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
@other_user = users(:archer)
end
test "should redirect index when not logged in" do
get users_path
assert_redirected_to login_url
end
test "should get new" do
get signup_url
assert_response :success
end
test "should redirect edit when not logged in" do
get edit_user_path(@user)
assert_not flash.empty?
assert_redirected_to login_url
end
test "should redirect update when not logged in" do
patch user_path(@user), params: { user: { name: @user.name,
email: @user.email } }
assert_not flash.empty?
assert_redirected_to login_url
end
test "should not allow the admin attribute to be edited via the web" do
log_in_as(@other_user)
assert_not @other_user.admin?
patch user_path(@other_user), params: {
user: { password: 'password',
password_confirmation: 'password',
admin: true } }
assert_not @other_user.reload.admin?
end
test "should redirect edit when logged in as wrong user" do
log_in_as(@other_user)
get edit_user_path(@user)
assert flash.empty?
assert_redirected_to root_url
end
test "should redirect update when logged in as wrong user" do
log_in_as(@other_user)
patch user_path(@user), params: { user: { name: @user.name,
email: @user.email } }
assert flash.empty?
assert_redirected_to root_url
end
test "should redirect destroy when not logged in" do
assert_no_difference 'User.count' do
delete user_path(@user)
end
assert_redirected_to login_url
end
test "should redirect destroy when logged in as a non-admin" do
log_in_as(@other_user)
assert_no_difference 'User.count' do
delete user_path(@user)
end
assert_redirected_to root_url
end
test "should redirect following when not logged in" do
get following_user_path(@user)
assert_redirected_to login_url
end
test "should redirect followers when not logged in" do
get followers_user_path(@user)
assert_redirected_to login_url
end
end
| 28.349398 | 78 | 0.650659 |
38b19e23fea54e7495675e9b77c770b11b7151c3 | 190 | # Load the Rails application.
require_relative "application"
# Initialize the Rails application.
Rails.application.initialize!
require 'carrierwave'
require 'carrierwave/orm/activerecord'
| 21.111111 | 38 | 0.821053 |
015ee875c20ad1a72fac4378d84168b5c3a34129 | 2,065 | module MongrelRecipes
module RackStack
module Helpers
def pidfile(port)
"/var/run/mongrel/rails/mongrel.#{port}.pid"
end
def restart_app(command)
instance = instances(:app_master)
old_pid = instance.ssh("cat #{pidfile(5000)}").stdout
instance.ssh!("/engineyard/bin/app_rails #{command}")
instance.ssh!("/engineyard/bin/app_rails status")
new_pid = instance.ssh("cat #{pidfile(5000)}").stdout
assert_not_equal(old_pid, new_pid)
end
end
class StartTest < EY::Sommelier::TestCase
include Helpers
scenario :beta
destructive!
def test_start_app
instance = instances(:app_master)
instance.ssh!("/engineyard/bin/app_rails stop")
assert !instance.ssh("/engineyard/bin/app_rails status").success?
instance.ssh!("/engineyard/bin/app_rails start")
instance.ssh!("/engineyard/bin/app_rails status")
end
end
class RestartTest < EY::Sommelier::TestCase
include Helpers
scenario :beta
destructive!
def test_restart_app
restart_app('restart')
end
end
class DeployTest < EY::Sommelier::TestCase
include Helpers
scenario :beta
destructive!
def test_deploy_app
# mongrels can't do anything different for deploy vs. restart;
# they're synonyms.
restart_app('deploy')
end
end
class ListenTest < EY::Sommelier::TestCase
include Helpers
scenario :beta
destructive!
def test_app_listens
instance = instances(:app_master)
(5000..5007).each do |port|
instance.ssh!("curl http://localhost:#{port}/")
end
end
end
class PidfileTest < EY::Sommelier::TestCase
include Helpers
scenario :beta
destructive!
def test_pidfile_present
instance = instances(:app_master)
(5000..5007).each do |port|
instance.ssh!("test -f #{pidfile(port)}")
end
end
end
end
end
| 22.692308 | 73 | 0.615012 |
39a66637837db5bda0c0980d140672c456b9b466 | 1,097 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20160706192334) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "users", force: :cascade do |t|
t.string "name"
t.integer "age"
t.boolean "active"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
end
| 40.62963 | 86 | 0.762078 |
387a50a5fe920311702b296186638d7b1ee0c1b5 | 1,634 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Randomize the order test cases are executed.
config.active_support.test_order = :random
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
config.logger = Logger.new(STDOUT) if ENV['LOG']
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
end
| 40.85 | 85 | 0.77295 |
bbf636eb4b6d57dd47a042028ae9989d741765d1 | 170 | class AddActionTargetTypeToEntourageModerations < ActiveRecord::Migration[4.2]
def change
add_column :entourage_moderations, :action_target_type, :string
end
end
| 28.333333 | 78 | 0.817647 |
62df44d1d7e97b00ff5602e52b409a7079dfef2d | 1,722 | class CaseAssignment < ApplicationRecord
has_paper_trail
belongs_to :casa_case
belongs_to :volunteer, class_name: "User", inverse_of: "case_assignments"
validates :casa_case_id, uniqueness: {scope: :volunteer_id} # only 1 row allowed per case-volunteer pair
validates :volunteer, presence: true
validate :assignee_must_be_volunteer
validate :casa_case_and_volunteer_must_belong_to_same_casa_org, if: -> { casa_case.present? && volunteer.present? }
scope :is_active, -> { where(is_active: true) }
scope :active, -> { where(active: true) }
def self.inactive_this_week(volunteer_id)
this_week = Date.today - 7.days..Date.today
where(updated_at: this_week).where(active: false).where(volunteer_id: volunteer_id)
end
private
def assignee_must_be_volunteer
errors.add(:volunteer, "Case assignee must be an active volunteer") unless volunteer.is_a?(Volunteer) && volunteer.active?
end
def casa_case_and_volunteer_must_belong_to_same_casa_org
return if casa_case.casa_org_id == volunteer.casa_org_id
errors.add(:volunteer, "and case must belong to the same organization")
end
end
# == Schema Information
#
# Table name: case_assignments
#
# id :bigint not null, primary key
# active :boolean default(TRUE), not null
# created_at :datetime not null
# updated_at :datetime not null
# casa_case_id :bigint not null
# volunteer_id :bigint not null
#
# Indexes
#
# index_case_assignments_on_casa_case_id (casa_case_id)
# index_case_assignments_on_volunteer_id (volunteer_id)
#
# Foreign Keys
#
# fk_rails_... (casa_case_id => casa_cases.id)
# fk_rails_... (volunteer_id => users.id)
#
| 31.888889 | 126 | 0.723577 |
033850583806a4f3ca20b9d1e5a85bdb62a04a30 | 4,946 | =begin
This file is part of Viewpoint; the Ruby library for Microsoft Exchange Web Services.
Copyright © 2011 Dan Wanek <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
module Viewpoint
module EWS
# This represents a Mailbox object in the Exchange data store
# @see http://msdn.microsoft.com/en-us/library/aa565036.aspx MSDN docs
# @todo Design a Class method that resolves to an Array of MailboxUsers
class MailboxUser
include Model
def initialize(ews, mbox_user)
super() # Calls initialize in Model (creates @ews_methods Array)
@ews = ews
@ews_item = mbox_user
define_str_var :name, :email_address, :routing_type, :mailbox_type, :item_id
end
def out_of_office_settings
mailbox = {:address => email_address}
resp = @ews.get_user_oof_settings(mailbox)
return resp
s = resp[:oof_settings]
@oof_state = s[:oof_state][:text]
@oof_ext_audience = s[:external_audience][:text]
@oof_start = DateTime.parse(s[:duration][:start_time][:text])
@oof_end = DateTime.parse(s[:duration][:end_time][:text])
@oof_internal_reply = s[:internal_reply][:message][:text]
@oof_external_reply = s[:external_reply][:message][:text]
true
end
# Get information about when this user is available.
# @param [String] start_time The start of the time range to check as an xs:dateTime.
# @param [String] end_time The end of the time range to check as an xs:dateTime.
# @see http://msdn.microsoft.com/en-us/library/aa494212.aspx
def get_user_availability(start_time, end_time)
return MailboxUser.get_user_availability(self.email_address, start_time, end_time)
end
# Adds one or more delegates to a principal's mailbox and sets specific access permissions
# @see http://msdn.microsoft.com/en-us/library/bb856527.aspx
#
# @param [String,MailboxUser] delegate_email The user you would like to give delegate access to.
# This can either be a simple String e-mail address or you can pass in a MailboxUser object.
# @param [Hash] permissions A hash of folder type keys and permission type values. An example
# would be {:calendar_folder_permission_level => 'Editor'}. Possible keys are:
# :calendar_folder_permission_level, :tasks_folder_permission_level, :inbox_folder_permission_level
# :contacts_folder_permission_level, :notes_folder_permission_level, :journal_folder_permission_level
# and possible values are: None/Editor/Reviewer/Author/Custom
# @return [true] This method either returns true or raises an error with the message
# as to why this operation did not succeed.
def add_delegate!(delegate_email, permissions)
# Use a new hash so the passed hash is not modified in case we are in a loop.
# Thanks to Markus Roberts for pointing this out.
formatted_perms = {}
# Modify permissions so we can pass it to the builders
permissions.each_pair do |k,v|
formatted_perms[k] = {:text => v}
end
resp = (Viewpoint::EWS::EWS.instance).ews.add_delegate(self.email_address, delegate_email, formatted_perms)
if(resp.status == 'Success')
return true
else
raise EwsError, "Could not add delegate access for user #{delegate_email}: #{resp.code}, #{resp.message}"
end
end
def update_delegate!(delegate_email, permissions)
# Modify permissions so we can pass it to the builders
formatted_perms = {}
permissions.each_pair do |k,v|
formatted_perms[k] = {:text => v}
end
resp = (Viewpoint::EWS::EWS.instance).ews.update_delegate(self.email_address, delegate_email, formatted_perms)
if(resp.status == 'Success')
return true
else
raise EwsError, "Could not update delegate access for user #{delegate_email}: #{resp.code}, #{resp.message}"
end
end
def get_delegate_info()
resp = (Viewpoint::EWS::EWS.instance).ews.get_delegate(self.email_address)
# if(resp.status == 'Success')
# return true
# else
# raise EwsError, "Could not update delegate access for user #{delegate_email}: #{resp.code}, #{resp.message}"
# end
end
end # MailboxUser
end # EWS
end # Viewpoint
| 43.769912 | 119 | 0.680146 |
f75cb322fe9fad07c18b8c18c18377296a31c4d3 | 1,773 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Ci::PrepareBuildService do
describe '#execute' do
let(:build) { create(:ci_build, :preparing) }
subject { described_class.new(build).execute }
before do
allow(build).to receive(:prerequisites).and_return(prerequisites)
end
shared_examples 'build enqueueing' do
it 'enqueues the build' do
expect(build).to receive(:enqueue_preparing).once
subject
end
end
context 'build has unmet prerequisites' do
let(:prerequisite) { double(complete!: true) }
let(:prerequisites) { [prerequisite] }
it 'completes each prerequisite' do
expect(prerequisites).to all(receive(:complete!))
subject
end
include_examples 'build enqueueing'
context 'prerequisites fail to complete' do
before do
allow(build).to receive(:enqueue_preparing).and_return(false)
end
it 'drops the build' do
expect(build).to receive(:drop).with(:unmet_prerequisites).once
subject
end
end
context 'prerequisites raise an error' do
before do
allow(prerequisite).to receive(:complete!).and_raise Kubeclient::HttpError.new(401, 'unauthorized', nil)
end
it 'drops the build and notifies Sentry' do
expect(build).to receive(:drop).with(:unmet_prerequisites).once
expect(Gitlab::ErrorTracking).to receive(:track_exception)
.with(instance_of(Kubeclient::HttpError), hash_including(build_id: build.id))
subject
end
end
end
context 'build has no prerequisites' do
let(:prerequisites) { [] }
include_examples 'build enqueueing'
end
end
end
| 25.695652 | 114 | 0.648054 |
b93e61e84c525bb3c114483c44d4f864b1496042 | 1,362 | require_relative "boot"
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_mailbox/engine"
require "action_text/engine"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SmallTownBackend
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.1
# Configuration for the application, engines, and railties goes here.
#
# These settings can be overridden in specific environments using the files
# in config/environments, which are processed later.
#
# config.time_zone = "Central Time (US & Canada)"
# config.eager_load_paths << Rails.root.join("extras")
# Only loads a smaller set of middleware suitable for API only apps.
# Middleware like session, flash, cookies can be added back manually.
# Skip views, helpers and assets when generating a new resource.
config.api_only = true
end
end
| 33.219512 | 79 | 0.764317 |
ff7b095d7377ab4c5cdd638bc086d725e5abcf68 | 61 | module Devise
module AuthN
VERSION = "0.1.7"
end
end
| 10.166667 | 21 | 0.639344 |
f74549ecc9fa023a59af5a6597625d9b8faccdd9 | 150 | require_relative '../../../spec_helper'
class CorePlacePage < CoreAuthorityPage
include Logging
include Page
include CollectionSpacePages
end | 16.666667 | 39 | 0.78 |
035e95297df4c8727202ee4007f99787499c9d16 | 557 | Pod::Spec.new do |s|
s.name = 'GSPlayer'
s.version = '0.2.16'
s.summary = '⏯ Video player, support for caching, fullscreen transition and custom control view. 视频播放器,支持边下边播、全屏转场和自定义控制层'
s.homepage = 'https://github.com/wxxsw/GSPlayer'
s.license = 'MIT'
s.author = { 'Gesen' => '[email protected]' }
s.source = { :git => 'https://github.com/wxxsw/GSPlayer.git', :tag => s.version.to_s }
s.source_files = 'GSPlayer/Classes/**/*.swift'
s.ios.deployment_target = '10.0'
s.swift_versions = ['5.0']
end
| 34.8125 | 129 | 0.601436 |
2141e816687e33ceabb694433fc83cb324d85033 | 585 | # Be sure to restart your server when you modify this file.
# Version of your assets, change this if you want to expire all your assets.
# When updating Pageflow, update this as well. It will force a refresh for i18n-js.
Rails.application.config.assets.version = '37'
# Add additional assets to the asset load path
# Rails.application.config.assets.paths << Emoji.images_path
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
Rails.application.config.assets.precompile += %w(react-server.js components.js)
| 45 | 93 | 0.777778 |
285157a0157c07e1de43b6be2f8878d63adf7923 | 1,498 | # frozen_string_literal: true
require "rails_helper"
describe SoftwareType do
describe "fields" do
subject { described_class }
it { is_expected.to have_field(:id).of_type(!types.ID) }
it { is_expected.to have_field(:type).of_type("String!") }
end
describe "software as formatted citation", elasticsearch: true do
let!(:software) do
create(
:doi,
types: { "resourceTypeGeneral" => "Software" },
doi: "10.14454/12345",
aasm_state: "findable",
version_info: "1.0.1",
)
end
before do
Doi.import
sleep 2
@dois = Doi.gql_query(nil, page: { cursor: [], size: 1 }).results.to_a
end
let(:query) do
"query {
software(id: \"https://doi.org/10.14454/12345\") {
id
formattedCitation(style: \"apa\")
}
}"
end
it "returns books" do
response = LupoSchema.execute(query).as_json
expect(response.dig("data", "software", "id")).to eq(
"https://handle.stage.datacite.org/" + software.uid,
)
expect(response.dig("data", "software", "formattedCitation")).to eq(
"Ollomo, B., Durand, P., Prugnolle, F., Douzery, E. J. P., Arnathau, C., Nkoghe, D., Leroy, E., & Renaud, F. (2011). <i>Data from: A new malaria agent in African hominids.</i> (Version 1.0.1) [Computer software]. Dryad Digital Repository. <a href='https://doi.org/10.14454/12345'>https://doi.org/10.14454/12345</a>",
)
end
end
end
| 29.96 | 328 | 0.598131 |
218b6dc019cf8a4b46eda8de9f7ae515e3801f80 | 171 | class CreatePosts < ActiveRecord::Migration[5.2]
def change
create_table :posts do |t|
t.string :name
t.text :body
t.timestamps
end
end
end
| 15.545455 | 48 | 0.631579 |
e263d970275ef376d6b9a7c53d223cb5963ab23f | 327 | # frozen_string_literal: true
FactoryBot.define do
sequence(:email) { |i| "pfinderuser_#{i}@pfinder.local" }
factory :user do
name { Faker::Name.first_name }
scoutname { Faker::Lorem.word }
scoutgroup { Faker::Lorem.word }
canton { :zh }
birthdate { Faker::Date.birthday(18, 50) }
email
end
end
| 21.8 | 59 | 0.654434 |
1c0749e57d317086b777766ecf7638e805e05fd1 | 105 | class DashboardController < ApplicationController
def section_id
:dashboard
end
private
end
| 10.5 | 49 | 0.761905 |
3357abe3dc48f84cad6c74edd94f80b9c3807b77 | 6,787 | # WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::Glacier
class Account
extend Aws::Deprecations
# @overload def initialize(id, options = {})
# @param [String] id
# @option options [Client] :client
# @overload def initialize(options = {})
# @option options [required, String] :id
# @option options [Client] :client
def initialize(*args)
options = Hash === args.last ? args.pop.dup : {}
@id = extract_id(args, options)
@data = Aws::EmptyStructure.new
@client = options.delete(:client) || Client.new(options)
@waiter_block_warned = false
end
# @!group Read-Only Attributes
# @return [String]
def id
@id
end
# @!endgroup
# @return [Client]
def client
@client
end
# @raise [NotImplementedError]
# @api private
def load
msg = "#load is not implemented, data only available via enumeration"
raise NotImplementedError, msg
end
alias :reload :load
# @api private
# @return [EmptyStructure]
def data
@data
end
# @return [Boolean]
# Returns `true` if this resource is loaded. Accessing attributes or
# {#data} on an unloaded resource will trigger a call to {#load}.
def data_loaded?
!!@data
end
# @deprecated Use [Aws::Glacier::Client] #wait_until instead
#
# Waiter polls an API operation until a resource enters a desired
# state.
#
# @note The waiting operation is performed on a copy. The original resource remains unchanged
#
# ## Basic Usage
#
# The waiter polls until it is successful, fails by entering a terminal
# state, or reaches the maximum number of attempts.
#
# # polls in a loop until condition is true
# resource.wait_until(options) {|resource| condition}
#
# ## Example
#
# instance.wait_until(max_attempts:10, delay:5) {|instance| instance.state.name == 'running' }
#
# ## Configuration
#
# You can configure the maximum number of polling attempts, and the
# delay (in seconds) between each polling attempt. The waiting condition is set
# by passing a block to {#wait_until}:
#
# # poll for ~25 seconds
# resource.wait_until(max_attempts:5,delay:5) {|resource|...}
#
# ## Callbacks
#
# You can be notified before each polling attempt and before each
# delay. If you throw `:success` or `:failure` from these callbacks,
# it will terminate the waiter.
#
# started_at = Time.now
# # poll for 1 hour, instead of a number of attempts
# proc = Proc.new do |attempts, response|
# throw :failure if Time.now - started_at > 3600
# end
#
# # disable max attempts
# instance.wait_until(before_wait:proc, max_attempts:nil) {...}
#
# ## Handling Errors
#
# When a waiter is successful, it returns the Resource. When a waiter
# fails, it raises an error.
#
# begin
# resource.wait_until(...)
# rescue Aws::Waiters::Errors::WaiterFailed
# # resource did not enter the desired state in time
# end
#
#
# @yield param [Resource] resource to be used in the waiting condition
#
# @raise [Aws::Waiters::Errors::FailureStateError] Raised when the waiter terminates
# because the waiter has entered a state that it will not transition
# out of, preventing success.
#
# @raise [Aws::Waiters::Errors::TooManyAttemptsError] Raised when the configured
# maximum number of attempts have been made, and the waiter is not
# yet successful.
#
# @raise [Aws::Waiters::Errors::UnexpectedError] Raised when an error is encountered
# while polling for a resource that is not expected.
#
# @raise [NotImplementedError] Raised when the resource does not support #wait_until.
#
# @option options [Integer] :max_attempts (10) Maximum number of
# attempts
# @option options [Integer] :delay (10) Delay between each
# attempt in seconds
# @option options [Proc] :before_attempt (nil) Callback
# invoked before each attempt
# @option options [Proc] :before_wait (nil) Callback
# invoked before each wait
# @return [Resource] if the waiter was successful
def wait_until(options = {}, &block)
self_copy = self.dup
attempts = 0
options[:max_attempts] = 10 unless options.key?(:max_attempts)
options[:delay] ||= 10
options[:poller] = Proc.new do
attempts += 1
if block.call(self_copy)
[:success, self_copy]
else
self_copy.reload unless attempts == options[:max_attempts]
:retry
end
end
Aws::Waiters::Waiter.new(options).wait({})
end
# @!group Actions
# @example Request syntax with placeholder values
#
# vault = account.create_vault({
# vault_name: "string", # required
# })
# @param [Hash] options ({})
# @option options [required, String] :vault_name
# The name of the vault.
# @return [Vault]
def create_vault(options = {})
options = options.merge(account_id: @id)
resp = @client.create_vault(options)
Vault.new(
account_id: @id,
name: options[:vault_name],
client: @client
)
end
# @!group Associations
# @param [String] name
# @return [Vault]
def vault(name)
Vault.new(
account_id: @id,
name: name,
client: @client
)
end
# @example Request syntax with placeholder values
#
# account.vaults()
# @param [Hash] options ({})
# @return [Vault::Collection]
def vaults(options = {})
batches = Enumerator.new do |y|
options = options.merge(account_id: @id)
resp = @client.list_vaults(options)
resp.each_page do |page|
batch = []
page.data.vault_list.each do |v|
batch << Vault.new(
account_id: @id,
name: v.vault_name,
data: v,
client: @client
)
end
y.yield(batch)
end
end
Vault::Collection.new(batches)
end
# @deprecated
# @api private
def identifiers
{ id: @id }
end
deprecated(:identifiers)
private
def extract_id(args, options)
value = args[0] || options.delete(:id)
case value
when String then value
when nil then raise ArgumentError, "missing required option :id"
else
msg = "expected :id to be a String, got #{value.class}"
raise ArgumentError, msg
end
end
class Collection < Aws::Resources::Collection; end
end
end
| 28.516807 | 102 | 0.603359 |
262287182032937516b9dec7a88b160d54f5e9ae | 1,604 | Pod::Spec.new do |s|
s.name = "RxSwift"
s.version = "4.4.1"
s.summary = "RxSwift is a Swift implementation of Reactive Extensions"
s.description = <<-DESC
This is a Swift port of [ReactiveX.io](https://github.com/ReactiveX)
Like the original [Rx](https://github.com/Reactive-extensions/Rx.Net), its intention is to enable easy composition of asynchronous operations and event streams.
It tries to port as many concepts from the original Rx as possible, but some concepts were adapted for more pleasant and performant integration with iOS/macOS/Linux environment.
Probably the best analogy for those who have never heard of Rx would be:
```
git diff | grep bug | less # linux pipes - programs communicate by sending
# sequences of bytes, words, lines, '\0' terminated strings...
```
would become if written in RxSwift
```
gitDiff().grep("bug").less // sequences of swift objects
```
DESC
s.homepage = "https://github.com/ReactiveX/RxSwift"
s.license = 'MIT'
s.author = { "Krunoslav Zaher" => "[email protected]" }
s.source = { :git => "https://github.com/ReactiveX/RxSwift.git", :tag => s.version.to_s }
s.requires_arc = true
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.9'
s.watchos.deployment_target = '3.0'
s.tvos.deployment_target = '9.0'
s.source_files = 'RxSwift/**/*.swift', 'Platform/**/*.swift'
s.exclude_files = 'RxSwift/Platform/**/*.swift'
s.dependency 'RxAtomic', '~> 4.4'
end
| 40.1 | 177 | 0.643392 |
f7e5b6e48642554a5029ed48ec1e9ea34e4b4105 | 6,525 | # # Modules: Rails
# Adds settings and tasks for managing Rails projects.
#
# require 'mina/rails'
require 'mina/bundler'
# ## Settings
# Any and all of these settings can be overriden in your `deploy.rb`.
# ### rails_env
# Sets the Rails environment for `rake` and `rails` commands.
#
# Note that changing this will NOT change the environment that your application
# is run in.
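#
# Example override in your `deploy.rb` (the environment name is illustrative):
#
#     set :rails_env, 'staging'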
set_default :rails_env, 'production'
# ### bundle_prefix
# Prefix for Bundler commands. Often to something like `RAILS_ENV=production
# bundle exec`.
#
# queue! "#{bundle_prefix} annotate -r"
set_default :bundle_prefix, lambda { %{RAILS_ENV="#{rails_env}" #{bundle_bin} exec} }
# ### rake
# The prefix for `rake` commands. Use like so:
#
# queue! "#{rake} db:migrate"
set_default :rake, lambda { %{#{bundle_prefix} rake} }
# ### rails
# The prefix for `rails` commands. Use like so:
#
# queue! "#{rails} console"
set_default :rails, lambda { %{#{bundle_prefix} rails} }
# ### asset_paths
# The paths to be checked.
#
# Whenever assets are compiled, the asset files are checked to see if they
# have changed from the previous release.
#
# If they're unchanged, compiled assets will simply be copied over to the new
# release.
#
# Override this if you have custom asset paths declared in your Rails's
# `config.assets.paths` setting.
set_default :asset_paths, ['vendor/assets/', 'app/assets/']
# ### compiled_asset_path
# The path to be copied to the new release.
#
# The path your assets are compiled to. If your `assets_path` assets have changed,
# this is the folder that gets copied across from the current release to the new release.
#
# Override this if you have custom public asset paths.
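#
# Example override in `deploy.rb` for a non-default layout (paths are illustrative):
#
#     set :asset_paths, ['vendor/assets/', 'app/assets/', 'lib/assets/']
#     set :compiled_asset_path, 'public/packs'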
set_default :compiled_asset_path, 'public/assets'
# ### rake_assets_precompile
# The command to invoke when precompiling assets.
# Override me if you like.
settings.rake_assets_precompile ||= lambda { "#{rake} assets:precompile RAILS_GROUPS=assets" }
# ----
# Macro used later by :rails, :rake, etc
make_run_task = lambda { |name, sample_args|
task name, [:arguments] => :environment do |t, args|
arguments = args[:arguments]
command = send name
unless arguments
puts %{You need to provide arguments. Try: mina "#{name}[#{sample_args}]"}
exit 1
end
queue echo_cmd %[cd "#{deploy_to!}/#{current_path!}" && #{command} #{arguments}]
end
}
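# Builds a shell snippet that diffs the given :at paths between the current
# release and the new build, then runs the :skip, :changed, or :default branch
# depending on whether anything changed (or no previous release exists).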
def check_for_changes_script(options={})
diffs = options[:at].map { |path|
%[diff -rN "#{deploy_to}/#{current_path}/#{path}" "./#{path}" 2>/dev/null]
}.join("\n")
unindent %[
if [ -e "#{deploy_to}/#{current_path}/#{options[:check]}" ]; then
count=`(
#{reindent 4, diffs}
) | wc -l`
if [ "$((count))" = "0" ]; then
#{reindent 4, options[:skip]} &&
exit
else
#{reindent 4, options[:changed]}
fi
else
#{reindent 2, options[:default]}
fi
]
end
# ## Command-line tasks
# These tasks can be invoked in the command line.
# ### rails[]
# Invokes a rails command.
#
# $ mina "rails[console]"
desc "Execute a Rails command in the current deploy."
make_run_task[:rails, 'console']
# ### rake[]
# Invokes a rake command.
#
# $ mina "rake[db:migrate]"
desc "Execute a Rake command in the current deploy."
make_run_task[:rake, 'db:migrate']
# ### console
# Opens the Ruby console for the currently-deployed version.
#
# $ mina console
desc "Starts an interactive console."
task :console => :environment do
queue echo_cmd %[cd "#{deploy_to!}/#{current_path!}" && #{rails} console && exit]
end
# ### log
# Tail log from server
#
# $ mina log
desc "Tail log from server"
task :log => :environment do
queue %[tail -f #{deploy_to}/#{current_path}/log/#{rails_env}.log]
end
# ## Deploy tasks
# These tasks are meant to be invoked inside deploy scripts, not invoked on
# their own.
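#
# For example, a `deploy.rb` might invoke them inside its deploy block (sketch):
#
#     deploy do
#       # ... clone, bundle, etc ...
#       invoke :'rails:db_migrate'
#       invoke :'rails:assets_precompile'
#     end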
namespace :rails do
# ### rails:db_migrate
desc "Migrates the Rails database (skips if nothing has changed since the last release)."
task :db_migrate do
if ENV['force_migrate']
invoke :'rails:db_migrate:force'
else
message = verbose_mode? ?
'$((count)) changes found, migrating database' :
'Migrating database'
queue check_for_changes_script \
:check => 'db/migrate/',
:at => ['db/migrate/'],
:skip => %[
echo "-----> DB migrations unchanged; skipping DB migration"
],
:changed => %[
echo "-----> #{message}"
#{echo_cmd %[#{rake} db:migrate]}
],
:default => %[
echo "-----> Migrating database"
#{echo_cmd %[#{rake} db:migrate]}
]
end
end
# ### rails:db_migrate:force
desc "Migrates the Rails database."
task :'db_migrate:force' do
queue %{
echo "-----> Migrating database"
#{echo_cmd %[#{rake} db:migrate]}
}
end
# ### rails:db_create
desc "Creates the Rails database."
task :'db_create' do
queue %{
echo "-----> Creating database"
#{echo_cmd %[#{rake} db:create]}
}
end
# ### rails:db_rollback
desc "Rollbacks the Rails database."
task :'db_rollback' do
queue %{
echo "-----> Rollbacking database"
#{echo_cmd %[#{rake} db:rollback]}
}
end
# ### rails:assets_precompile:force
desc "Precompiles assets."
task :'assets_precompile:force' do
queue %{
echo "-----> Precompiling asset files"
#{echo_cmd %[#{rake_assets_precompile}]}
}
end
# ### rails:assets_precompile
desc "Precompiles assets (skips if nothing has changed since the last release)."
task :'assets_precompile' do
if ENV['force_assets']
invoke :'rails:assets_precompile:force'
else
message = verbose_mode? ?
'$((count)) changes found, precompiling asset files' :
'Precompiling asset files'
queue check_for_changes_script \
:check => compiled_asset_path,
:at => [*asset_paths],
:skip => %[
echo "-----> Skipping asset precompilation"
#{echo_cmd %[mkdir -p "#{deploy_to}/$build_path/#{compiled_asset_path}"]}
#{echo_cmd %[cp -R "#{deploy_to}/#{current_path}/#{compiled_asset_path}/." "#{deploy_to}/$build_path/#{compiled_asset_path}"]}
],
:changed => %[
echo "-----> #{message}"
#{echo_cmd %[#{rake_assets_precompile}]}
],
:default => %[
echo "-----> Precompiling asset files"
#{echo_cmd %[#{rake_assets_precompile}]}
]
end
end
end
| 26.52439 | 136 | 0.627893 |
18e257bcfa66e035fbddab98e917ada73316f9cf | 11,283 | # frozen_string_literal: true
require_relative './data_absent_reason_checker'
require_relative './profile_definitions/us_core_practitioner_definitions'
module Inferno
module Sequence
class USCore311PractitionerSequence < SequenceBase
include Inferno::DataAbsentReasonChecker
include Inferno::USCore311ProfileDefinitions
title 'Practitioner Tests'
description 'Verify support for the server capabilities required by the US Core Practitioner Profile.'
details %(
# Background
The US Core #{title} sequence verifies that the system under test is able to provide correct responses
for Practitioner queries. These queries must contain resources conforming to US Core Practitioner Profile as specified
in the US Core v3.1.1 Implementation Guide.
# Testing Methodology
Because Practitioner resources are not required by USCDI, no searches are performed on this test sequence. Instead, references to
this profile found in other resources are used for testing. If no references can be found this way, then all the tests
in this sequence are skipped.
## Must Support
Each profile has a list of elements marked as "must support". This test sequence expects to see each of these elements
at least once. If at least one cannot be found, the test will fail. The test will look through the Practitioner
resources found for these elements.
## Profile Validation
Each resource returned from the first search is expected to conform to the [US Core Practitioner Profile](http://hl7.org/fhir/us/core/StructureDefinition/us-core-practitioner).
Each element is checked against terminology binding and cardinality requirements.
Elements with a required binding are validated against their bound valueset. If the code/system in the element is not part
of the valueset, then the test will fail.
## Reference Validation
Each reference within the resources found from the first search must resolve. The test will attempt to read each reference found
and will fail if any attempted read fails.
)
test_id_prefix 'USCPR'
requires :token
conformance_supports :Practitioner
delayed_sequence
def validate_resource_item(resource, property, value)
case property
when 'name'
values_found = resolve_path(resource, 'name')
value_downcase = value.downcase
match_found = values_found.any? do |name|
name&.text&.downcase&.start_with?(value_downcase) ||
name&.family&.downcase&.include?(value_downcase) ||
name&.given&.any? { |given| given.downcase.start_with?(value_downcase) } ||
name&.prefix&.any? { |prefix| prefix.downcase.start_with?(value_downcase) } ||
name&.suffix&.any? { |suffix| suffix.downcase.start_with?(value_downcase) }
end
assert match_found, "name in Practitioner/#{resource.id} (#{values_found}) does not match name requested (#{value})"
when 'identifier'
values_found = resolve_path(resource, 'identifier')
identifier_system = value.split('|').first.empty? ? nil : value.split('|').first
identifier_value = value.split('|').last
match_found = values_found.any? do |identifier|
identifier.value == identifier_value && (!value.include?('|') || identifier.system == identifier_system)
end
assert match_found, "identifier in Practitioner/#{resource.id} (#{values_found}) does not match identifier requested (#{value})"
end
end
def patient_ids
@instance.patient_ids.split(',').map(&:strip)
end
@resources_found = false
test :resource_read do
metadata do
id '01'
name 'Server returns correct Practitioner resource from the Practitioner read interaction'
link 'https://www.hl7.org/fhir/us/core/CapabilityStatement-us-core-server.html'
description %(
This test verifies that references to Practitioner resources can be resolved and read.
)
versions :r4
end
skip_if_known_not_supported(:Practitioner, [:read])
practitioner_references = @instance.resource_references.select { |reference| reference.resource_type == 'Practitioner' }
skip 'No Practitioner references found from the prior searches' if practitioner_references.blank?
@practitioner_ary = practitioner_references.map do |reference|
validate_read_reply(
FHIR::Practitioner.new(id: reference.resource_id),
FHIR::Practitioner,
check_for_data_absent_reasons
)
end
@practitioner = @practitioner_ary.first
@resources_found = @practitioner.present?
end
test :validate_resources do
metadata do
id '02'
name 'Practitioner resources returned from previous search conform to the US Core Practitioner Profile.'
link 'http://hl7.org/fhir/us/core/StructureDefinition/us-core-practitioner'
description %(
This test verifies resources returned from the first search conform to the [US Core Practitioner Profile](http://hl7.org/fhir/us/core/StructureDefinition/us-core-practitioner).
It verifies the presence of mandatory elements and that elements with required bindings contain appropriate values.
CodeableConcept element bindings will fail if none of the element's codings have a code/system that is part of the bound ValueSet.
Quantity, Coding, and code element bindings will fail if the code/system is not found in the valueset.
)
versions :r4
end
skip_if_not_found(resource_type: 'Practitioner', delayed: true)
test_resources_against_profile('Practitioner')
bindings = USCore311PractitionerSequenceDefinitions::BINDINGS
invalid_binding_messages = []
invalid_binding_resources = Set.new
bindings.select { |binding_def| binding_def[:strength] == 'required' }.each do |binding_def|
begin
invalid_bindings = resources_with_invalid_binding(binding_def, @practitioner_ary)
rescue Inferno::Terminology::UnknownValueSetException => e
warning do
assert false, e.message
end
invalid_bindings = []
end
invalid_bindings.each { |invalid| invalid_binding_resources << "#{invalid[:resource]&.resourceType}/#{invalid[:resource].id}" }
invalid_binding_messages.concat(invalid_bindings.map { |invalid| invalid_binding_message(invalid, binding_def) })
end
assert invalid_binding_messages.blank?, "#{invalid_binding_messages.count} invalid required #{'binding'.pluralize(invalid_binding_messages.count)}" \
" found in #{invalid_binding_resources.count} #{'resource'.pluralize(invalid_binding_resources.count)}: " \
"#{invalid_binding_messages.join('. ')}"
bindings.select { |binding_def| binding_def[:strength] == 'extensible' }.each do |binding_def|
begin
invalid_bindings = resources_with_invalid_binding(binding_def, @practitioner_ary)
binding_def_new = binding_def
# If the valueset binding wasn't valid, check if the codes are in the stated codesystem
if invalid_bindings.present?
invalid_bindings = resources_with_invalid_binding(binding_def.except(:system), @practitioner_ary)
binding_def_new = binding_def.except(:system)
end
rescue Inferno::Terminology::UnknownValueSetException, Inferno::Terminology::ValueSet::UnknownCodeSystemException => e
warning do
assert false, e.message
end
invalid_bindings = []
end
invalid_binding_messages.concat(invalid_bindings.map { |invalid| invalid_binding_message(invalid, binding_def_new) })
end
warning do
invalid_binding_messages.each do |error_message|
assert false, error_message
end
end
end
test 'All must support elements are provided in the Practitioner resources returned.' do
metadata do
id '03'
link 'http://www.hl7.org/fhir/us/core/general-guidance.html#must-support'
description %(
US Core Responders SHALL be capable of populating all data elements as part of the query results as specified by the US Core Server Capability Statement.
This will look through the Practitioner resources found previously for the following must support elements:
* Practitioner.identifier:NPI
* identifier
* identifier.system
* identifier.value
* name
* name.family
)
versions :r4
end
skip_if_not_found(resource_type: 'Practitioner', delayed: true)
must_supports = USCore311PractitionerSequenceDefinitions::MUST_SUPPORTS
missing_slices = must_supports[:slices].reject do |slice|
@practitioner_ary&.any? do |resource|
slice_found = find_slice(resource, slice[:path], slice[:discriminator])
slice_found.present?
end
end
missing_must_support_elements = must_supports[:elements].reject do |element|
@practitioner_ary&.any? do |resource|
value_found = resolve_element_from_path(resource, element[:path]) do |value|
value_without_extensions = value.respond_to?(:to_hash) ? value.to_hash.reject { |key, _| key == 'extension' } : value
value_without_extensions.present? && (element[:fixed_value].blank? || value == element[:fixed_value])
end
value_found.present?
end
end
missing_must_support_elements.map! { |must_support| "#{must_support[:path]}#{': ' + must_support[:fixed_value] if must_support[:fixed_value].present?}" }
missing_must_support_elements += missing_slices.map { |slice| slice[:name] }
skip_if missing_must_support_elements.present?,
"Could not find #{missing_must_support_elements.join(', ')} in the #{@practitioner_ary&.length} provided Practitioner resource(s)"
@instance.save!
end
test 'Every reference within Practitioner resources can be read.' do
metadata do
id '04'
link 'http://hl7.org/fhir/references.html'
description %(
This test will attempt to read the first 50 references found in the resources from the first search.
The test will fail if Inferno fails to read any of those references.
)
versions :r4
end
skip_if_known_not_supported(:Practitioner, [:search, :read])
skip_if_not_found(resource_type: 'Practitioner', delayed: true)
validated_resources = Set.new
max_resolutions = 50
@practitioner_ary&.each do |resource|
validate_reference_resolutions(resource, validated_resources, max_resolutions) if validated_resources.length < max_resolutions
end
end
end
end
end
| 45.132 | 188 | 0.672516 |
918d27b105362395a83d4121a56300edd083cced | 13,613 | require "net/https"
require "digest"
module RETS
class HTTP
attr_accessor :login_uri
##
# Creates a new HTTP instance which will automatically handle authenticating to the RETS server.
def initialize(args)
@headers = {"User-Agent" => "Ruby RETS/v#{RETS::VERSION}", "Accept-Encoding" => "none", "RETS-Version" => "1.8.0"}
@request_count = 0
@config = {:http => {}}.merge(args)
@rets_data, @cookie_list = {}, {}
if @config[:useragent] and @config[:useragent][:name]
@headers["User-Agent"] = @config[:useragent][:name]
end
if @config[:rets_version]
@rets_data[:version] = @config[:rets_version]
self.setup_ua_authorization(:version => @config[:rets_version])
end
if @config[:auth_mode] == :basic
@auth_mode = @config.delete(:auth_mode)
end
end
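# Percent-encodes only the handful of characters that matter in RETS query
# strings (+ = ? & % ,); every other character is passed through unchanged.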
def url_encode(str)
encoded_string = ""
str.each_char do |char|
case char
when "+"
encoded_string << "%2b"
when "="
encoded_string << "%3d"
when "?"
encoded_string << "%3f"
when "&"
encoded_string << "%26"
when "%"
encoded_string << "%25"
when ","
encoded_string << "%2C"
else
encoded_string << char
end
end
encoded_string
end
def get_digest(header)
return unless header
header.each do |text|
mode, text = text.split(" ", 2)
return text if mode == "Digest"
end
nil
end
##
# Creates and manages the HTTP digest auth
# if the WWW-Authenticate header is passed, then it will overwrite what it knows about the auth data.
def save_digest(header)
@request_count = 0
@digest = {}
header.split(",").each do |line|
k, v = line.strip.split("=", 2)
@digest[k] = (k != "algorithm" and k != "stale") && v[1..-2] || v
end
@digest_type = @digest["qop"] ? @digest["qop"].split(",") : []
end
##
# Creates a HTTP digest header.
def create_digest(method, request_uri)
# http://en.wikipedia.org/wiki/Digest_access_authentication
first = Digest::MD5.hexdigest("#{@config[:username]}:#{@digest["realm"]}:#{@config[:password]}")
second = Digest::MD5.hexdigest("#{method}:#{request_uri}")
# Using the "newer" authentication QOP
if @digest_type.include?("auth")
cnonce = Digest::MD5.hexdigest("#{@headers["User-Agent"]}:#{@config[:password]}:#{@request_count}:#{@digest["nonce"]}")
hash = Digest::MD5.hexdigest("#{first}:#{@digest["nonce"]}:#{"%08X" % @request_count}:#{cnonce}:#{@digest["qop"]}:#{second}")
# Nothing specified, so default to the old one
elsif @digest_type.empty?
hash = Digest::MD5.hexdigest("#{first}:#{@digest["nonce"]}:#{second}")
else
raise RETS::HTTPError, "Cannot determine auth type for server (#{@digest_type.join(",")})"
end
http_digest = "Digest username=\"#{@config[:username]}\", "
http_digest << "realm=\"#{@digest["realm"]}\", "
http_digest << "nonce=\"#{@digest["nonce"]}\", "
http_digest << "uri=\"#{request_uri}\", "
http_digest << "algorithm=MD5, " unless @digest_type.empty?
http_digest << "response=\"#{hash}\", "
http_digest << "opaque=\"#{@digest["opaque"]}\""
unless @digest_type.empty?
http_digest << ", "
http_digest << "qop=\"#{@digest["qop"]}\", "
http_digest << "nc=#{"%08X" % @request_count}, "
http_digest << "cnonce=\"#{cnonce}\""
end
http_digest
end
##
# Creates a HTTP basic header.
def create_basic
"Basic " << ["#{@config[:username]}:#{@config[:password]}"].pack("m").delete("\r\n")
end
##
# Finds the ReplyText and ReplyCode attributes in the response
#
# @param [Nokogiri::XML::NodeSet] rets <RETS> attributes found
#
# @return [String] RETS ReplyCode
# @return [String] RETS ReplyText
def get_rets_response(rets)
code, text = nil, nil
rets.attributes.each do |attr|
key = attr.first.downcase
if key == "replycode"
code = attr.last.value
elsif key == "replytext"
text = attr.last.value
end
end
# puts "replycode: #{code}"
return code, text
end
##
# Handles managing the relevant RETS-UA-Authorization headers
#
# @param [Hash] args
# @option args [String] :version RETS Version
# @option args [String, Optional] :session_id RETS Session ID
def setup_ua_authorization(args)
# Most RETS implementations don't care about RETS-Version for RETS-UA-Authorization, they don't require RETS-Version in general.
# Rapattoni requires RETS-Version even without RETS-UA-Authorization, so we will try to set the header when possible from the HTTP request rather than implying it.
# Interealty requires RETS-Version for RETS-UA-Authorization, so will fake it when we get an 20037 error
@headers["RETS-Version"] = args[:version] if args[:version]
if @headers["RETS-Version"] and @config[:useragent] and @config[:useragent][:password]
login = Digest::MD5.hexdigest("#{@config[:useragent][:name]}:#{@config[:useragent][:password]}")
@headers.merge!("RETS-UA-Authorization" => "Digest #{Digest::MD5.hexdigest("#{login}::#{args[:session_id]}:#{@headers["RETS-Version"]}")}")
end
end
##
# Sends a request to the RETS server.
#
# @param [Hash] args
# @option args [URI] :url URI to request data from
# @option args [Hash, Optional] :params Query string to include with the request
# @option args [Integer, Optional] :read_timeout How long to wait for the socket to return data before timing out
#
# @raise [RETS::APIError]
# @raise [RETS::HTTPError]
# @raise [RETS::Unauthorized]
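#
# Illustrative call (the URL, query values and block below are hypothetical,
# not part of the library):
#
#   http.request(:url => URI.parse("https://rets.example.com/Search"),
#                :params => {"SearchType" => "Property", "Format" => "COMPACT"},
#                :read_timeout => 30) do |response|
#     puts response.body
#   end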
def request(args, &block)
if args[:params]
url_terminator = (args[:url].request_uri.include?("?")) ? "&" : "?"
request_uri = "#{args[:url].request_uri}#{url_terminator}"
args[:params].each do |k, v|
request_uri << "#{k}=#{url_encode(v.to_s)}&" if v
end
else
request_uri = args[:url].request_uri
end
request_uri = request_uri[0..-2] if request_uri[-1] == '&'
headers = args[:headers]
# Digest will change every time due to how its setup
@request_count += 1
if @auth_mode == :digest
if headers
headers["Authorization"] = create_digest("GET", request_uri)
else
headers = {"Authorization" => create_digest("GET", request_uri)}
end
end
headers = headers ? @headers.merge(headers) : @headers
http = ::Net::HTTP.new(args[:url].host, args[:url].port)
http.read_timeout = args[:read_timeout] if args[:read_timeout]
http.set_debug_output(@config[:debug_output]) if @config[:debug_output]
if args[:url].scheme == "https"
http.use_ssl = true
http.verify_mode = @config[:http][:verify_mode] || OpenSSL::SSL::VERIFY_NONE
http.ca_file = @config[:http][:ca_file] if @config[:http][:ca_file]
http.ca_path = @config[:http][:ca_path] if @config[:http][:ca_path]
end
puts "request_uri: #{request_uri}"
# puts "headers: #{headers.to_s}"
http.start do
http.request_get(request_uri, headers) do |response|
# Pass along the cookies
# Some servers will continually call Set-Cookie with the same value for every single request
# to avoid authentication problems from cookies being stomped over (which is sad, nobody likes having their cookies crushed).
# We keep a hash of every cookie set and only update it if something changed
if response.header["set-cookie"]
cookies_changed = nil
response.header.get_fields("set-cookie").each do |cookie|
key, value = cookie.split(";").first.split("=")
key.strip!
value.strip!
# If it's a RETS-Session-ID, it needs to be shoved into the RETS-UA-Authorization field
# Save the RETS-Session-ID so it can be used with RETS-UA-Authorization
if key.downcase == "rets-session-id"
@rets_data[:session_id] = value
self.setup_ua_authorization(@rets_data) if @rets_data[:version]
end
cookies_changed = true if @cookie_list[key] != value
@cookie_list[key] = value
end
if cookies_changed
@headers.merge!("Cookie" => @cookie_list.map {|k, v| "#{k}=#{v}"}.join("; "))
end
end
# puts "response.code: #{response.code}"
# puts "response.body: #{response.body}"
# puts "response.message: #{response.message}"
# Rather than returning HTTP 401 when User-Agent authentication is needed, Retsiq returns HTTP 200
# with RETS error 20037. If we get a 20037, will let it pass through and handle it as if it was a HTTP 401.
# Retsiq apparently returns a 20041 now instead of a 20037 for the same use case.
# StratusRETS returns 20052 for an expired season
rets_code = nil
if response.code != "401" and ( response.code != "200" or args[:check_response] )
if response.body =~ /<RETS/i
# puts "found RETS in response body"
rets_code, text = self.get_rets_response(Nokogiri::XML(response.body).xpath("//RETS").first)
unless rets_code == "20037" or rets_code == "20041" or rets_code == "20052" or rets_code == "0"
raise RETS::APIError.new("#{rets_code}: #{text}", rets_code, text)
end
elsif !args[:check_response]
# puts "didn't find RETS in response body and check_response is false"
raise RETS::HTTPError.new("#{response.code}: #{response.message}", response.code, response.message)
end
end
# Strictly speaking, we do not need to set a RETS-Version in most cases, if RETS-UA-Authorization is not used
# It makes more sense to be safe and set it. Innovia at least does not set this until authentication is successful
# which is why this check is also here for HTTP 200s and not just 401s
if response.code == "200" and !@rets_data[:version] and response.header["rets-version"] != ""
@rets_data[:version] = response.header["rets-version"]
end
# Digest can become stale requiring us to reload data
if @auth_mode == :digest and response.header["www-authenticate"] =~ /stale=true/i
save_digest(get_digest(response.header.get_fields("www-authenticate")))
args[:block] ||= block
return self.request(args)
elsif response.code == "401" or rets_code == "20037" or rets_code == "20041" or rets_code == "20052"
raise RETS::Unauthorized, "Cannot login, check credentials" if ( @auth_mode and @retried_request ) or ( @retried_request and rets_code == "20037" )
@retried_request = true
# We already have an auth mode, and the request wasn't retried.
# Meaning we know that we had a successful authentication but something happened so we should relogin.
if @auth_mode
@headers.delete("Cookie")
@cookie_list = {}
self.request(:url => login_uri)
return self.request(args.merge(:block => block))
end
# Find a valid way of authenticating to the server as some will support multiple methods
if response.header.get_fields("www-authenticate") and !response.header.get_fields("www-authenticate").empty?
digest = get_digest(response.header.get_fields("www-authenticate"))
if digest
save_digest(digest)
@auth_mode = :digest
else
@headers.merge!("Authorization" => create_basic)
@auth_mode = :basic
end
unless @auth_mode
raise RETS::HTTPError.new("Cannot authenticate, no known mode found", response.code)
end
end
# Check if we need to deal with User-Agent authorization
if response.header["rets-version"] and response.header["rets-version"] != ""
@rets_data[:version] = response.header["rets-version"]
# If we get a 20037 error, it could be due to not having a RETS-Version set
# Under Innovia, passing RETS/1.7 will cause some errors
# because they don't pass the RETS-Version header until a successful login which is a HTTP 200
# They also don't use RETS-UA-Authorization, and it's better to not imply the RETS-Version header
# unless necessary, so will only do it for 20037 errors now.
elsif !@rets_data[:version] and rets_code == "20037"
@rets_data[:version] = "RETS/1.7"
end
self.setup_ua_authorization(@rets_data)
args[:block] ||= block
return self.request(args)
# We just tried to auth and don't have access to the original block in yieldable form
elsif args[:block]
@retried_request = nil
args.delete(:block).call(response)
elsif block_given?
@retried_request = nil
yield response
end
end
end
end
end
end
| 40.038235 | 166 | 0.596121 |
1d80c5ec500a8080f23aa816424f4deff9d5805a | 327 | # encoding: utf-8
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
module Azure end
module Azure::Customimagesearch end
module Azure::Customimagesearch::Profiles end
module Azure::Customimagesearch::Profiles::Latest end
| 36.333333 | 94 | 0.807339 |
8714be352a13fea5c62b0bbeeca95f92fa05aab3 | 1,355 | require 'spec_helper'
RSpec.describe Newgistics::StringHelper do
describe '.camelize' do
context "when upcase_first is true" do
it "transforms an underscored string as expected" do
result = described_class.camelize('a_sample_string', upcase_first: true)
expect(result).to eq('ASampleString')
end
it "transforms a mixed string as expected" do
result = described_class.camelize('a_Sample_String', upcase_first: true)
expect(result).to eq('ASampleString')
end
end
context "when upcase_first is false" do
it "transforms an underscored string as expected" do
result = described_class.camelize('a_sample_string', upcase_first: false)
expect(result).to eq('aSampleString')
end
it "transforms a mixed string as expected" do
result = described_class.camelize('A_Sample_String', upcase_first: false)
expect(result).to eq('aSampleString')
end
end
it "can handle nil values" do
expect(described_class.camelize(nil)).to eq('')
end
end
describe '.underscore' do
it "transforms a camel-cased string properly" do
expect(described_class.underscore('aNiceString')).to eq('a_nice_string')
end
it "can handle nil values" do
expect(described_class.underscore(nil)).to eq("")
end
end
end
| 28.229167 | 81 | 0.681919 |
3361fc5c087fc0ab81dff830e32da650e6630839 | 813 | require 'spec_helper'
module Alf
describe Reader do
subject{ Reader }
it { should respond_to(:rash) }
describe "rash on a real IO" do
subject{ Reader.rash($stdin) }
it{ should be_a(Reader::Rash) }
end
describe "reader" do
specify "when associated" do
r = Reader.reader('suppliers.rash')
expect(r).to be_kind_of(Reader::Rash)
end
specify "when not associated" do
expect(lambda{ Reader.reader('.noone') }).to raise_error(ArgumentError)
end
specify "when an IO" do
expect(Reader.reader($stdin)).to be_kind_of(Reader::Rash)
end
specify "with options" do
r = Reader.reader('suppliers.rash', {:hello => "world"})
expect(r.options).to eq({:hello => "world"})
end
end
end
end
| 21.972973 | 79 | 0.602706 |
7abeb3b406734dd0e78f5649a9d610f5ccc8d138 | 221 | class CreatePosts < ActiveRecord::Migration[6.1]
def change
create_table :posts do |t|
t.string :title
t.text :body
t.integer :user_id
t.boolean :edited
t.timestamps
end
end
end
| 17 | 48 | 0.624434 |
ff2b4594a374e6c3f27b8bac8ed66ab23cbdf4fd | 3,824 | require File.expand_path('../../test_helper', __FILE__)
class DomainMembersControllerTest < ActionController::TestCase
def setup
@user = with_unique_user
with_domain
end
def with_domain
@domain = Domain.first :as => @user
unless @domain
@domain = Domain.new(get_post_form.merge(:as => @user))
flunk @domain.errors.inspect unless @domain.save
end
@domain
end
def test_get_index_redirects
get :index, {:domain_id => @domain}
assert_redirected_to domain_path(@domain)
end
def test_update_without_params
Domain.any_instance.expects(:update_members).never
put :update, {:domain_id => @domain}
assert_redirected_to domain_path(@domain)
end
def test_update_single_member_error
put :update, {
:domain_id => @domain,
:members => {:type => 'user', :role => 'view', :login => 'x', :adding => 'true'}
}
assert_response :success
assert_template 'domains/show'
assert_equal "The domain members could not be updated.", flash[:error]
assert new_members = assigns[:new_members]
assert_equal 1, new_members.count
assert new_members[0].errors[:login].to_s =~ /x/
assert_select '.members.editing'
assert_select "input[name='members[][login]']", :count => 2
assert_select "tr.template input[name='members[][login]']", :count => 1
end
def test_update_multi_member_error
Console.config.capabilities_model_class.any_instance.expects(:max_teams).at_least(0).returns(1)
put :update, {
:domain_id => @domain,
:members => [
{:type => 'user', :role => 'view', :login => 'x', :adding => 'true'},
{:type => 'user', :role => 'view', :login => 'y', :adding => 'true'},
{:type => 'team', :role => 'view', :name => 'team1', :id => 'tid1', :adding => 'true'}
]
}
assert_response :success
assert_template 'domains/show'
assert_equal "The domain members could not be updated.", flash[:error]
assert new_members = assigns[:new_members]
assert_equal 3, new_members.count
assert new_members[0].errors[:login].to_s =~ /x/
assert new_members[1].errors[:login].to_s =~ /y/
assert new_members[2].errors[:id].to_s =~ /tid1/
assert_select '.members.editing'
assert_select "input[name='members[][login]']", :count => 3
assert_select "tr.template input[name='members[][login]']", :count => 1
assert_select "input[name='members[][name]']", :count => 2
assert_select "tr.template input[name='members[][name]']", :count => 1
end
def test_update_single_member_success
Domain.any_instance.expects(:update_members).returns(true)
put :update, {
:domain_id => @domain,
:members => {:type => 'user', :role => 'view', :login => 'x'}
}
assert_redirected_to domain_path(@domain)
assert flash[:success]
end
def test_update_multi_member_success
Domain.any_instance.expects(:update_members).returns(true)
put :update, {
:domain_id => @domain,
:members => [
{:type => 'user', :role => 'view', :login => 'x'},
{:type => 'user', :role => 'none', :login => 'y'}
]
}
assert_redirected_to domain_path(@domain)
assert flash[:success]
end
def test_leave
Domain.any_instance.expects(:leave).never
get :leave, {:domain_id => @domain}
assert_response :success
assert_template 'members/leave'
Domain.any_instance.expects(:leave).once.returns(false)
post :leave, {:domain_id => @domain}
assert_redirected_to domain_path(@domain)
assert flash[:error]
Domain.any_instance.expects(:leave).once.returns(true)
post :leave, {:domain_id => @domain}
assert_redirected_to console_path
assert flash[:success]
end
protected
def get_post_form
{:name => "d#{uuid[0..12]}"}
end
end
| 31.344262 | 99 | 0.647228 |
b90dce46e6c4d71003e9bb9e04da20a25c8daec0 | 951 | default['nomad']['version'] = '0.7.1'
default['nomad']['service_name'] = 'nomad'
default['nomad']['service_user'] = 'nomad'
default['nomad']['service_group'] = 'nomad'
default['nomad']['create_service_user'] = true
default['nomad']['archive_name'] = 'nomad'
default['nomad']['archive_url'] = 'https://releases.hashicorp.com/nomad/'
default['nomad']['config']['path'] = '/etc/nomad/default.json'
default['nomad']['config']['data_dir'] = '/var/lib/nomad'
default['nomad']['config']['bind_addr'] = '0.0.0.0'
default['nomad']['config']['name'] = node['fqdn']
default['nomad']['config']['datacenter'] = 'dc1'
default['nomad']['config']['advertise']['http'] = node['ipaddress']
default['nomad']['config']['advertise']['rpc'] = node['ipaddress']
default['nomad']['config']['advertise']['serf'] = node['ipaddress']
default['nomad']['config']['consul']['address'] = "#{node['ipaddress']}:8500"
default['nomad']['service']['config_dir'] = '/etc/nomad/conf.d'
| 47.55 | 77 | 0.649842 |
33d8d4e36c022d149c2c9a9b707277429cc5d0c7 | 766 | require 'benchmark'
total = (ENV['TOTAL'] || 100_000).to_i
const = 2 ** 12
Benchmark.bmbm do |x|
x.report("Fixnum#+const") do
total.times do |i|
const + const
end
end
x.report("Fixnum#*const") do
total.times do |i|
const * const
end
end
x.report("Fixnum#+i") do
total.times do |i|
const + i
end
end
x.report("Fixnum#*i") do
total.times do |i|
const * i
end
end
x.report("Fixnum#<<2") do
total.times do |i|
i << 2
end
end
x.report("Fixnum#>>2") do
total.times do |i|
i >> 2
end
end
x.report("Fixnum#%29") do
total.times do |i|
i % 29
end
end
x.report("Fixnum#**i") do
total.times do |i|
2 ** (i % 29)
end
end
end
| 13.678571 | 38 | 0.520888 |
5da927a40811f9aa7a8e195c9b2138220fea13c8 | 524 | module WashOut
module Model
def wash_out_columns
columns_hash
end
def wash_out_param_map
types = {
:text => :string,
:float => :double,
:decimal => :double,
:timestamp => :string
}
map = {}
wash_out_columns.each do |key, column|
type = column.type
type = types[type] if types.has_key?(type)
map[key] = type
end
map
end
def wash_out_param_name
return name.underscore
end
end
end | 18.068966 | 50 | 0.538168 |
081e63eb09435c4a020c04aa7c41789851557370 | 391 | # == Schema Information
#
# Table name: subjects
#
# created_at :datetime not null
# id :bigint(8) not null, primary key
# name :string not null
# updated_at :datetime not null
#
class Subject < ApplicationRecord
has_and_belongs_to_many :activity_types, join_table: :activity_type_subjects
scope :by_name, -> { order(name: :asc) }
end
| 26.066667 | 78 | 0.644501 |
bb9e109bc45656717aedf0b951e061882ba96cdb | 5,049 | a = File.absolute_path(__FILE__).split "/"
a.pop 2
$:.unshift a.join("/") + "/lib"
require "pry"
require "washington/global"
def log text
puts "\e[1m" + text + "\e[0m"
end
def cleanup!
Washington.reset
end
log "Washington"
log "=========="
log ""
#############################################################################
log "Calling the function should get back an instance"
the_example = example()
unless the_example.is_a? Washington::Example
raise "it should be a Washington::Example"
end
cleanup!
#############################################################################
log "The message should be stored in the instance"
the_example = example "Message"
unless the_example.message == "Message"
raise "The message should be stored"
end
cleanup!
#############################################################################
log "The example proc should also be stored"
example_proc = proc {}
the_example = example "The message", example_proc
unless the_example.function == example_proc
raise "The proc should be stored"
end
cleanup!
#############################################################################
log "The example should be registered in the list"
the_example = example "Registered"
unless Washington.list.include? the_example
raise "The example should be registered"
end
cleanup!
#############################################################################
log "The example should change itself to a Success when successful"
Washington.use "silent"
example_proc = proc { raise "fail" unless 2 + 2 == 4 }
the_example = example "To the infinite and beyond!", example_proc
success = the_example.run
unless Washington.picked[0].is_a? Washington::Success
binding.pry
raise "It should convert to a success"
end
unless Washington.picked[0].message == "To the infinite and beyond!"
raise "The success should contain the message"
end
unless Washington.picked[0].function == example_proc
raise "The success should contain the function"
end
unless Washington.picked[0].original == the_example
raise "The success should contain the original example"
end
unless Washington.picked[0] == success
raise "The return of #run should be the same Success instance"
end
unless Washington.picked[0] == Washington.successful[0]
raise "The success should be available on the #successful list"
end
cleanup!
#############################################################################
log "The example should change itself to a Failure when failing"
Washington.use "silent"
example_proc = proc { raise ArgumentError, "no bueno" unless 2 + 3 == 4 }
the_example = example "To the failure and beyond!", example_proc
failure = the_example.run
unless Washington.picked[0].is_a? Washington::Failure
raise "It should be a Failure"
end
unless Washington.picked[0].message == "To the failure and beyond!"
raise "It should have the message"
end
unless Washington.picked[0].error.is_a? ArgumentError
raise "It should have the error"
end
unless Washington.picked[0].error.message == "no bueno"
raise "The error should have the message"
end
unless Washington.picked[0].function == example_proc
raise "It should have the proc"
end
unless Washington.picked[0].original == the_example
raise "It should have the original"
end
unless Washington.picked[0] == failure
raise "It should return the failure"
end
unless Washington.failing[0] == Washington.picked[0]
raise "It should be in the failing list"
end
cleanup!
#############################################################################
log "The example should change itself to a Pending when there is no function"
Washington.use "silent"
the_example = example "This is not defined yet"
pending = the_example.run
unless Washington.picked[0].is_a? Washington::Pending
raise "It should be a Pending"
end
unless Washington.picked[0].message == "This is not defined yet"
raise "It should have the message"
end
unless Washington.picked[0].original == the_example
raise "It should have the original"
end
unless Washington.picked[0] == pending
raise "It should return the picked"
end
unless Washington.pending[0] == Washington.picked[0]
raise "It should be in the pending list"
end
cleanup!
#############################################################################
log "By default the formatter should be set as formatter"
unless Washington.formatter.is_a? Washington::Formatter
raise "It should use the default formatter"
end
cleanup!
#############################################################################
log "You should be able to replace the formatter by a different one"
a_formatter = Object.new
Washington.use a_formatter
unless Washington.formatter == a_formatter
raise "It should be the new formatter"
end
cleanup!
#############################################################################
log "It is not complete if there are examples not done"
Washington.picked = [Washington::Example.new]
if Washington.complete?
raise "It shouldn't be complete"
end
cleanup!
| 22.95 | 77 | 0.634977 |
7a016f402878fbe091a72d7d3365b9212c7ff8b7 | 909 | class Node
attr_accessor :value, :next_node
def initialize(value, next_node = nil)
@value = value
@next_node = next_node
end
end
class Stack
def initialize
@head = nil
@min = nil
end
def push(number)
# Track the running minimum in a second linked list. Values equal to the
# current minimum are pushed too, so popping a duplicate keeps it correct.
@min = Node.new(number, @min) if @min.nil? || number <= @min.value
@head = Node.new(number, @head)
end
def pop
return puts('empty') if @head.nil?
value = @head.value
@head = @head.next_node
@min = @min.next_node if @min.value == value
value
end
def min
return puts('empty') if @min.nil?
@min.value
end
end
stack = Stack.new
stack.push(3)
stack.push(5)
puts stack.min
# => 3
stack.pop
stack.push(7)
puts stack.min
# => 3
stack.push(2)
puts stack.min
# => 2
stack.pop
puts stack.min
# => 3 | 15.672414 | 74 | 0.561056 |
1adca9cb4450aed402baec85ac95f5ff9c398b66 | 108 | # -*- encoding : utf-8 -*-
require 'spec_helper'
describe UserTotal do
pending "put some tests here"
end
| 15.428571 | 31 | 0.694444 |
1c0f582e16d44c55c6c830f21d35085cb252a1ec | 368 | module FbGraph
module Connections
module PromotablePosts
def promotable_posts(options = {})
posts = self.connection :promotable_posts, options
posts.map! do |post|
PromotablePost.new post[:id], post.merge(
:access_token => options[:access_token] || self.access_token
)
end
end
end
end
end | 26.285714 | 72 | 0.616848 |
7a43a68209a87df260074a543016d8c24f74a665 | 10,562 | #-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
require 'rack/test'
describe 'API v3 Grids resource', type: :request, content_type: :json do
include Rack::Test::Methods
include API::V3::Utilities::PathHelper
shared_let(:current_user) do
FactoryBot.create(:user)
end
let(:my_page_grid) { FactoryBot.create(:my_page, user: current_user) }
let(:other_user) do
FactoryBot.create(:user)
end
let(:other_my_page_grid) { FactoryBot.create(:my_page, user: other_user) }
before do
login_as(current_user)
end
subject(:response) { last_response }
describe '#get INDEX' do
let(:path) { api_v3_paths.grids }
let(:stored_grids) do
my_page_grid
other_my_page_grid
end
before do
stored_grids
get path
end
it 'sends a collection of grids but only those visible to the current user' do
expect(subject.body)
.to be_json_eql('Collection'.to_json)
.at_path('_type')
expect(subject.body)
.to be_json_eql('Grid'.to_json)
.at_path('_embedded/elements/0/_type')
expect(subject.body)
.to be_json_eql(1.to_json)
.at_path('total')
end
context 'with a filter on the scope attribute' do
shared_let(:other_grid) do
grid = Grids::Grid.new(row_count: 20,
column_count: 20)
grid.save
Grids::Grid
.where(id: grid.id)
.update_all(user_id: current_user.id)
grid
end
let(:stored_grids) do
my_page_grid
other_my_page_grid
other_grid
end
let(:path) do
filter = [{ 'scope' =>
{
'operator' => '=',
'values' => [my_page_path]
} }]
"#{api_v3_paths.grids}?#{{ filters: filter.to_json }.to_query}"
end
it 'responds with 200 OK' do
expect(subject.status).to eq(200)
end
it 'sends only the my page of the current user' do
expect(subject.body)
.to be_json_eql('Collection'.to_json)
.at_path('_type')
expect(subject.body)
.to be_json_eql('Grid'.to_json)
.at_path('_embedded/elements/0/_type')
expect(subject.body)
.to be_json_eql(1.to_json)
.at_path('total')
end
end
end
describe '#get' do
let(:path) { api_v3_paths.grid(my_page_grid.id) }
let(:stored_grids) do
my_page_grid
end
before do
stored_grids
get path
end
it 'responds with 200 OK' do
expect(subject.status).to eq(200)
end
it 'sends a grid block' do
expect(subject.body)
.to be_json_eql('Grid'.to_json)
.at_path('_type')
end
it 'identifies the url the grid is stored for' do
expect(subject.body)
.to be_json_eql(my_page_path.to_json)
.at_path('_links/scope/href')
end
context 'with the page not existing' do
let(:path) { api_v3_paths.grid(5) }
it 'responds with 404 NOT FOUND' do
expect(subject.status).to eql 404
end
end
context 'with the grid belonging to someone else' do
let(:stored_grids) do
my_page_grid
other_my_page_grid
end
let(:path) { api_v3_paths.grid(other_my_page_grid.id) }
it 'responds with 404 NOT FOUND' do
expect(subject.status).to eql 404
end
end
end
describe '#patch' do
let(:path) { api_v3_paths.grid(my_page_grid.id) }
let(:params) do
{
"rowCount": 10,
"name": 'foo',
"columnCount": 15,
"widgets": [{
"identifier": "work_packages_assigned",
"startRow": 4,
"endRow": 8,
"startColumn": 2,
"endColumn": 5
}]
}.with_indifferent_access
end
let(:stored_grids) do
my_page_grid
end
before do
stored_grids
patch path, params.to_json, 'CONTENT_TYPE' => 'application/json'
end
it 'responds with 200 OK' do
expect(subject.status).to eq(200)
end
it 'returns the altered grid block' do
expect(subject.body)
.to be_json_eql('Grid'.to_json)
.at_path('_type')
expect(subject.body)
.to be_json_eql('foo'.to_json)
.at_path('name')
expect(subject.body)
.to be_json_eql(params['rowCount'].to_json)
.at_path('rowCount')
expect(subject.body)
.to be_json_eql(params['widgets'][0]['identifier'].to_json)
.at_path('widgets/0/identifier')
end
it 'persists the changes' do
expect(my_page_grid.reload.row_count)
.to eql params['rowCount']
end
context 'with invalid params' do
let(:params) do
{
"rowCount": -5,
"columnCount": 15,
"widgets": [{
"identifier": "work_packages_assigned",
"startRow": 4,
"endRow": 8,
"startColumn": 2,
"endColumn": 5
}]
}.with_indifferent_access
end
it 'responds with 422 and mentions the error' do
expect(subject.status).to eq 422
expect(subject.body)
.to be_json_eql('Error'.to_json)
.at_path('_type')
expect(subject.body)
.to be_json_eql("Widgets is outside of the grid.".to_json)
.at_path('_embedded/errors/0/message')
expect(subject.body)
.to be_json_eql("Number of rows must be greater than 0.".to_json)
.at_path('_embedded/errors/1/message')
end
it 'does not persist the changes to widgets' do
expect(my_page_grid.reload.widgets.count)
.to eql MyPage::GridRegistration.defaults[:widgets].size
end
end
context 'with a scope param' do
let(:params) do
{
"_links": {
"scope": {
"href": ''
}
}
}.with_indifferent_access
end
it 'responds with 422 and mentions the error' do
expect(subject.status).to eq 422
expect(subject.body)
.to be_json_eql('Error'.to_json)
.at_path('_type')
expect(subject.body)
.to be_json_eql("You must not write a read-only attribute.".to_json)
.at_path('message')
expect(subject.body)
.to be_json_eql("scope".to_json)
.at_path('_embedded/details/attribute')
end
end
context 'with the page not existing' do
let(:path) { api_v3_paths.grid(5) }
it 'responds with 404 NOT FOUND' do
expect(subject.status).to eql 404
end
end
context 'with the grid belonging to someone else' do
let(:stored_grids) do
my_page_grid
other_my_page_grid
end
let(:path) { api_v3_paths.grid(other_my_page_grid.id) }
it 'responds with 404 NOT FOUND' do
expect(subject.status).to eql 404
end
end
end
describe '#post' do
let(:path) { api_v3_paths.grids }
let(:params) do
{
"rowCount": 10,
"columnCount": 15,
"widgets": [{
"identifier": "work_packages_assigned",
"startRow": 4,
"endRow": 8,
"startColumn": 2,
"endColumn": 5
}],
"_links": {
"scope": {
"href": my_page_path
}
}
}.with_indifferent_access
end
before do
post path, params.to_json, 'CONTENT_TYPE' => 'application/json'
end
it 'responds with 201 CREATED' do
expect(subject.status).to eq(201)
end
it 'returns the created grid block' do
expect(subject.body)
.to be_json_eql('Grid'.to_json)
.at_path('_type')
expect(subject.body)
.to be_json_eql(params['rowCount'].to_json)
.at_path('rowCount')
expect(subject.body)
.to be_json_eql(params['widgets'][0]['identifier'].to_json)
.at_path('widgets/0/identifier')
end
it 'persists the grid' do
expect(Grids::Grid.count)
.to eql(1)
end
context 'with invalid params' do
let(:params) do
{
"rowCount": -5,
"columnCount": "sdjfksdfsdfdsf",
"widgets": [{
"identifier": "work_packages_assigned",
"startRow": 4,
"endRow": 8,
"startColumn": 2,
"endColumn": 5
}],
"_links": {
"scope": {
"href": my_page_path
}
}
}.with_indifferent_access
end
it 'responds with 422' do
expect(subject.status).to eq(422)
end
it 'does not create a grid' do
expect(Grids::Grid.count)
.to eql(0)
end
it 'returns the errors' do
expect(subject.body)
.to be_json_eql('Error'.to_json)
.at_path('_type')
expect(subject.body)
.to be_json_eql("Widgets is outside of the grid.".to_json)
.at_path('_embedded/errors/0/message')
expect(subject.body)
.to be_json_eql("Number of rows must be greater than 0.".to_json)
.at_path('_embedded/errors/1/message')
expect(subject.body)
.to be_json_eql("Number of columns must be greater than 0.".to_json)
.at_path('_embedded/errors/2/message')
end
end
end
end
| 25.450602 | 91 | 0.587483 |
91097075fdf75b32b1a27bdc9c9b8db1dd996aeb | 2,331 | # encoding: utf-8
module Slideshow
class ManifestFinder
include LogUtils::Logging
include ManifestHelper
def initialize( config )
@config = config
@usrdir = File.expand_path( Dir.pwd ) # save original (current) working directory
end
attr_reader :config
attr_reader :usrdir # original working dir (user called slideshow from)
def find_manifestsrc( manifest_arg ) ## rename - just use find_manifest ??
manifest_path_or_name = manifest_arg.dup ## make a copy
# add .txt file extension if missing (for convenience)
if manifest_path_or_name.downcase.ends_with?( '.txt' ) == false
manifest_path_or_name << '.txt'
end
logger.debug "manifest=#{manifest_path_or_name}"
# check if file exists (if yes use custom template package!) - allows you to override builtin package with same name
if File.exists?( manifest_path_or_name )
manifestsrc = manifest_path_or_name
else
# check for builtin manifests
manifests = installed_template_manifests
matches = manifests.select { |m| m[0] == manifest_path_or_name }
if matches.empty?
puts "*** error: unknown template manifest '#{manifest_path_or_name}'"
puts
puts "Use"
puts " slideshow list # or"
puts " slideshow ls"
puts "to see what template packs you have installed."
puts
puts "Use"
puts " slideshow install #{manifest_path_or_name.sub('.txt','')} # or"
puts " slideshow i #{manifest_path_or_name.sub('.txt','')}"
puts "to (try to) install the missing template pack."
puts
puts "See github.com/slideshow-templates for some ready-to-use/download template packs"
puts "or use your very own."
puts
# todo: list installed manifests - why? why not?
exit 2
end
manifestsrc = matches[0][1]
end
### todo: use File.expand_path( xx, relative_to ) always with second arg
## do NOT default to cwd (because cwd will change!)
# Reference src with absolute path, because this can be used with different pwd
manifestsrc = File.expand_path( manifestsrc, usrdir )
manifestsrc
end
end # class ManifestFinder
end # class Slideshow
| 30.671053 | 121 | 0.642643 |
7aa37616bf64b0927d80bb1251b632cd3203dd5c | 3,105 | ##
# MachineLearning module
# Copyright (c) Hiroyuki Matsuzaki 2015
#
# See Copyright Notice in LICENSE
#
module MachineLearning
##
# NaiveBayes class
#
class NaiveBayes
##
# call-seq:
# nb.word_count_up(word, category)
#
def word_count_up(word, category)
unless self.vocabularies.include?(word)
# new word
self.vocabularies << word
end
if self.word_count.include?(category)
if self.word_count[category].include?(word)
self.word_count[category][word] += 1
else
# new word in this category
self.word_count[category].store(word, 1)
end
else
# new category
self.word_count.store(category, Hash.new(0))
self.word_count[category].store(word, 1)
end
end
##
# call-seq:
# nb.category_count_up(category)
#
def category_count_up(category)
if self.category_count.include?(category)
self.category_count[category] += 1
else
self.category_count.store(category, 1)
end
end
##
# call-seq:
# nb.training(words, category)
#
# ex) words = ["word_1","word_2", ...]
#
def training(words, category)
words.each do |w|
self.word_count_up(w, category)
end
self.category_count_up(category)
end
##
# call-seq:
# nb.word_count_in_category(word, category)
#
def word_count_in_category(word, category)
if self.word_count[category].include?(word)
return self.word_count[category][word]
else
return 0
end
end
##
# call-seq:
# nb.all_word_count_in_category(category)
#
def all_word_count_in_category(category)
sum = 0
self.word_count[category].each do |w, count|
sum += count
end
return sum
end
##
# call-seq:
# nb.priorprob(category) => P(cat)
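#   i.e. count(category) / total count across all categories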
#
def priorprob(category)
sum = 0
self.category_count.each do |cat, count|
sum += count
end
return self.category_count[category].to_f / sum # to_f avoids integer division truncating to 0
end
##
# call-seq:
# nb.wordprob(word, category) => P(word|cat)
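#   Laplace-smoothed: (count(word in category) + 1) / (total words in category + vocabulary size)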
#
def wordprob(word, category)
numerator = self.word_count_in_category(word, category) + 1
denominator = self.all_word_count_in_category(category) + self.vocabularies.size
return numerator.to_f / denominator # to_f avoids integer division truncating to 0
end
##
# call-seq:
# nb.category_score(words, category)
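#   log P(category) + sum of log P(word|category) over the given words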
#
def category_score(words, category)
score = Math.log(self.priorprob(category))
words.each do |w|
score += Math.log(self.wordprob(w, category))
end
return score
end
##
# call-seq:
# nb.classifier(words)
#
def classifier(words)
best_category = "others."
# category scores are log-probabilities (negative), so start from -Infinity
max = -Float::INFINITY
self.category_count.each do |cat, count|
prob = self.category_score(words, cat)
if prob > max
best_category = cat
max = prob
end
end
return best_category
end
end # class NaiveBayes
end
| 22.021277 | 86 | 0.585829 |
7a24d65e5a8d3b50978d9b14e76271c74981bbf2 | 1,948 | module Voltron
module Upload
module CarrierWave
module Uploader
module Base
def initialize(*args)
self.class.send(:before, :store, :save_timestamp)
self.class.send(:after, :store, :apply_timestamp)
super(*args)
end
def to_upload_json
if present?
{
id: id,
url: url,
name: file.filename,
size: file.size,
type: file.content_type
}
else
{}
end
end
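# Identifier for this upload: for stored files it is built from the file's
# mtime (made unique by apply_timestamp below) plus the filename; for cached
# files the cache id is used instead, falling back to the bare filename.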
def id
if stored?
[File.mtime(full_store_path).to_i, file.filename].join('/')
elsif cached? && File.exists?(Rails.root.join('public', cache_path))
[cached?, file.filename].join('/')
else
file.filename
end
end
def stored?
File.exists?(full_store_path)
end
def full_store_path
Rails.root.join('public', store_path(file.filename))
end
private
# Before we store the file for good, grab the offset number
# so it can be used to create a unique timestamp after storing
def save_timestamp(*args)
id_components = File.basename(File.expand_path('..', file.path)).split('-')
@offset = id_components[2].to_i + 1000
end
# Update the modified time of the file to a unique timestamp
# This timestamp will later be used to help identify the file,
# as it will be part of the generated id
def apply_timestamp(*args)
@offset ||= rand(1..1000)
if File.exist?(file.path)
FileUtils.touch file.path, mtime: Time.now + @offset.seconds
end
end
end
end
end
end
end
| 28.231884 | 89 | 0.499487 |
1c9bd77c7768796bf69b6e3d6bf12de8a83d50d5 | 598 | module Vultr
class UserResource < Resource
def list(**params)
response = get_request("users", params: params)
Collection.from_response(response, key: "users", type: User)
end
def create(**attributes)
User.new post_request("users", body: attributes).body.dig("user")
end
def retrieve(user_id:)
User.new get_request("users/#{user_id}").body.dig("user")
end
def update(user_id:, **attributes)
patch_request("users/#{user_id}", body: attributes)
end
def delete(user_id:)
delete_request("users/#{user_id}")
end
end
end
| 23.92 | 71 | 0.647157 |
030364d5e6af9ff595a84d9ed1b20971de6bdcc1 | 210 | # frozen_string_literal: true
require 'rspec/expectations'
require 'semantic_puppet'
RSpec::Matchers.define :be_semanticlly_correct do
match do |actual|
SemanticPuppet::Version.valid?(actual)
end
end
| 19.090909 | 49 | 0.785714 |
f8cc48b8f363af3e6974f4ba8f70ddb4133b0e75 | 625 | module Setup
class PullImport < Setup::BasePull
include Setup::DataUploader
include RailsAdmin::Models::Setup::PullImportAdmin
build_in_data_type
def run(message)
message[:discard_collection] = true unless message.has_key?('discard_collection')
super
end
protected
def source_shared_collection
unless @shared_collection
pull_data = hashify(data)
@shared_collection = Setup::CrossSharedCollection.new(data: pull_data)
%w(name title readme).each { |key| @shared_collection[key] = pull_data[key] }
end
@shared_collection
end
end
end
| 24.038462 | 87 | 0.6944 |
bfd666ac7445614c50b977a79cc2de9c47fef4cd | 61 | module DwollaV2
class InvalidScopesError < Error
end
end
| 12.2 | 34 | 0.786885 |
bb0b5bc72f31e32a9fd0dec4b627927be4669689 | 2,842 | require "rails_helper"
RSpec.describe "routes for authentication", type: :routing do
describe "production settings" do
context "dfe_signin" do
it "default default" do
expect(get: "/auth/dfe/callback").to route_to("sessions#create")
expect(get: "/auth/dfe/signout").to route_to("sessions#destroy")
expect_magic_to_route_to_not_found
expect_persona_to_route_to_not_found
end
it "when opps still defaults", authentication_mode: :opps do
expect(get: "/auth/dfe/callback").to route_to("sessions#create")
expect(get: "/auth/dfe/signout").to route_to("sessions#destroy")
expect_magic_to_route_to_not_found
expect_persona_to_route_to_not_found
end
end
describe "dfe_signin is down turn on" do
context "magic_link", authentication_mode: :magic_link do
it "routes magic" do
expect(post: "/send_magic_link").to route_to("sessions#send_magic_link")
expect(get: "/magic_link_sent").to route_to("sessions#magic_link_sent")
expect(get: "/signin_with_magic_link").to route_to("sessions#create_by_magic")
expect_dfe_to_route_to_not_found
expect_persona_to_route_to_not_found
end
end
end
end
describe "non-production settings" do
context "persona", authentication_mode: :persona do
it "unsafe routes" do
expect_magic_to_route_to_not_found
expect_dfe_to_route_to_not_found
expect(get: "/personas").to route_to("personas#index")
expect(post: "/auth/developer/callback").to route_to("sessions#create")
expect(get: "/auth/developer/signout").to route_to("sessions#destroy")
end
end
end
end
def expect_magic_to_route_to_not_found
expect(post: "/send_magic_link")
.to route_to(controller: "errors", action: "not_found", path: "send_magic_link")
expect(get: "/magic_link_sent")
.to route_to(controller: "errors", action: "not_found", path: "magic_link_sent")
expect(get: "/signin_with_magic_link")
.to route_to(controller: "errors", action: "not_found", path: "signin_with_magic_link")
end
def expect_persona_to_route_to_not_found
expect(get: "/personas")
.to route_to(controller: "errors", action: "not_found", path: "personas")
expect(post: "/auth/developer/callback")
.to route_to(controller: "errors", action: "not_found", path: "auth/developer/callback")
expect(get: "/auth/developer/signout")
.to route_to(controller: "errors", action: "not_found", path: "auth/developer/signout")
end
def expect_dfe_to_route_to_not_found
expect(get: "/auth/dfe/callback")
.to route_to(controller: "errors", action: "not_found", path: "auth/dfe/callback")
expect(get: "/auth/dfe/signout")
.to route_to(controller: "errors", action: "not_found", path: "auth/dfe/signout")
end
| 40.028169 | 92 | 0.705841 |
796e1a562659bb6c51ee7c4e3244de144280bed8 | 1,641 | # -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{has_draft}
s.version = "0.1.1"
s.required_rubygems_version = Gem::Requirement.new(">= 1.2") if s.respond_to? :required_rubygems_version=
s.authors = ["Ben Hughes"]
s.date = %q{2009-03-09}
s.description = %q{Allows for your ActiveRecord models to have drafts which are stored in a separate duplicate table.}
s.email = %q{[email protected]}
s.extra_rdoc_files = ["CHANGELOG", "lib/has_draft.rb", "README.rdoc", "tasks/has_draft_tasks.rake"]
s.files = ["CHANGELOG", "has_draft.gemspec", "init.rb", "install.rb", "lib/has_draft.rb", "MIT-LICENSE", "Rakefile", "README.rdoc", "tasks/has_draft_tasks.rake", "test/config/database.yml", "test/fixtures/article_drafts.yml", "test/fixtures/articles.yml", "test/has_draft_test.rb", "test/models/article.rb", "test/schema.rb", "test/test_helper.rb", "uninstall.rb", "Manifest"]
s.has_rdoc = true
s.homepage = %q{http://github.com/railsgarden/has_draft}
s.rdoc_options = ["--line-numbers", "--inline-source", "--title", "Has_draft", "--main", "README.rdoc"]
s.require_paths = ["lib"]
s.rubyforge_project = %q{has_draft}
s.rubygems_version = %q{1.3.1}
s.summary = %q{Allows for your ActiveRecord models to have drafts which are stored in a separate duplicate table.}
s.test_files = ["test/has_draft_test.rb", "test/test_helper.rb"]
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 2
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
else
end
else
end
end
| 49.727273 | 378 | 0.703839 |
181a605cc4dc7e5c557b680a963fe2f6ac4a6b26 | 2,778 | # frozen_string_literal: true
require "dry/configurable"
require "dry/core/constants"
module Hanami
class Application
# Application settings
#
# Users are expected to inherit from this class to define their application
# settings.
#
# @example
# # config/settings.rb
# # frozen_string_literal: true
#
# require "hanami/application/settings"
# require "my_app/types"
#
# module MyApp
# class Settings < Hanami::Application::Settings
# setting :database_url
# setting :feature_flag, default: false, constructor: Types::Params::Bool
# end
# end
#
# Settings are defined with
# [dry-configurable](https://dry-rb.org/gems/dry-configurable/), so you can
# take a look there to see the supported syntax.
#
# Users work with an instance of this class made available within the
# `settings` key in the container. The instance gets its settings populated
# from a configurable store, which defaults to
# {Hanami::Application::Settings::DotenvStore}.
#
# A different store can be set through the `settings_store` Hanami
# configuration option. All it needs to do is implement a `#fetch` method
# with the same signature as `Hash#fetch`.
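#
# @example A custom settings store (illustrative sketch: the class name and
#   ENV-based lookup are hypothetical; only the `#fetch` contract is required)
#
#   class EnvSettingsStore
#     def fetch(name, &block)
#       ENV.fetch(name.to_s.upcase, &block)
#     end
#   end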
#
# @see Hanami::Application::Settings::DotenvStore
# @since 2.0.0
class Settings
# Exception for errors in the definition of settings.
#
# Its message collects all the individual errors that can be raised for
# each setting.
InvalidSettingsError = Class.new(StandardError) do
def initialize(errors)
@errors = errors
end
def to_s
<<~STR.strip
Could not initialize settings. The following settings were invalid:
#{@errors.map { |setting, message| "#{setting}: #{message}" }.join("\n")}
STR
end
end
# @api private
EMPTY_STORE = Dry::Core::Constants::EMPTY_HASH
include Dry::Configurable
# @api private
def initialize(store = EMPTY_STORE)
errors = config._settings.map(&:name).reduce({}) do |errs, name|
public_send("#{name}=", store.fetch(name) { Dry::Core::Constants::Undefined })
errs
rescue => e # rubocop:disable Style/RescueStandardError
errs.merge(name => e)
end
raise InvalidSettingsError, errors if errors.any?
end
private
def method_missing(name, *args, &block)
if config.respond_to?(name)
config.send(name, *args, &block)
else
super
end
end
def respond_to_missing?(name, _include_all = false)
config.respond_to?(name) || super
end
end
end
end
| 29.553191 | 88 | 0.62167 |
181f96af2b62938683856c512ebe78554bfe6ae6 | 164 | require 'global_phone'
puts "\n\n\n\n\n\n\n\n\n\n\n#{ConSync::Engine.root}\n\n\n\n\n\n\n\n"
GlobalPhone.db_path = ConSync::Engine.root.join('db/global_phone.json')
| 41 | 71 | 0.707317 |
bf324a100b0c587708f382eb0f9915cd5b9b2272 | 86 | class AddQuestionTypetoQuestion < ActiveRecord::Migration[5.1]
def change
end
end
| 17.2 | 62 | 0.790698 |
878c840cd69b9f49ee6263b3167b772b5791abdd | 12,113 | class Cookbook < ActiveRecord::Base
include PgSearch
#
# Query cookbooks by case-insensitive name.
#
# @param name [String, Array<String>] a single name, or a collection of names
#
# @example
# Cookbook.with_name('redis').first
# #<Cookbook name: "redis"...>
# Cookbook.with_name(['redis', 'apache2']).to_a
# [#<Cookbook name: "redis"...>, #<Cookbook name: "apache2"...>]
#
# @todo: query and index by +LOWER(name)+ when ruby schema dumps support such
# a thing.
#
scope :with_name, lambda { |names|
lowercase_names = Array(names).map { |name| name.to_s.downcase }
where(lowercase_name: lowercase_names)
}
scope :ordered_by, lambda { |ordering|
reorder({
'recently_updated' => 'updated_at DESC',
'recently_added' => 'id DESC',
'most_downloaded' => '(cookbooks.web_download_count + cookbooks.api_download_count) DESC, id ASC',
'most_followed' => 'cookbook_followers_count DESC, id ASC'
}.fetch(ordering, 'name ASC'))
}
scope :owned_by, lambda { |username|
joins(owner: :chef_account).where('accounts.username = ?', username)
}
scope :index, lambda { |opts = {}|
includes(:cookbook_versions, owner: :chef_account)
.ordered_by(opts.fetch(:order, 'name ASC'))
.limit(opts.fetch(:limit, 10))
.offset(opts.fetch(:start, 0))
}
scope :featured, -> { where(featured: true) }
scope :filter_platforms, lambda { |platforms|
joins(cookbook_versions: :supported_platforms)
.where('supported_platforms.name IN (?)', platforms).distinct
.select('cookbooks.*', '(cookbooks.web_download_count + cookbooks.api_download_count)')
}
# Search
# --------------------
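# Full-text search matches the cookbook name (trigram), the owner's username
# and the version descriptions (tsearch); ranking weights name similarity at
# twice the tsearch score, as encoded in ranked_by below.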
pg_search_scope(
:search,
against: {
name: 'A'
},
associated_against: {
chef_account: { username: 'B' },
cookbook_versions: { description: 'C' }
},
using: {
tsearch: { dictionary: 'english', only: [:username, :description], prefix: true },
trigram: { only: [:name] }
},
ranked_by: ':trigram + (0.5 * :tsearch)',
order_within_rank: 'cookbooks.name'
)
# Callbacks
# --------------------
before_validation :copy_name_to_lowercase_name
# Associations
# --------------------
has_many :cookbook_versions, dependent: :destroy
has_many :cookbook_followers
has_many :followers, through: :cookbook_followers, source: :user
belongs_to :category
belongs_to :owner, class_name: 'User', foreign_key: :user_id
has_one :chef_account, through: :owner
belongs_to :replacement, class_name: 'Cookbook', foreign_key: :replacement_id
has_many :collaborators, as: :resourceable
has_many :collaborator_users, through: :collaborators, source: :user
# Delegations
# --------------------
delegate :description, to: :latest_cookbook_version
delegate :foodcritic_failure, to: :latest_cookbook_version
delegate :foodcritic_feedback, to: :latest_cookbook_version
# Validations
# --------------------
validates :name, presence: true, uniqueness: { case_sensitive: false }, format: /\A[\w_-]+\z/i
validates :lowercase_name, presence: true, uniqueness: true
validates :cookbook_versions, presence: true
validates :source_url, url: {
allow_blank: true,
allow_nil: true
}
validates :issues_url, url: {
allow_blank: true,
allow_nil: true
}
validates :replacement, presence: true, if: :deprecated?
#
# The total number of times a cookbook has been downloaded from Supermarket
#
# @return [Fixnum]
#
def self.total_download_count
sum(:api_download_count) + sum(:web_download_count)
end
#
# Sorts cookbook versions according to their semantic version
#
# @return [Array<CookbookVersion>] the sorted CookbookVersion records
#
def sorted_cookbook_versions
@sorted_cookbook_versions ||= cookbook_versions.sort_by { |v| Semverse::Version.new(v.version) }.reverse
end
#
# Transfers ownership of this cookbook to someone else. If the user id passed
# in represents someone that is already a collaborator on this cookbook, or
# if the User initiating this transfer is an admin, then we just assign the
# new owner and move on. If they're not already a collaborator, then we send
# them an email asking if they want ownership of this cookbook. This
# prevents abuse of people assigning random owners without getting permission.
#
# @param initiator [User] the User initiating the transfer
# @param recipient [User] the User to assign ownership to
#
# @return [String] a key representing a message to display to the user
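#
# @example (illustrative; variable names are placeholders)
#   cookbook.transfer_ownership(admin, collaborator) # => 'cookbook.ownership_transfer.done'
#   cookbook.transfer_ownership(owner, outside_user) # => 'cookbook.ownership_transfer.email_sent'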
#
def transfer_ownership(initiator, recipient)
if initiator.is?(:admin) || collaborator_users.include?(recipient)
collaborator = Collaborator.new(user_id: user_id, resourceable: self)
update_attribute(:user_id, recipient.id)
collaborator.save!
if collaborator_users.include?(recipient)
collaborator = collaborators.where(
user_id: recipient.id,
resourceable: self
).first
collaborator.destroy unless collaborator.nil?
end
'cookbook.ownership_transfer.done'
else
transfer_request = OwnershipTransferRequest.create(
sender: initiator,
recipient: recipient,
cookbook: self
)
CookbookMailer.delay.transfer_ownership_email(transfer_request)
'cookbook.ownership_transfer.email_sent'
end
end
#
# The most recent CookbookVersion, based on the semantic version number
#
# @return [CookbookVersion] the most recent CookbookVersion
#
def latest_cookbook_version
@latest_cookbook_version ||= sorted_cookbook_versions.first
end
#
# Return all of the cookbook errors as well as full error messages for any of
# the CookbookVersions
#
# @return [Array<String>] all the error messages
#
def seriously_all_of_the_errors
messages = errors.full_messages.reject { |e| e == 'Cookbook versions is invalid' }
cookbook_versions.each do |version|
almost_everything = version.errors.full_messages.reject { |x| x =~ /Tarball can not be/ }
messages += almost_everything
end
messages
end
#
# Returns the name of the +Cookbook+ parameterized.
#
# @return [String] the name of the +Cookbook+ parameterized
#
def to_param
name.parameterize
end
#
# Return the specified +CookbookVersion+. Raises an
# +ActiveRecord::RecordNotFound+ if the version does not exist. Versions can
# be specified with either underscores or dots.
#
# @example
# cookbook.get_version!("1_0_0")
# cookbook.get_version!("1.0.0")
# cookbook.get_version!("latest")
#
# @param version [String] the version of the Cookbook to find. Pass in
# 'latest' to return the latest version of the
# cookbook.
#
# @return [CookbookVersion] the +CookbookVersion+ with the version specified
#
def get_version!(version)
version.gsub!('_', '.')
if version == 'latest'
latest_cookbook_version
else
cookbook_versions.find_by!(version: version)
end
end
#
# Saves a new version of the cookbook as specified by the given metadata, tarball
# and readme. If it's a new cookbook the user specified becomes the owner.
#
# @raise [ActiveRecord::RecordInvalid] if the new version fails validation
# @raise [ActiveRecord::RecordNotUnique] if the new version is a duplicate of
# an existing version for this cookbook
#
# @return [CookbookVersion] the Cookbook Version that was published
#
# @param params [CookbookUpload::Parameters] the upload parameters
#
def publish_version!(params)
metadata = params.metadata
if metadata.privacy &&
ENV['ENFORCE_PRIVACY'].present? &&
ENV['ENFORCE_PRIVACY'] == 'true'
errors.add(:base, I18n.t('api.error_messages.privacy_violation'))
raise ActiveRecord::RecordInvalid.new(self)
end
tarball = params.tarball
readme = params.readme
changelog = params.changelog
dependency_names = metadata.dependencies.keys
existing_cookbooks = Cookbook.with_name(dependency_names)
cookbook_version = nil
transaction do
cookbook_version = cookbook_versions.build(
cookbook: self,
description: metadata.description,
license: metadata.license,
version: metadata.version,
tarball: tarball,
readme: readme.contents,
readme_extension: readme.extension,
changelog: changelog.contents,
changelog_extension: changelog.extension
)
self.updated_at = Time.now
[:source_url, :issues_url].each do |url|
url_val = metadata.send(url)
if url_val.present?
write_attribute(url, url_val)
end
end
self.privacy = metadata.privacy
save!
metadata.platforms.each do |name, version_constraint|
cookbook_version.add_supported_platform(name, version_constraint)
end
metadata.dependencies.each do |name, version_constraint|
cookbook_version.cookbook_dependencies.create!(
name: name,
version_constraint: version_constraint,
cookbook: existing_cookbooks.find { |c| c.name == name }
)
end
end
cookbook_version
end
#
# Returns true if the user passed follows the cookbook.
#
# @return [TrueClass]
#
# @param user [User]
#
def followed_by?(user)
cookbook_followers.where(user: user).any?
end
#
# Returns the platforms supported by the latest version of this cookbook.
#
# @return [Array<SupportedVersion>]
#
def supported_platforms
latest_cookbook_version.supported_platforms
end
#
# Returns the dependencies of the latest version of this cookbook.
#
# @return [Array<CookbookDependency>]
#
def cookbook_dependencies
latest_cookbook_version.cookbook_dependencies
end
#
# Returns all of the CookbookDependency records that are contingent upon this one.
#
# @return [Array<CookbookDependency>]
#
def contingents
CookbookDependency.includes(cookbook_version: :cookbook)
.where(cookbook_id: id)
.sort_by do |cd|
[
cd.cookbook_version.cookbook.name,
Semverse::Version.new(cd.cookbook_version.version)
]
end
end
#
# The username of this cookbook's owner
#
# @return [String]
#
def maintainer
owner.username
end
#
# The total number of times this cookbook has been downloaded
#
# @return [Fixnum]
#
def download_count
web_download_count + api_download_count
end
#
# Sets the cookbook's deprecated attribute to true, assigns the replacement
# cookbook if specified and saves the cookbook.
#
# A cookbook can only be replaced with a cookbook that is not deprecated.
#
# @param replacement_cookbook [Cookbook] the cookbook to succeed this cookbook
# once deprecated
#
# @return [Boolean] whether or not the cookbook was successfully deprecated
# and saved
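#
# @example (illustrative; variable names are placeholders)
#   cookbook.deprecate(active_cookbook)     # => true
#   cookbook.deprecate(deprecated_cookbook) # => false, with an error added to :base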
#
def deprecate(replacement_cookbook)
if replacement_cookbook.deprecated?
errors.add(:base, I18n.t('cookbook.deprecate_with_deprecated_failure'))
return false
else
self.deprecated = true
self.replacement = replacement_cookbook
save
end
end
#
# Searches for cookbooks based on the +query+ parameter. Returns results that
# are elgible for deprecation (not deprecated and not this cookbook).
#
# @param query [String] the search term
#
# @return [Array<Cookbook> the +Cookbook+ search results
#
def deprecate_search(query)
Cookbook.search(query).where(deprecated: false).where.not(id: id)
end
private
#
# Populates the +lowercase_name+ attribute with the lowercase +name+
#
# This exists until Rails schema dumping supports Posgres's expression
# indices, which would allow us to create an index on LOWER(name). To do that
# now, we'd have to use the raw SQL schema dumping functionality, which is
# less-than ideal
#
def copy_name_to_lowercase_name
self.lowercase_name = name.to_s.downcase
end
end
| 29.472019 | 108 | 0.683728 |
6afba203a23b487c5cd8e505b7fe0647355bd43e | 1,643 | module EventsHelper
def show_attendees
content_tag_for(:h2, @event.attendees) do |attendee|
"#{@event.attendees.index(attendee) + 1})" + attendee.name unless @event.attendees.nil?
end
end
def past_events_header
content_tag(:h1) do
'past events' unless @past_events.nil?
end
end
def past_events_index
content_tag_for(:h2, @past_events) do |event|
"#{@past_events.index(event) + 1})#{event.name}to take place on #{event.date}" unless @past_events.nil?
end
end
def past_events_see
content_tag_for(:h2, @past_events) do |event|
link_to 'see event details', event_path(event) unless @past_events.nil?
end
end
def upcoming_events_header
content_tag(:h1) do
'upcoming events' unless @upcoming_events.nil?
end
end
def upcoming_events_index
content_tag_for(:h2, @upcoming_events) do |event|
"#{@upcoming_events.index(event) + 1})#{event.name}to take place on #{event.date}" unless @upcoming_events.nil?
end
end
def upcoming_events_see
content_tag_for(:h2, @upcoming_events) do |event|
link_to 'see event details', event_path(event) unless @upcoming_events.nil?
end
end
def upcoming_events_book
content_tag_for(:h2, @upcoming_events) do |event|
(link_to 'book', "/attend/#{event.id}", method: :post) unless @upcoming_events.nil?
end
end
def cancel
content_tag(:h3) do
if current_user.created_events.include?(@event)
(link_to 'cancel this event', event_path(@event),
method: :delete,
data: { confirm: 'Are you sure?' })
end
end
end
end
| 27.383333 | 117 | 0.670116 |
bb24263266a637a4cd72d94d792892b92c92bf02 | 3,092 | ##########################################################################
# Copyright 2007 Applied Research in Patacriticism and the University of Virginia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
class AddTitleMessageAnnotationMessageFields < ActiveRecord::Migration
class ExhibitType < ActiveRecord::Base
end
class ExhibitPageType < ActiveRecord::Base
end
class ExhibitSectionType < ActiveRecord::Base
end
def self.up
add_column :exhibit_types, :title_message, :string
add_column :exhibit_types, :annotation_message, :string
add_column :exhibit_page_types, :title_message, :string
add_column :exhibit_page_types, :annotation_message, :string
add_column :exhibit_section_types, :title_message, :string
add_column :exhibit_section_types, :annotation_message, :string
ann_bib = ExhibitType.find_by_template "annotated_bibliography"
ExhibitType.update ann_bib.id, :title_message => "(Insert Bibliography Title)", :annotation_message => "(Insert General Description)" rescue nil
ill_ess = ExhibitType.find_by_template "illustrated_essay"
ExhibitType.update ill_ess.id, :title_message => "(Insert Essay Title)", :annotation_message => "(Insert Abstract)" rescue nil
ann_bib_page = ExhibitPageType.find_by_name "Annotated Bibliography Page Type"
ExhibitPageType.update ann_bib_page.id, :title_message => "(Insert Page Title)", :annotation_message => "(Insert Page Notes)" rescue nil
ill_ess_page = ExhibitPageType.find_by_name "Illustrated Essay Page Type"
ExhibitPageType.update ill_ess_page.id, :title_message => "(Insert Page Title)", :annotation_message => "(Insert Page Notes)" rescue nil
ann_bib_section = ExhibitSectionType.find_by_template "citation"
ExhibitSectionType.update ann_bib_section.id, :title_message => "(Insert Section Title)", :annotation_message => "(Insert Section Description)" rescue nil
ill_ess_section = ExhibitSectionType.find_by_template "ie_generic"
ExhibitSectionType.update ill_ess_section.id, :title_message => "(Insert Section Title)", :annotation_message => "(Insert Text)" rescue nil
end
def self.down
remove_column :exhibit_types, :title_message
remove_column :exhibit_types, :annotation_message
remove_column :exhibit_page_types, :title_message
remove_column :exhibit_page_types, :annotation_message
remove_column :exhibit_section_types, :title_message
remove_column :exhibit_section_types, :annotation_message
end
end
| 47.569231 | 158 | 0.733182 |
03ce48f6f47a38ef4017f72134a132ad17eb3372 | 711 | cask 'keka' do
version '1.0.6'
sha256 'c164cdca023c47a9c61ef1c93bf64252636ed73433bf36117408db4a1647c966'
url "http://www.kekaosx.com/release/Keka-#{version}.dmg"
appcast 'http://www.kekaosx.com/update/keka.xml',
checkpoint: '76c07c4ead37d0ec9771f0aec315596d496fa0f80aacb9188c5ac5d9cf631e4e'
name 'Keka'
homepage 'http://www.kekaosx.com/'
auto_updates true
depends_on macos: '>= :lion'
app 'Keka.app'
zap delete: [
'~/Library/Application Support/Keka',
'~/Library/Caches/com.aone.keka',
'~/Library/Preferences/com.aone.keka.plist',
'~/Library/Saved Application State/com.aone.keka.savedState',
]
end
| 30.913043 | 88 | 0.658228 |
33f92a0388eec8bdb88f32c09a7ae685a9bfcbdd | 6,870 | describe Fastlane do
describe Fastlane::FastFile do
describe "upload_symbols_to_crashlytics" do
before :each do
allow(FastlaneCore::FastlaneFolder).to receive(:path).and_return(nil)
end
it "extracts zip files" do
binary_path = './fastlane/spec/fixtures/screenshots/screenshot1.png'
dsym_path = './fastlane/spec/fixtures/dSYM/Themoji.dSYM.zip'
expect(Fastlane::Actions).to receive(:sh).with("unzip -qo #{File.expand_path(dsym_path).shellescape}")
Fastlane::FastFile.new.parse("lane :test do
upload_symbols_to_crashlytics(
dsym_path: '#{dsym_path}',
api_token: 'something123',
binary_path: '#{binary_path}')
end").runner.execute(:test)
end
it "uploads dSYM files with api_token" do
binary_path = './spec/fixtures/screenshots/screenshot1.png'
dsym_path = './spec/fixtures/dSYM/Themoji.dSYM'
gsp_path = './spec/fixtures/plist/With Space.plist'
command = []
command << File.expand_path(File.join("fastlane", binary_path)).shellescape
command << "-a something123"
command << "-p ios"
command << File.expand_path(File.join("fastlane", dsym_path)).shellescape
expect(Fastlane::Actions).to receive(:sh).with(command.join(" "), log: false)
Fastlane::FastFile.new.parse("lane :test do
upload_symbols_to_crashlytics(
dsym_path: 'fastlane/#{dsym_path}',
api_token: 'something123',
binary_path: 'fastlane/#{binary_path}')
end").runner.execute(:test)
end
it "uploads dSYM files with gsp_path" do
binary_path = './spec/fixtures/screenshots/screenshot1.png'
dsym_path = './spec/fixtures/dSYM/Themoji.dSYM'
gsp_path = './spec/fixtures/plist/With Space.plist'
command = []
command << File.expand_path(File.join("fastlane", binary_path)).shellescape
command << "-gsp #{File.expand_path(File.join('fastlane', gsp_path)).shellescape}"
command << "-p ios"
command << File.expand_path(File.join("fastlane", dsym_path)).shellescape
expect(Fastlane::Actions).to receive(:sh).with(command.join(" "), log: false)
Fastlane::FastFile.new.parse("lane :test do
upload_symbols_to_crashlytics(
dsym_path: 'fastlane/#{dsym_path}',
gsp_path: 'fastlane/#{gsp_path}',
binary_path: 'fastlane/#{binary_path}')
end").runner.execute(:test)
end
it "uploads dSYM files with auto-finding gsp_path" do
binary_path = './spec/fixtures/screenshots/screenshot1.png'
dsym_path = './spec/fixtures/dSYM/Themoji.dSYM'
gsp_path = './spec/fixtures/plist/GoogleService-Info.plist'
command = []
command << File.expand_path(File.join("fastlane", binary_path)).shellescape
command << "-gsp #{File.expand_path(File.join('fastlane', gsp_path)).shellescape}"
command << "-p ios"
command << File.expand_path(File.join("fastlane", dsym_path)).shellescape
expect(Fastlane::Actions).to receive(:sh).with(command.join(" "), log: false)
Fastlane::FastFile.new.parse("lane :test do
upload_symbols_to_crashlytics(
dsym_path: 'fastlane/#{dsym_path}',
binary_path: 'fastlane/#{binary_path}')
end").runner.execute(:test)
end
it "raises exception if no api access is given" do
allow(Fastlane::Actions::UploadSymbolsToCrashlyticsAction).to receive(:find_gsp_path).and_return(nil)
binary_path = './spec/fixtures/screenshots/screenshot1.png'
dsym_path = './spec/fixtures/dSYM/Themoji.dSYM'
expect do
result = Fastlane::FastFile.new.parse("lane :test do
upload_symbols_to_crashlytics(
dsym_path: 'fastlane/#{dsym_path}',
binary_path: 'fastlane/#{binary_path}')
end").runner.execute(:test)
end.to raise_error(FastlaneCore::Interface::FastlaneError)
end
it "raises exception if given gsp_path is not found" do
gsp_path = './spec/fixtures/plist/_Not Exist_.plist'
expect do
Fastlane::FastFile.new.parse("lane :test do
upload_symbols_to_crashlytics(
gsp_path: 'fastlane/#{gsp_path}',
api_token: 'something123')
end").runner.execute(:test)
end.to raise_error(FastlaneCore::Interface::FastlaneError)
end
context "with dsym_paths" do
before :each do
# dsym_path option to be nil
ENV[Fastlane::Actions::SharedValues::DSYM_OUTPUT_PATH.to_s] = nil
Fastlane::Actions.lane_context[Fastlane::Actions::SharedValues::DSYM_PATHS] = nil
allow(Dir).to receive(:[]).and_return([])
end
it "uploads dSYM files with gsp_path" do
binary_path = './spec/fixtures/screenshots/screenshot1.png'
dsym_1_path = './spec/fixtures/dSYM/Themoji.dSYM'
dsym_2_path = './spec/fixtures/dSYM/Themoji2.dSYM'
gsp_path = './spec/fixtures/plist/With Space.plist'
command = []
command << File.expand_path(File.join("fastlane", binary_path)).shellescape
command << "-gsp #{File.expand_path(File.join('fastlane', gsp_path)).shellescape}"
command << "-p ios"
command_1 = command + [File.expand_path(File.join("fastlane", dsym_1_path)).shellescape]
command_2 = command + [File.expand_path(File.join("fastlane", dsym_2_path)).shellescape]
expect(Fastlane::Actions).to receive(:sh).with(command_1.join(" "), log: false)
expect(Fastlane::Actions).to receive(:sh).with(command_2.join(" "), log: false)
Fastlane::FastFile.new.parse("lane :test do
upload_symbols_to_crashlytics(
dsym_paths: ['fastlane/#{dsym_1_path}', 'fastlane/#{dsym_2_path}'],
gsp_path: 'fastlane/#{gsp_path}',
binary_path: 'fastlane/#{binary_path}')
end").runner.execute(:test)
end
it "raises exception if a dsym_paths not found" do
binary_path = './spec/fixtures/screenshots/screenshot1.png'
dsym_1_path = './spec/fixtures/dSYM/Themoji.dSYM'
dsym_not_here_path = './spec/fixtures/dSYM/Themoji_not_here.dSYM'
gsp_path = './spec/fixtures/plist/With Space.plist'
expect do
Fastlane::FastFile.new.parse("lane :test do
upload_symbols_to_crashlytics(
dsym_paths: ['fastlane/#{dsym_1_path}', 'fastlane/#{dsym_not_here_path}'],
gsp_path: 'fastlane/#{gsp_path}',
binary_path: 'fastlane/#{binary_path}')
end").runner.execute(:test)
end.to raise_error(FastlaneCore::Interface::FastlaneError)
end
end
end
end
end
| 42.670807 | 110 | 0.629694 |
08a3101313c801251e357b58fe168139b5551fdf | 5,511 | # frozen_string_literal: true
require "abstract_class"
require_relative "core"
require_relative "mixins/built_in"
module RSpec
module JsonMatchers
module Expectations
# @api private
# All classes within module should NOT be able
# to be used directly / extended
#
# All classes in this module are internal expectations used
# when non-expectation object/class is passed in
# Extension gems should have their own namespace and
# should NOT add new classes to this namespace
# Classes here have dependency on {Core} & {Mixins::BuiltIn}
#
# TODO: Remove dependency on {Mixins::BuiltIn}
module Private
# @api private
# User should just pass an object in
#
# Takes exactly one object
# Use stored value & `==` for checking `value`
class Eq < Core::SingleValueCallableExpectation
def expect?(value)
value == expected_value
end
private
attr_reader :expected_value
def initialize(value)
@expected_value = value
end
end
# @api private
# User should just pass a class in
#
# Takes exactly one object
# Use stored class for checking `value`
#
# @note
# Might use a whitelist of acceptable classes
# and raise error if other things passed in
# in the future
class KindOf < Core::SingleValueCallableExpectation
EXPECTED_CLASS = Class
private_constant :EXPECTED_CLASS
def expect?(value)
value.is_a?(expected_class)
end
private
attr_reader :expected_class
def initialize(value)
unless value.is_a?(EXPECTED_CLASS)
raise ArgumentError, "a #{EXPECTED_CLASS} is required"
end
@expected_class = value
end
end
# @api private
# User should just pass a {Range} in
#
# Takes exactly one object
# Use stored proc for checking `value`
class InRange < Core::SingleValueCallableExpectation
EXPECTED_CLASS = Range
private_constant :EXPECTED_CLASS
def expect?(value)
range.cover?(value)
end
private
attr_reader :range
def initialize(value)
unless value.is_a?(EXPECTED_CLASS)
raise ArgumentError, "a #{EXPECTED_CLASS} is required"
end
@range = value
end
end
# @api private
# User should just pass a {Regexp} in
#
# Takes exactly one object
# Use stored regexp for checking `value`
class MatchingRegexp < Core::SingleValueCallableExpectation
EXPECTED_CLASS = Regexp
private_constant :EXPECTED_CLASS
def expect?(value)
# regex =~ string seems to be fastest
# @see https://stackoverflow.com/questions/11887145/fastest-way-to-check-if-a-string-matches-or-not-a-regexp-in-ruby
value.is_a?(String) && regexp =~ value
end
private
attr_reader :regexp
def initialize(value)
unless value.is_a?(EXPECTED_CLASS)
raise ArgumentError, "a #{EXPECTED_CLASS} is required"
end
@regexp = value
end
end
# @api private
# User should just pass a callable in
#
# Takes exactly one object
# Use stored proc for checking `value`
class SatisfyingCallable < Core::SingleValueCallableExpectation
def expect?(value)
callable.call(value)
end
private
attr_reader :callable
def initialize(value)
unless value.respond_to?(:call)
raise(
ArgumentError,
"an object which respond to `:call` is required",
)
end
@callable = value
end
end
# @api private
# Used internally for returning false
#
# Always "fail"
class Nothing < Expectations::Core::SingletonExpectation
def expect?(*_args)
false
end
end
# @api private
# Used internally by a matcher method
#
# Comparing to {Expectations::Mixins::BuiltIn::ArrayWithSize}
# This also accepts `Hash` and `Array`
# and return false for collection matching
class ArrayWithSize < Expectations::Mixins::BuiltIn::ArrayWithSize
# `Fixnum` & `Bignum` will be returned instead of `Integer`
# in `#class` for numbers
ADDITIONAL_EXPECTATION_CLASS_MAPPINGS = {
Array => ->(_) { Expectations::Private::Nothing::INSTANCE },
Hash => ->(_) { Expectations::Private::Nothing::INSTANCE },
}.freeze
private_constant :ADDITIONAL_EXPECTATION_CLASS_MAPPINGS
class << self
private
# Overrides
# {Expectations::Mixins::BuiltIn::ArrayWithSize.
# expectation_classes_mappings}
#
# @return [Hash]
def expectation_classes_mappings
super.merge(
ADDITIONAL_EXPECTATION_CLASS_MAPPINGS,
)
end
end
end
end
end
end
end
| 28.117347 | 128 | 0.561604 |
623ae241051d4ce01dbadb452321b532717e0fd8 | 491 | #!/usr/bin/env rspec
require 'spec_helper'
require 'puppet/node'
require 'puppet/indirector/memory'
require 'puppet/indirector/catalog/store_configs'
class Puppet::Resource::Catalog::StoreConfigsTesting < Puppet::Indirector::Memory
end
describe Puppet::Resource::Catalog::StoreConfigs do
after :each do
Puppet::Resource::Catalog.indirection.reset_terminus_class
Puppet::Resource::Catalog.indirection.cache_class = nil
end
it_should_behave_like "a StoreConfigs terminus"
end
| 27.277778 | 81 | 0.796334 |
621935c60a87225f9631fe4045f0b722f3ae5bb2 | 22,836 | #-- vim:sw=2:et
#++
#
# :title: Markov plugin
#
# Author:: Tom Gilbert <[email protected]>
# Copyright:: (C) 2005 Tom Gilbert
#
# Contribute to chat with random phrases built from word sequences learned
# by listening to chat
class MarkovPlugin < Plugin
Config.register Config::BooleanValue.new('markov.enabled',
:default => false,
:desc => "Enable and disable the plugin")
Config.register Config::IntegerValue.new('markov.probability',
:default => 25,
:validate => Proc.new { |v| (0..100).include? v },
:desc => "Percentage chance of markov plugin chipping in")
Config.register Config::ArrayValue.new('markov.ignore',
:default => [],
:desc => "Hostmasks and channel names markov should NOT learn from (e.g. idiot*!*@*, #privchan).")
Config.register Config::ArrayValue.new('markov.readonly',
:default => [],
:desc => "Hostmasks and channel names markov should NOT talk to (e.g. idiot*!*@*, #privchan).")
Config.register Config::IntegerValue.new('markov.max_words',
:default => 50,
:validate => Proc.new { |v| (0..100).include? v },
:desc => "Maximum number of words the bot should put in a sentence")
Config.register Config::FloatValue.new('markov.learn_delay',
:default => 0.5,
:validate => Proc.new { |v| v >= 0 },
:desc => "Time the learning thread spends sleeping after learning a line. If set to zero, learning from files can be very CPU intensive, but also faster.")
Config.register Config::IntegerValue.new('markov.delay',
:default => 5,
:validate => Proc.new { |v| v >= 0 },
:desc => "Wait short time before contributing to conversation.")
Config.register Config::IntegerValue.new('markov.answer_addressed',
:default => 50,
:validate => Proc.new { |v| (0..100).include? v },
:desc => "Probability of answer when addressed by nick")
Config.register Config::ArrayValue.new('markov.ignore_patterns',
:default => [],
:desc => "Ignore these word patterns")
MARKER = :"\r\n"
# upgrade a registry entry from 0.9.14 and earlier, converting the Arrays
# into Hashes of weights
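  # A minimal sketch of the data shapes involved, with invented values:
  # the old (v1) registry stored a flat Array of observed next words,
  # while the new (v2) chain stores [total_count, {word => weight}].
  #
  #   @registry["hello world"]  #=> ["foo", "foo", "bar"]             (v1, before)
  #   @chains["hello world"]    #=> [3, { "foo" => 2, "bar" => 1 }]   (v2, after)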
def upgrade_entry(k, logfile)
logfile.puts "\t#{k.inspect}"
logfile.flush
logfile.fsync
ar = @registry[k]
# wipe the current key
@registry.delete(k)
# discard empty keys
if ar.empty?
logfile.puts "\tEMPTY"
return
end
# otherwise, proceed
logfile.puts "\t#{ar.inspect}"
# re-encode key to UTF-8 and cleanup as needed
words = k.split.map do |w|
BasicUserMessage.strip_formatting(
@bot.socket.filter.in(w)
).sub(/\001$/,'')
end
# old import that failed to split properly?
if words.length == 1 and words.first.include? '/'
# split at the last /
unsplit = words.first
at = unsplit.rindex('/')
words = [unsplit[0,at], unsplit[at+1..-1]]
end
# if any of the re-split/re-encoded words have spaces,
# or are empty, we would get a chain we can't convert,
# so drop it
if words.first.empty? or words.first.include?(' ') or
words.last.empty? or words.last.include?(' ')
logfile.puts "\tSKIPPED"
return
end
# former unclean CTCP, we can't convert this
if words.first[0] == 1
logfile.puts "\tSKIPPED"
return
end
# nonword CTCP => SKIP
# someword CTCP => nonword someword
if words.last[0] == 1
if words.first == "nonword"
logfile.puts "\tSKIPPED"
return
end
words.unshift MARKER
words.pop
end
# intern the old keys
words.map! do |w|
['nonword', MARKER].include?(w) ? MARKER : w.chomp("\001")
end
newkey = words.join(' ')
logfile.puts "\t#{newkey.inspect}"
# the new key exists already, so we want to merge
if k != newkey and @registry.key? newkey
ar2 = @registry[newkey]
logfile.puts "\tMERGE"
logfile.puts "\t\t#{ar2.inspect}"
ar.push(*ar2)
# and get rid of the key
@registry.delete(newkey)
end
total = 0
hash = Hash.new(0)
@chains_mutex.synchronize do
if @chains.key? newkey
ar2 = @chains[newkey]
total += ar2.first
hash.update ar2.last
end
ar.each do |word|
case word
when :nonword
# former marker
sym = MARKER
else
# we convert old words into UTF-8, cleanup, resplit if needed,
# and only get the first word. we may lose some data for old
# missplits, but this is the best we can do
w = BasicUserMessage.strip_formatting(
@bot.socket.filter.in(word).split.first
)
case w
when /^\001\S+$/, "\001", ""
# former unclean CTCP or end of CTCP
next
else
# intern after clearing leftover end-of-actions if present
sym = w.chomp("\001")
end
end
hash[sym] += 1
total += 1
end
if hash.empty?
logfile.puts "\tSKIPPED"
return
end
logfile.puts "\t#{[total, hash].inspect}"
@chains[newkey] = [total, hash]
end
end
def upgrade_registry
# we load all the keys and then iterate over this array because
# running each() on the registry and updating it at the same time
# doesn't work
keys = @registry.keys
# no registry, nothing to do
return if keys.empty?
ki = 0
log "starting markov database conversion thread (v1 to v2, #{keys.length} keys)"
keys.each { |k| @upgrade_queue.push k }
@upgrade_queue.push nil
@upgrade_thread = Thread.new do
logfile = File.open(@bot.path('markov-conversion.log'), 'a')
logfile.puts "=== conversion thread started #{Time.now} ==="
while k = @upgrade_queue.pop
ki += 1
logfile.puts "Key #{ki} (#{@upgrade_queue.length} in queue):"
begin
upgrade_entry(k, logfile)
rescue Exception => e
logfile.puts "=== ERROR ==="
logfile.puts e.pretty_inspect
logfile.puts "=== EREND ==="
end
sleep @bot.config['markov.learn_delay'] unless @bot.config['markov.learn_delay'].zero?
end
logfile.puts "=== conversion thread stopped #{Time.now} ==="
logfile.close
end
@upgrade_thread.priority = -1
end
attr_accessor :chains
def initialize
super
@registry.set_default([])
if @registry.has_key?('enabled')
@bot.config['markov.enabled'] = @registry['enabled']
@registry.delete('enabled')
end
if @registry.has_key?('probability')
@bot.config['markov.probability'] = @registry['probability']
@registry.delete('probability')
end
if @bot.config['markov.ignore_users']
debug "moving markov.ignore_users to markov.ignore"
@bot.config['markov.ignore'] = @bot.config['markov.ignore_users'].dup
@bot.config.delete('markov.ignore_users'.to_sym)
end
@chains = @registry.sub_registry('v2')
@chains.set_default([])
@rchains = @registry.sub_registry('v2r')
@rchains.set_default([])
@chains_mutex = Mutex.new
@rchains_mutex = Mutex.new
@upgrade_queue = Queue.new
@upgrade_thread = nil
upgrade_registry
@learning_queue = Queue.new
@learning_thread = Thread.new do
while s = @learning_queue.pop
learn_line s
sleep @bot.config['markov.learn_delay'] unless @bot.config['markov.learn_delay'].zero?
end
end
@learning_thread.priority = -1
end
def cleanup
if @upgrade_thread and @upgrade_thread.alive?
debug 'closing conversion thread'
@upgrade_queue.clear
@upgrade_queue.push nil
@upgrade_thread.join
debug 'conversion thread closed'
end
debug 'closing learning thread'
@learning_queue.clear
@learning_queue.push nil
@learning_thread.join
debug 'learning thread closed'
@chains.close
@rchains.close
super
end
# pick a word from the registry using the pair as key.
def pick_word(word1, word2=MARKER, chainz=@chains)
k = "#{word1} #{word2}"
return MARKER unless chainz.key? k
wordlist = chainz[k]
pick_word_from_list wordlist
end
# pick a word from weighted hash
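  # For illustration (invented values), a wordlist of [3, { "foo" => 2, "bar" => 1 }]
  # would return "foo" roughly two times out of three and "bar" otherwise.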
def pick_word_from_list(wordlist)
total = wordlist.first
hash = wordlist.last
return MARKER if total == 0
return hash.keys.first if hash.length == 1
hit = rand(total)
ret = MARKER
hash.each do |k, w|
hit -= w
if hit < 0
ret = k
break
end
end
return ret
end
def generate_string(word1, word2)
# limit to max of markov.max_words words
if word2
output = [word1, word2]
else
output = word1
keys = []
@chains.each_key(output) do |key|
if key.downcase.include? output
keys << key
else
break
end
end
return nil if keys.empty?
output = keys[rand(keys.size)].split(/ /)
end
output = output.split(/ /) unless output.is_a? Array
input = [word1, word2]
while output.length < @bot.config['markov.max_words'] and (output.first != MARKER or output.last != MARKER) do
if output.last != MARKER
output << pick_word(output[-2], output[-1])
end
if output.first != MARKER
output.insert 0, pick_word(output[0], output[1], @rchains)
end
end
output.delete MARKER
if output == input
nil
else
output.join(" ")
end
end
def help(plugin, topic="")
topic, subtopic = topic.split
case topic
when "delay"
"markov delay <value> => Set message delay"
when "ignore"
case subtopic
when "add"
"markov ignore add <hostmask|channel> => ignore a hostmask or a channel"
when "list"
"markov ignore list => show ignored hostmasks and channels"
when "remove"
"markov ignore remove <hostmask|channel> => unignore a hostmask or channel"
else
"ignore hostmasks or channels -- topics: add, remove, list"
end
when "readonly"
case subtopic
when "add"
"markov readonly add <hostmask|channel> => read-only a hostmask or a channel"
when "list"
"markov readonly list => show read-only hostmasks and channels"
when "remove"
"markov readonly remove <hostmask|channel> => unreadonly a hostmask or channel"
else
"restrict hostmasks or channels to read only -- topics: add, remove, list"
end
when "status"
"markov status => show if markov is enabled, probability and amount of messages in queue for learning"
when "probability"
"markov probability [<percent>] => set the % chance of rbot responding to input, or display the current probability"
when "chat"
case subtopic
when "about"
"markov chat about <word> [<another word>] => talk about <word> or riff on a word pair (if possible)"
else
"markov chat => try to say something intelligent"
end
when "learn"
["markov learn from <file> [testing [<num> lines]] [using pattern <pattern>]:",
"learn from the text in the specified <file>, optionally using the given <pattern> to filter the text.",
"you can sample what would be learned by specifying 'testing <num> lines'"].join(' ')
else
"markov plugin: listens to chat to build a markov chain, with which it can (perhaps) attempt to (inanely) contribute to 'discussion'. Sort of.. Will get a *lot* better after listening to a lot of chat. Usage: 'chat' to attempt to say something relevant to the last line of chat, if it can -- help topics: ignore, readonly, delay, status, probability, chat, chat about, learn"
end
end
def clean_message(m)
str = m.plainmessage.dup
str =~ /^(\S+)([:,;])/
if $1 and m.target.is_a? Irc::Channel and m.target.user_nicks.include? $1.downcase
str.gsub!(/^(\S+)([:,;])\s+/, "")
end
str.gsub!(/\s{2,}/, ' ') # fix for two or more spaces
return str.strip
end
def probability?
return @bot.config['markov.probability']
end
def status(m,params)
if @bot.config['markov.enabled']
reply = _("markov is currently enabled, %{p}% chance of chipping in") % { :p => probability? }
l = @learning_queue.length
reply << (_(", %{l} messages in queue") % {:l => l}) if l > 0
l = @upgrade_queue.length
reply << (_(", %{l} chains to upgrade") % {:l => l}) if l > 0
else
reply = _("markov is currently disabled")
end
m.reply reply
end
def ignore?(m=nil)
return false unless m
return true if m.private?
return true if m.prefixed?
@bot.config['markov.ignore'].each do |mask|
return true if m.channel.downcase == mask.downcase
return true if m.source.matches?(mask)
end
return false
end
def readonly?(m=nil)
return false unless m
@bot.config['markov.readonly'].each do |mask|
return true if m.channel.downcase == mask.downcase
return true if m.source.matches?(mask)
end
return false
end
def ignore(m, params)
action = params[:action]
user = params[:option]
case action
when 'remove'
if @bot.config['markov.ignore'].include? user
s = @bot.config['markov.ignore']
s.delete user
@bot.config['ignore'] = s
m.reply _("%{u} removed") % { :u => user }
else
m.reply _("not found in list")
end
when 'add'
if user
if @bot.config['markov.ignore'].include?(user)
m.reply _("%{u} already in list") % { :u => user }
else
@bot.config['markov.ignore'] = @bot.config['markov.ignore'].push user
m.reply _("%{u} added to markov ignore list") % { :u => user }
end
else
m.reply _("give the name of a person or channel to ignore")
end
when 'list'
m.reply _("I'm ignoring %{ignored}") % { :ignored => @bot.config['markov.ignore'].join(", ") }
else
m.reply _("have markov ignore the input from a hostmask or a channel. usage: markov ignore add <mask or channel>; markov ignore remove <mask or channel>; markov ignore list")
end
end
def readonly(m, params)
action = params[:action]
user = params[:option]
case action
when 'remove'
if @bot.config['markov.readonly'].include? user
s = @bot.config['markov.readonly']
s.delete user
@bot.config['markov.readonly'] = s
m.reply _("%{u} removed") % { :u => user }
else
m.reply _("not found in list")
end
when 'add'
if user
if @bot.config['markov.readonly'].include?(user)
m.reply _("%{u} already in list") % { :u => user }
else
@bot.config['markov.readonly'] = @bot.config['markov.readonly'].push user
m.reply _("%{u} added to markov readonly list") % { :u => user }
end
else
m.reply _("give the name of a person or channel to read only")
end
when 'list'
m.reply _("I'm only reading %{readonly}") % { :readonly => @bot.config['markov.readonly'].join(", ") }
else
m.reply _("have markov not answer to input from a hostmask or a channel. usage: markov readonly add <mask or channel>; markov readonly remove <mask or channel>; markov readonly list")
end
end
def enable(m, params)
@bot.config['markov.enabled'] = true
m.okay
end
def probability(m, params)
if params[:probability]
@bot.config['markov.probability'] = params[:probability].to_i
m.okay
else
m.reply _("markov has a %{prob}% chance of chipping in") % { :prob => probability? }
end
end
def disable(m, params)
@bot.config['markov.enabled'] = false
m.okay
end
def should_talk(m)
return false unless @bot.config['markov.enabled']
prob = m.address? ? @bot.config['markov.answer_addressed'] : probability?
return true if prob > rand(100)
return false
end
# Generates all sequence pairs from array
# seq_pairs [1,2,3,4] == [ [1,2], [2,3], [3,4]]
def seq_pairs(arr)
res = []
0.upto(arr.size-2) do |i|
res << [arr[i], arr[i+1]]
end
res
end
def set_delay(m, params)
if params[:delay] == "off"
@bot.config["markov.delay"] = 0
m.okay
elsif !params[:delay]
m.reply _("Message delay is %{delay}" % { :delay => @bot.config["markov.delay"]})
else
@bot.config["markov.delay"] = params[:delay].to_i
m.okay
end
end
def reply_delay(m, line)
m.replied = true
if @bot.config['markov.delay'] > 0
@bot.timer.add_once(1 + rand(@bot.config['markov.delay'])) {
m.reply line, :nick => false, :to => :public
}
else
m.reply line, :nick => false, :to => :public
end
end
def random_markov(m, message)
return unless should_talk(m)
words = clean_message(m).split(/\s+/)
if words.length < 2
line = generate_string words.first, nil
if line and message.index(line) != 0
reply_delay m, line
return
end
else
pairs = seq_pairs(words).sort_by { rand }
pairs.each do |word1, word2|
line = generate_string(word1, word2)
if line and message.index(line) != 0
reply_delay m, line
return
end
end
words.sort_by { rand }.each do |word|
line = generate_string word.first, nil
if line and message.index(line) != 0
reply_delay m, line
return
end
end
end
end
def chat(m, params)
line = generate_string(params[:seed1], params[:seed2])
if line and line != [params[:seed1], params[:seed2]].compact.join(" ")
m.reply line
else
m.reply _("I can't :(")
end
end
def rand_chat(m, params)
# pick a random pair from the db and go from there
word1, word2 = MARKER, MARKER
output = Array.new
@bot.config['markov.max_words'].times do
word3 = pick_word(word1, word2)
break if word3 == MARKER
output << word3
word1, word2 = word2, word3
end
if output.length > 1
m.reply output.join(" ")
else
m.reply _("I can't :(")
end
end
def learn(*lines)
lines.each { |l| @learning_queue.push l }
end
def unreplied(m)
return if ignore? m
# in channel message, the kind we are interested in
message = m.plainmessage
if m.action?
message = "#{m.sourcenick} #{message}"
end
random_markov(m, message) unless readonly? m or m.replied?
learn clean_message(m)
end
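  # Records one observation in both the forward chain ("word1 word2" => word3)
  # and the reverse chain ("word2 word3" => word1). A hypothetical call with
  # invented words:
  #
  #   learn_triplet("hello", "there", :world)
  #   # @chains["hello there"]   gains one count for :world
  #   # @rchains["there world"]  gains one count for "hello"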
def learn_triplet(word1, word2, word3)
k = "#{word1} #{word2}"
rk = "#{word2} #{word3}"
@chains_mutex.synchronize do
total = 0
hash = Hash.new(0)
if @chains.key? k
t2, h2 = @chains[k]
total += t2
hash.update h2
end
hash[word3] += 1
total += 1
@chains[k] = [total, hash]
end
@rchains_mutex.synchronize do
# Reverse
total = 0
hash = Hash.new(0)
if @rchains.key? rk
t2, h2 = @rchains[rk]
total += t2
hash.update h2
end
hash[word1] += 1
total += 1
@rchains[rk] = [total, hash]
end
end
def learn_line(message)
# debug "learning #{message.inspect}"
wordlist = message.strip.split(/\s+/).reject do |w|
@bot.config['markov.ignore_patterns'].map do |pat|
w =~ Regexp.new(pat.to_s)
end.select{|v| v}.size != 0
end
return unless wordlist.length >= 2
word1, word2 = MARKER, MARKER
wordlist << MARKER
wordlist.each do |word3|
learn_triplet(word1, word2, word3.to_sym)
word1, word2 = word2, word3
end
end
# TODO allow learning from URLs
def learn_from(m, params)
begin
path = params[:file]
file = File.open(path, "r")
pattern = params[:pattern].empty? ? nil : Regexp.new(params[:pattern].to_s)
rescue Errno::ENOENT
m.reply _("no such file")
return
end
if file.eof?
m.reply _("the file is empty!")
return
end
if params[:testing]
lines = []
range = case params[:lines]
when /^\d+\.\.\d+$/
Range.new(*params[:lines].split("..").map { |e| e.to_i })
when /^\d+$/
Range.new(1, params[:lines].to_i)
else
Range.new(1, [@bot.config['send.max_lines'], 3].max)
end
file.each do |line|
next unless file.lineno >= range.begin
lines << line.chomp
break if file.lineno == range.end
end
lines = lines.map do |l|
pattern ? l.scan(pattern).to_s : l
end.reject { |e| e.empty? }
if pattern
unless lines.empty?
m.reply _("example matches for that pattern at lines %{range} include: %{lines}") % {
:lines => lines.map { |e| Underline+e+Underline }.join(", "),
:range => range.to_s
}
else
m.reply _("the pattern doesn't match anything at lines %{range}") % {
:range => range.to_s
}
end
else
m.reply _("learning from the file without a pattern would learn, for example: ")
lines.each { |l| m.reply l }
end
return
end
if pattern
file.each { |l| learn(l.scan(pattern).to_s) }
else
file.each { |l| learn(l.chomp) }
end
m.okay
end
def stats(m, params)
m.reply "Markov status: chains: #{@chains.length} forward, #{@rchains.length} reverse, queued phrases: #{@learning_queue.size}"
end
end
plugin = MarkovPlugin.new
plugin.map 'markov delay :delay', :action => "set_delay"
plugin.map 'markov delay', :action => "set_delay"
plugin.map 'markov ignore :action :option', :action => "ignore"
plugin.map 'markov ignore :action', :action => "ignore"
plugin.map 'markov ignore', :action => "ignore"
plugin.map 'markov readonly :action :option', :action => "readonly"
plugin.map 'markov readonly :action', :action => "readonly"
plugin.map 'markov readonly', :action => "readonly"
plugin.map 'markov enable', :action => "enable"
plugin.map 'markov disable', :action => "disable"
plugin.map 'markov status', :action => "status"
plugin.map 'markov stats', :action => "stats"
plugin.map 'chat about :seed1 [:seed2]', :action => "chat"
plugin.map 'chat', :action => "rand_chat"
plugin.map 'markov probability [:probability]', :action => "probability",
:requirements => {:probability => /^\d+%?$/}
plugin.map 'markov learn from :file [:testing [:lines lines]] [using pattern *pattern]', :action => "learn_from", :thread => true,
:requirements => {
:testing => /^testing$/,
:lines => /^(?:\d+\.\.\d+|\d+)$/ }
plugin.default_auth('ignore', false)
plugin.default_auth('probability', false)
plugin.default_auth('learn', false)
| 30.007884 | 381 | 0.600937 |
6a25546a4547451f421cb9b75dd5e6b61e6c8d8a | 111 | class ApplicationController < ActionController::Base
before_action :masquerade!
protect_from_forgery
end
| 15.857143 | 52 | 0.828829 |
ed9cce74cb8d7b21765ea2e2877edbc073c3a7af | 865 | require 'spec_helper'
describe Owenbot::Commands::Calculate do
let(:app) { Owenbot::Bot.instance }
it 'adds two numbers' do
expect(message: 'owenbot calculate 2+2', channel: 'channel').to respond_with_slack_message('4')
end
it 'adds two numbers via =' do
expect(message: '= 2+2', channel: 'channel').to respond_with_slack_message('4')
end
it 'adds two numbers via = without a space' do
expect(message: '=2+2', channel: 'channel').to respond_with_slack_message('4')
end
it 'sends something without an answer' do
expect(message: 'owenbot calculate pi', channel: 'channel').to respond_with_slack_message('Sorry, no value provided for variables: pi.')
end
it 'reports division by zero' do
expect(message: 'owenbot calculate 1/0', channel: 'channel').to respond_with_slack_message('Sorry, Dentaku::ZeroDivisionError.')
end
end
| 41.190476 | 140 | 0.721387 |
2152f90e37d935b5b93f2c8dc921c7bd3965da50 | 4,119 | # frozen_string_literal: true
require "json"
module Kenna
module Toolkit
module Contrast
class Client
def initialize(contrast_host, contrast_port, contrast_api_key, contrast_auth_header, contrast_org_id, contrast_use_https)
protocol = contrast_use_https ? "https://" : "http://"
@base_url = "#{protocol}#{contrast_host}#{contrast_port.nil? ? '' : ':'}#{contrast_port}/Contrast/api/ng/#{contrast_org_id}"
print "Base URL is #{@base_url}"
@headers = { "Authorization" => contrast_auth_header, "API-Key" => contrast_api_key, "Content-Type" => "application/json" }
@recs = {}
@tags = {}
end
def get_vulns(tags, environments, severities, offset, limit)
url = "#{@base_url}/orgtraces/filter?expand=application&offset=#{offset}&limit=#{limit}&applicationTags=#{tags}&environments=#{environments}&severities=#{severities}&licensedOnly=true"
response = http_get(url, @headers, 1)
return nil if response.nil?
body = JSON.parse response.body
more_results = !(response.nil? || response.empty? || (offset + limit) >= body["count"])
ceiling = [limit + offset, body["count"]].min
print "Fetched #{ceiling} of #{body['count']} vulnerabilities"
[body["traces"], more_results, body["count"]]
rescue RestClient::ExceptionWithResponse => e
print_error "Error getting vulnerabilities: #{e.message}"
rescue SocketError => e
print_error "Error calling API, check server address: #{e.message}"
end
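        # A hypothetical pagination loop over the [traces, more_results, count]
        # triple returned above (names and values are illustrative only):
        #
        #   offset = 0
        #   loop do
        #     traces, more, _count = client.get_vulns(tags, envs, sevs, offset, 25)
        #     break unless more
        #     offset += 25
        #   end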
def get_vulnerable_libraries(apps, offset, limit)
payload = {
quickFilter: "VULNERABLE",
"apps": apps
}
url = "#{@base_url}/libraries/filter?offset=#{offset}&limit=#{limit}&sort=score&expand=skip_links%2Capps%2Cvulns%2Cstatus%2Cusage_counts"
response = http_post(url, @headers, payload.to_json)
return nil if response.nil?
body = JSON.parse response.body
more_results = !(response.nil? || response.empty? || (offset + limit) >= body["count"])
ceiling = [limit + offset, body["count"]].min
print "Fetched #{ceiling} of #{body['count']} libraries"
[body["libraries"], more_results, body["count"]]
rescue RestClient::ExceptionWithResponse => e
print_error "Error getting vulnerable libraries for apps #{apps}: #{e}"
end
def get_application_ids(tags)
url = "#{@base_url}/applications/filter/short?filterTags=#{tags}"
response = http_get(url, @headers, 1)
return nil if response.nil?
temp = JSON.parse response.body
temp["applications"]
rescue RestClient::ExceptionWithResponse => e
print_error "Error getting applications for tags #{tags}: #{e}"
end
def get_application_tags(app_id)
if @tags[app_id].nil?
url = "#{@base_url}/tags/application/list/#{app_id}"
response = http_get(url, @headers, 1)
temp = JSON.parse response.body
@tags[app_id] = temp["tags"]
end
@tags[app_id]
rescue RestClient::ExceptionWithResponse => e
print_error "Error getting application tags for app id #{app_id}: #{e}"
end
def get_trace_recommendation(id, rule_name)
if @recs[rule_name].nil?
url = "#{@base_url}/traces/#{id}/recommendation"
response = http_get(url, @headers)
@recs[rule_name] = JSON.parse response.body
end
@recs[rule_name]
rescue RestClient::ExceptionWithResponse => e
print_error "Error fetching trace recommendation for #{id}: #{e}"
end
def get_trace_story(id)
url = "#{@base_url}/traces/#{id}/story"
response = http_get(url, @headers)
JSON.parse response.body
rescue RestClient::ExceptionWithResponse => e
print_error "Error fetching trace story for #{id}: #{e}"
end
end
end
end
end
| 38.495327 | 194 | 0.606215 |
39afc2a65d6929cc468f5e0456b06527e09a1d66 | 684 | module Notification
class Base
include Rails.application.routes.url_helpers
def self.key
name.split("::").last.underscore.to_sym
end
def self.description
name.split("::").last.underscore.gsub("_", " ").capitalize
end
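    # For a hypothetical subclass Notification::CommentPosted, the helpers above
    # would yield :comment_posted and "Comment posted" respectively.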
# :nocov:
def text
raise "Expected `#{self.class}` to define `text`"
end
def email
raise "Expected `#{self.class}` to define `email`"
end
def sms
raise "Expected `#{self.class}` to define `sms`"
end
# :nocov:
def slack
text
end
def for_user?
!!@for_user
end
private
def slack_link label, path
"<#{path}|#{label}>"
end
end
end
| 16.285714 | 64 | 0.576023 |
62fd0d84ea323312ec5e4bece5445c703176d4de | 865 | Gitlab::Seeder.quiet do
Project.all.reject(&:empty_repo?).each do |project|
branches = project.repository.branch_names
branches.each do |branch_name|
break if branches.size < 2
source_branch = branches.pop
target_branch = branches.pop
# Random user
user = project.team.users.sample
next unless user
params = {
source_branch: source_branch,
target_branch: target_branch,
title: Faker::Lorem.sentence(6),
description: Faker::Lorem.sentences(3).join(" ")
}
merge_request = MergeRequests::CreateService.new(project, user, params).execute
if merge_request.valid?
merge_request.assignee = user
merge_request.milestone = project.milestones.sample
merge_request.save
print '.'
else
print 'F'
end
end
end
end
| 25.441176 | 85 | 0.641618 |
62c15fb38e45b687e8de9fa481101d0afeb8663d | 2,660 | class UserAgent
module Browsers
module All
include Comparable
def <=>(other)
if respond_to?(:browser) && other.respond_to?(:browser) &&
browser == other.browser
version <=> Version.new(other.version)
else
false
end
end
def eql?(other)
self == other
end
def to_s
to_str
end
def to_str
join(" ")
end
def application
first
end
def browser
application && application.product
end
def version
application && application.version
end
def platform
nil
end
def os
nil
end
def respond_to?(symbol)
detect_product(symbol) ? true : super
end
def method_missing(method, *args, &block)
detect_product(method) || super
end
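      # As an illustration, a parsed agent list containing a "Safari" product
      # would respond to #safari via the hooks above and return that entry.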
def webkit?
false
end
def mobile?
if browser == 'webOS'
true
elsif platform == 'Symbian'
true
elsif detect_product('Mobile') || detect_comment('Mobile')
true
elsif android?
true
elsif ios?
true
elsif application && application.comment &&
application.comment.detect { |k, v| k =~ /^IEMobile/ }
true
else
false
end
end
def ios?
['iPad', 'iPhone', 'iPod'].include?(platform)
end
def android?
os =~ /Android/
end
def mac_os?
['Macintosh'].include?(platform)
end
def bot?
        # If the UA has no application type, it's probably generated by a
        # shitty bot.
if application.nil?
true
        # Match the common case when bots refer to themselves as bots in
        # the application comment. There are no standards for how bots
        # should call themselves, so it's not an exhaustive method.
#
# If you want to expand the scope, override the method and
        # provide your own regexp. Any patches to further extend this
# list will be rejected.
elsif comment = application.comment
comment.any? { |c| c =~ /bot/i }
elsif product = application.product
product.include?('bot')
else
false
end
end
private
def detect_product(product)
detect { |useragent| useragent.product.to_s.downcase == product.to_s.downcase }
end
def detect_comment(comment)
detect { |useragent| useragent.comment && useragent.comment.include?(comment) }
end
end
end
end
| 21.626016 | 89 | 0.538722 |
910ed4bbf48fbef15a5c8401e2bed528e552c38d | 938 | module Babeltrace
class Trace
attr_reader :context
attr_reader :handle_id
def initialize(context, handle_id)
@context = context
@handle_id = handle_id
end
def get_path
Babeltrace.bt_trace_handle_get_path(@context, @handle_id)
end
def get_timestamp_begin(clock_type = :REAL)
t = Babeltrace.bt_trace_handle_get_timestamp_begin(@context, @handle_id, clock_type)
      t = Time.at(0, t, :nsec) if clock_type == :REAL
      t
    end
def get_timestamp_end(clock_type = :REAL)
t = Babeltrace.bt_trace_handle_get_timestamp_end(@context, @handle_id, clock_type)
      t = Time.at(0, t, :nsec) if clock_type == :REAL
      t
    end
end
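  # Hypothetical usage, assuming a context and trace handle id obtained from
  # the Babeltrace context API (values are illustrative):
  #
  #   trace = Babeltrace::Trace.new(ctx, handle_id)
  #   trace.get_path             #=> "/path/to/trace"
  #   trace.get_timestamp_begin  #=> a Time for :REAL, the raw integer otherwise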
attach_function :bt_trace_handle_get_path, [Context, :int], :string
attach_function :bt_trace_handle_get_timestamp_begin, [Context, :int, ClockType], :uint64
attach_function :bt_trace_handle_get_timestamp_end, [Context, :int, ClockType], :uint64
end
| 32.344828 | 91 | 0.721748 |
0836704224c748d042308779904d6fded9324077 | 2,276 | require 'spec_helper'
describe Taza::ProjectGenerator do
context "taza create foo_site" do
context "creates" do
let(:subject) { Taza::ProjectGenerator.new(['foo_site']) }
let(:output) { capture(:stdout) { subject.create } }
it 'a Gemfile' do
expect(output).to include("Gemfile")
expect(File.exists?('Gemfile')).to be_true
end
it 'a Rakefile' do
expect(output).to include('Rakefile')
expect(File.exists?('Rakefile')).to be_true
end
it 'the Rakefile can be required' do
output
system("ruby -c Rakefile > #{null_device}").should be_true
end
it 'config/config.yml' do
expect(output).to include('config/config.yml')
expect(File.exists?('config/config.yml')).to be_true
end
it 'lib/sites' do
expect(output).to include('lib/sites')
expect(File.directory?('lib/sites')).to be_true
end
it 'a spec_helper.rb' do
expect(output).to include('spec/spec_helper.rb')
expect(File.exists?('spec/spec_helper.rb')).to be_true
end
it 'spec_helper.rb can be required' do
output
system("ruby -c spec/spec_helper.rb > #{null_device}").should be_true
end
it 'spec/isolation' do
expect(output).to include('spec/isolation')
expect(File.directory?('spec/isolation')).to be_true
end
it 'spec/integration' do
expect(output).to include('spec/integration')
expect(File.directory?('spec/integration')).to be_true
end
it 'bin' do
expect(output).to include('bin')
expect(File.directory?('bin')).to be_true
end
it 'the taza executable' do
expect(output).to include('spec/spec_helper.rb')
expect(File.exists?('spec/spec_helper.rb')).to be_true
end
xit "should generate a console script" do
run_generator('taza', [APP_ROOT], generator_sources)
File.exists?(File.join(APP_ROOT, 'script', 'console')).should be_true
end
xit "should generate a windows console script" do
run_generator('taza', [APP_ROOT], generator_sources)
File.exists?(File.join(APP_ROOT, 'script', 'console.cmd')).should be_true
end
end
end
end
| 29.558442 | 81 | 0.621265 |
f851f4242969ff69b8f4475cad8047f2d2bf0f34 | 421 | # frozen_string_literal: true
class SchoolManagerMailer < ApplicationMailer
def new_member(school_manager:, member:)
@school_manager = school_manager
@member_presenter = ::Presenters::User.new(member)
@school_manager_presenter = ::Presenters::User.new(school_manager)
mail(subject: "Nouveau #{@member_presenter.role_name}: #{@member_presenter.full_name}",
to: school_manager.email)
end
end
| 32.384615 | 91 | 0.750594 |
11331dae121fb64a4869548eb35d2e30b42e90db | 1,022 | require File.dirname(__FILE__) + "/lib/stupidedi/version"
Gem::Specification.new do |s|
s.name = "stupidedi"
s.summary = "Parse, generate, validate ASC X12 EDI"
s.description = "Ruby API for parsing and generating ASC X12 EDI transactions"
s.homepage = "https://github.com/irobayna/stupidedi"
s.version = Stupidedi::VERSION
s.date = "2018-05-27"
s.author = "Kyle Putnam/Isi Robayna"
s.email = "[email protected]"
s.files = ["README.md", "Rakefile",
"bin/*",
"lib/**/*",
"doc/**/*.md",
"spec/**/*"].map { |glob| Dir[glob] }.flatten
s.test_files = Dir["spec/examples/**/*.example"].to_a
s.has_rdoc = false
s.bindir = "bin"
s.executables = ["edi-pp", "edi-ed"]
s.require_path = "lib"
s.add_dependency "term-ansicolor", "~> 1.3"
s.add_dependency "cantor", "~> 1.2.1"
# s.metadata["yard.run"] = "yard doc"
end
| 35.241379 | 80 | 0.533268 |
38042a20b941dcbc69d195323834d9c81f337c19 | 754 | cask "unity-ios-support-for-editor" do
version "2020.1.12f1,55b56f0a86e3"
sha256 "ecccacac1c64857bce84ec65b2031f038ce4e1893f5ac4c96b97bdf9a2711bd4"
# download.unity3d.com/download_unity/ was verified as official when first introduced to the cask
url "https://download.unity3d.com/download_unity/#{version.after_comma}/MacEditorTargetInstaller/UnitySetup-iOS-Support-for-Editor-#{version.before_comma}.pkg"
appcast "https://public-cdn.cloud.unity3d.com/hub/prod/releases-darwin.json"
name "Unity iOS Build Support"
desc "iOS taget support for Unity"
homepage "https://unity.com/products"
depends_on cask: "unity"
pkg "UnitySetup-iOS-Support-for-Editor-#{version.before_comma}.pkg"
uninstall pkgutil: "com.unity3d.iOSSupport"
end
| 41.888889 | 161 | 0.789125 |
acbd13a360ea4c48b49478a69beee7c5bb61c8ad | 178 | json.array!(@trackers) do |tracker|
json.extract! tracker, :id, :wb_id, :part_code, :employee_id, :shift, :device_id, :count
json.url tracker_url(tracker, format: :json)
end
| 35.6 | 90 | 0.719101 |
ed3c0de45fdc2417e0a903e9ba8207e55978b5b8 | 3,132 | module Api
module V1
class CommoditiesController < ApiController
before_action :find_commodity, only: [:show, :changes]
def show
@measures = MeasurePresenter.new(
@commodity.measures_dataset.eager(
{ footnotes: :footnote_descriptions },
{ measure_type: :measure_type_description },
{ measure_components: [{ duty_expression: :duty_expression_description },
{ measurement_unit: :measurement_unit_description },
:monetary_unit,
:measurement_unit_qualifier] },
{ measure_conditions: [{ measure_action: :measure_action_description},
{ certificate: :certificate_descriptions },
{ certificate_type: :certificate_type_description },
{ measurement_unit: :measurement_unit_description },
:monetary_unit,
:measurement_unit_qualifier,
{ measure_condition_code: :measure_condition_code_description },
{ measure_condition_components: [:measure_condition,
:duty_expression,
:measurement_unit,
:monetary_unit,
:measurement_unit_qualifier]
}]
},
{ quota_order_number: :quota_definition },
{ excluded_geographical_areas: :geographical_area_descriptions },
{ geographical_area: [:geographical_area_descriptions,
{ contained_geographical_areas: :geographical_area_descriptions }] },
:additional_code,
:full_temporary_stop_regulations,
:measure_partial_temporary_stops
).all, @commodity
).validate!
@commodity_cache_key = "commodity-#{@commodity.goods_nomenclature_sid}-#{actual_date}-#{TradeTariffBackend.currency}"
respond_with @commodity
end
def changes
key = "commodity-#{@commodity.goods_nomenclature_sid}-#{actual_date}-#{TradeTariffBackend.currency}/changes"
@changes = Rails.cache.fetch(key, expires_at: actual_date.end_of_day) do
ChangeLog.new(@commodity.changes.where { |o|
o.operation_date <= actual_date
})
end
render 'api/v1/changes/changes'
end
private
def find_commodity
@commodity = Commodity.actual
.declarable
.by_code(params[:id])
.take
raise Sequel::RecordNotFound if @commodity.children.any?
raise Sequel::RecordNotFound if @commodity.goods_nomenclature_item_id.in? HiddenGoodsNomenclature.codes
end
end
end
end
| 46.058824 | 125 | 0.526501 |
26e36f139725a8bd5680dac4da0c490e2e6817cd | 19,810 | # frozen_string_literal: true
require "rubocops/extend/formula"
module RuboCop
module Cop
module FormulaAudit
# This cop checks for various miscellaneous Homebrew coding styles.
class Lines < FormulaCop
def audit_formula(_node, _class_node, _parent_class_node, _body_node)
[:automake, :ant, :autoconf, :emacs, :expat, :libtool, :mysql, :perl,
:postgresql, :python, :python3, :rbenv, :ruby].each do |dependency|
next unless depends_on?(dependency)
problem ":#{dependency} is deprecated. Usage should be \"#{dependency}\"."
end
{ apr: "apr-util", fortran: "gcc", gpg: "gnupg", hg: "mercurial",
mpi: "open-mpi", python2: "python" }.each do |requirement, dependency|
next unless depends_on?(requirement)
problem ":#{requirement} is deprecated. Usage should be \"#{dependency}\"."
end
problem ":tex is deprecated." if depends_on?(:tex)
end
end
class ClassInheritance < FormulaCop
def audit_formula(_node, class_node, parent_class_node, _body_node)
begin_pos = start_column(parent_class_node)
end_pos = end_column(class_node)
          return if begin_pos - end_pos == 3
problem "Use a space in class inheritance: " \
"class #{@formula_name.capitalize} < #{class_name(parent_class_node)}"
end
end
class Comments < FormulaCop
def audit_formula(_node, _class_node, _parent_class_node, _body_node)
audit_comments do |comment|
[
"# PLEASE REMOVE",
"# Documentation:",
"# if this fails, try separate make/make install steps",
"# The URL of the archive",
"## Naming --",
"# if your formula requires any X11/XQuartz components",
"# if your formula fails when building in parallel",
"# Remove unrecognized options if warned by configure",
'# system "cmake',
].each do |template_comment|
next unless comment.include?(template_comment)
problem "Please remove default template comments"
break
end
end
audit_comments do |comment|
# Commented-out depends_on
next unless comment =~ /#\s*depends_on\s+(.+)\s*$/
problem "Commented-out dependency #{Regexp.last_match(1)}"
end
return if formula_tap != "homebrew-core"
# Citation and tag comments from third-party taps
audit_comments do |comment|
next if comment !~ /#\s*(cite(?=\s*\w+:)|doi(?=\s*['"])|tag(?=\s*['"]))/
problem "Formulae in homebrew/core should not use `#{Regexp.last_match(1)}` comments"
end
end
end
class AssertStatements < FormulaCop
def audit_formula(_node, _class_node, _parent_class_node, body_node)
find_every_method_call_by_name(body_node, :assert).each do |method|
if method_called_ever?(method, :include?) && !method_called_ever?(method, :!)
problem "Use `assert_match` instead of `assert ...include?`"
end
if method_called_ever?(method, :exist?) && !method_called_ever?(method, :!)
problem "Use `assert_predicate <path_to_file>, :exist?` instead of `#{method.source}`"
end
if method_called_ever?(method, :exist?) && method_called_ever?(method, :!)
problem "Use `refute_predicate <path_to_file>, :exist?` instead of `#{method.source}`"
end
if method_called_ever?(method, :executable?) && !method_called_ever?(method, :!)
problem "Use `assert_predicate <path_to_file>, :executable?` instead of `#{method.source}`"
end
end
end
end
class OptionDeclarations < FormulaCop
def audit_formula(_node, _class_node, _parent_class_node, body_node)
problem "Use new-style option definitions" if find_method_def(body_node, :options)
find_instance_method_call(body_node, :build, :without?) do |method|
next unless unless_modifier?(method.parent)
correct = method.source.gsub("out?", "?")
problem "Use if #{correct} instead of unless #{method.source}"
end
find_instance_method_call(body_node, :build, :with?) do |method|
next unless unless_modifier?(method.parent)
correct = method.source.gsub("?", "out?")
problem "Use if #{correct} instead of unless #{method.source}"
end
find_instance_method_call(body_node, :build, :with?) do |method|
next unless expression_negated?(method)
problem "Don't negate 'build.with?': use 'build.without?'"
end
find_instance_method_call(body_node, :build, :without?) do |method|
next unless expression_negated?(method)
problem "Don't negate 'build.without?': use 'build.with?'"
end
find_instance_method_call(body_node, :build, :without?) do |method|
arg = parameters(method).first
next unless match = regex_match_group(arg, /^-?-?without-(.*)/)
problem "Don't duplicate 'without': " \
"Use `build.without? \"#{match[1]}\"` to check for \"--without-#{match[1]}\""
end
find_instance_method_call(body_node, :build, :with?) do |method|
arg = parameters(method).first
next unless match = regex_match_group(arg, /^-?-?with-(.*)/)
problem "Don't duplicate 'with': Use `build.with? \"#{match[1]}\"` to check for \"--with-#{match[1]}\""
end
find_instance_method_call(body_node, :build, :include?) do |method|
arg = parameters(method).first
next unless match = regex_match_group(arg, /^with(out)?-(.*)/)
problem "Use build.with#{match[1]}? \"#{match[2]}\" instead of " \
"build.include? 'with#{match[1]}-#{match[2]}'"
end
find_instance_method_call(body_node, :build, :include?) do |method|
arg = parameters(method).first
next unless match = regex_match_group(arg, /^\-\-(.*)$/)
problem "Reference '#{match[1]}' without dashes"
end
return if formula_tap != "homebrew-core"
# Use of build.with? implies options, which are forbidden in homebrew/core
find_instance_method_call(body_node, :build, :without?) do
problem "Formulae in homebrew/core should not use `build.without?`."
end
find_instance_method_call(body_node, :build, :with?) do
problem "Formulae in homebrew/core should not use `build.with?`."
end
end
def unless_modifier?(node)
return false unless node.if_type?
node.modifier_form? && node.unless?
end
end
class MpiCheck < FormulaCop
def audit_formula(_node, _class_node, _parent_class_node, body_node)
# Enforce use of OpenMPI for MPI dependency in core
return unless formula_tap == "homebrew-core"
find_method_with_args(body_node, :depends_on, "mpich") do
problem "Formulae in homebrew/core should use 'depends_on \"open-mpi\"' " \
"instead of '#{@offensive_node.source}'."
end
end
def autocorrect(node)
# The dependency nodes may need to be re-sorted because of this
lambda do |corrector|
corrector.replace(node.source_range, "depends_on \"open-mpi\"")
end
end
end
class Miscellaneous < FormulaCop
def audit_formula(_node, _class_node, _parent_class_node, body_node)
# FileUtils is included in Formula
# encfs modifies a file with this name, so check for some leading characters
find_instance_method_call(body_node, "FileUtils", nil) do |method_node|
problem "Don't need 'FileUtils.' before #{method_node.method_name}"
end
# Check for long inreplace block vars
find_all_blocks(body_node, :inreplace) do |node|
block_arg = node.arguments.children.first
next unless block_arg.source.size > 1
problem "\"inreplace <filenames> do |s|\" is preferred over \"|#{block_arg.source}|\"."
end
[:rebuild, :version_scheme].each do |method_name|
find_method_with_args(body_node, method_name, 0) do
problem "'#{method_name} 0' should be removed"
end
end
[:mac?, :linux?].each do |method_name|
next if formula_tap != "homebrew-core" || file_path&.include?("linuxbrew")
find_instance_method_call(body_node, "OS", method_name) do |check|
problem "Don't use #{check.source}; homebrew/core only supports macOS"
end
end
find_instance_call(body_node, "ARGV") do |method_node|
next if [:debug?, :verbose?, :value].index(method_node.method_name)
problem "Use build instead of ARGV to check options"
end
find_instance_method_call(body_node, :man, :+) do |method|
next unless match = regex_match_group(parameters(method).first, /^man[1-8]$/)
problem "\"#{method.source}\" should be \"#{match[0]}\""
end
# Avoid hard-coding compilers
find_every_method_call_by_name(body_node, :system).each do |method|
param = parameters(method).first
if match = regex_match_group(param, %r{^(/usr/bin/)?(gcc|llvm-gcc|clang)\s?})
problem "Use \"\#{ENV.cc}\" instead of hard-coding \"#{match[2]}\""
elsif match = regex_match_group(param, %r{^(/usr/bin/)?((g|llvm-g|clang)\+\+)\s?})
problem "Use \"\#{ENV.cxx}\" instead of hard-coding \"#{match[2]}\""
end
end
find_instance_method_call(body_node, "ENV", :[]=) do |method|
param = parameters(method)[1]
if match = regex_match_group(param, %r{^(/usr/bin/)?(gcc|llvm-gcc|clang)\s?})
problem "Use \"\#{ENV.cc}\" instead of hard-coding \"#{match[2]}\""
elsif match = regex_match_group(param, %r{^(/usr/bin/)?((g|llvm-g|clang)\+\+)\s?})
problem "Use \"\#{ENV.cxx}\" instead of hard-coding \"#{match[2]}\""
end
end
# Prefer formula path shortcuts in strings
formula_path_strings(body_node, :share) do |p|
next unless match = regex_match_group(p, %r{^(/(man))/?})
problem "\"\#{share}#{match[1]}\" should be \"\#{#{match[2]}}\""
end
formula_path_strings(body_node, :prefix) do |p|
if match = regex_match_group(p, %r{^(/share/(info|man))$})
problem "\"\#\{prefix}#{match[1]}\" should be \"\#{#{match[2]}}\""
end
if match = regex_match_group(p, %r{^((/share/man/)(man[1-8]))})
problem "\"\#\{prefix}#{match[1]}\" should be \"\#{#{match[3]}}\""
end
if match = regex_match_group(p, %r{^(/(bin|include|libexec|lib|sbin|share|Frameworks))}i)
problem "\"\#\{prefix}#{match[1]}\" should be \"\#{#{match[2].downcase}}\""
end
end
find_every_method_call_by_name(body_node, :depends_on).each do |method|
key, value = destructure_hash(parameters(method).first)
next if key.nil? || value.nil?
next unless match = regex_match_group(value, /^(lua|perl|python|ruby)(\d*)/)
problem "#{match[1]} modules should be vendored rather than use deprecated #{method.source}`"
end
find_every_method_call_by_name(body_node, :system).each do |method|
next unless match = regex_match_group(parameters(method).first, /^(env|export)(\s+)?/)
problem "Use ENV instead of invoking '#{match[1]}' to modify the environment"
end
find_every_method_call_by_name(body_node, :depends_on).each do |method|
param = parameters(method).first
dep, option_child_nodes = hash_dep(param)
next if dep.nil? || option_child_nodes.empty?
option_child_nodes.each do |option|
find_strings(option).each do |dependency|
next unless match = regex_match_group(dependency, /(with(out)?-\w+|c\+\+11)/)
problem "Dependency #{string_content(dep)} should not use option #{match[0]}"
end
end
end
find_instance_method_call(body_node, :version, :==) do |method|
next unless parameters_passed?(method, "HEAD")
problem "Use 'build.head?' instead of inspecting 'version'"
end
find_instance_method_call(body_node, "ARGV", :include?) do |method|
param = parameters(method).first
next unless match = regex_match_group(param, /^--(HEAD|devel)/)
problem "Use \"if build.#{match[1].downcase}?\" instead"
end
find_const(body_node, "MACOS_VERSION") do
problem "Use MacOS.version instead of MACOS_VERSION"
end
find_const(body_node, "MACOS_FULL_VERSION") do
problem "Use MacOS.full_version instead of MACOS_FULL_VERSION"
end
conditional_dependencies(body_node) do |node, method, param, dep_node|
dep = string_content(dep_node)
if node.if?
if (method == :include? && regex_match_group(param, /^with-#{dep}$/)) ||
(method == :with? && regex_match_group(param, /^#{dep}$/))
offending_node(dep_node.parent)
problem "Replace #{node.source} with #{dep_node.parent.source} => :optional"
end
elsif node.unless?
if (method == :include? && regex_match_group(param, /^without-#{dep}$/)) ||
(method == :without? && regex_match_group(param, /^#{dep}$/))
offending_node(dep_node.parent)
problem "Replace #{node.source} with #{dep_node.parent.source} => :recommended"
end
end
end
find_method_with_args(body_node, :fails_with, :llvm) do
problem "'fails_with :llvm' is now a no-op so should be removed"
end
find_method_with_args(body_node, :needs, :openmp) do
problem "'needs :openmp' should be replaced with 'depends_on \"gcc\"'"
end
find_method_with_args(body_node, :system, /^(otool|install_name_tool|lipo)/) do
problem "Use ruby-macho instead of calling #{@offensive_node.source}"
end
find_every_method_call_by_name(body_node, :system).each do |method_node|
# Skip Kibana: npm cache edge (see formula for more details)
next if @formula_name.match?(/^kibana(@\d[\d.]*)?$/)
first_param, second_param = parameters(method_node)
next if !node_equals?(first_param, "npm") ||
!node_equals?(second_param, "install")
offending_node(method_node)
problem "Use Language::Node for npm install args" unless languageNodeModule?(method_node)
end
problem "Use new-style test definitions (test do)" if find_method_def(body_node, :test)
find_method_with_args(body_node, :skip_clean, :all) do
problem "`skip_clean :all` is deprecated; brew no longer strips symbols. " \
"Pass explicit paths to prevent Homebrew from removing empty folders."
end
if find_method_def(@processed_source.ast)
problem "Define method #{method_name(@offensive_node)} in the class body, not at the top-level"
end
find_instance_method_call(body_node, :build, :universal?) do
next if @formula_name == "wine"
problem "macOS has been 64-bit only since 10.6 so build.universal? is deprecated."
end
find_instance_method_call(body_node, "ENV", :universal_binary) do
next if @formula_name == "wine"
problem "macOS has been 64-bit only since 10.6 so ENV.universal_binary is deprecated."
end
find_instance_method_call(body_node, "ENV", :x11) do
problem 'Use "depends_on :x11" instead of "ENV.x11"'
end
find_every_method_call_by_name(body_node, :depends_on).each do |method|
next unless method_called?(method, :new)
problem "`depends_on` can take requirement classes instead of instances"
end
find_instance_method_call(body_node, "Dir", :[]) do |method|
next unless parameters(method).size == 1
path = parameters(method).first
next unless path.str_type?
next unless match = regex_match_group(path, /^[^\*{},]+$/)
problem "Dir([\"#{string_content(path)}\"]) is unnecessary; just use \"#{match[0]}\""
end
fileutils_methods = Regexp.new(
FileUtils.singleton_methods(false)
.map { |m| "(?-mix:^" + Regexp.escape(m) + "$)" }
.join("|"),
)
find_every_method_call_by_name(body_node, :system).each do |method|
param = parameters(method).first
next unless match = regex_match_group(param, fileutils_methods)
problem "Use the `#{match}` Ruby method instead of `#{method.source}`"
end
return if formula_tap != "homebrew-core"
# Avoid build-time checks in homebrew/core
find_every_method_call_by_name(body_node, :system).each do |method|
next if @formula_name.start_with?("lib")
next if %w[
beecrypt
ccrypt
git
gmp
gnupg
[email protected]
google-sparsehash
jemalloc
jpeg-turbo
mpfr
open-mpi
[email protected]
pcre
wolfssl
xz
].include?(@formula_name)
params = parameters(method)
next unless node_equals?(params[0], "make")
params[1..].each do |arg|
next unless regex_match_group(arg, /^(checks?|tests?)$/)
offending_node(method)
problem "Formulae in homebrew/core (except e.g. cryptography, libraries) " \
"should not run build-time checks"
end
end
end
def modifier?(node)
return false unless node.if_type?
node.modifier_form?
end
def_node_search :conditional_dependencies, <<~EOS
{$(if (send (send nil? :build) ${:include? :with? :without?} $(str _))
(send nil? :depends_on $({str sym} _)) nil?)
$(if (send (send nil? :build) ${:include? :with? :without?} $(str _)) nil?
(send nil? :depends_on $({str sym} _)))}
EOS
def_node_matcher :hash_dep, <<~EOS
(hash (pair $(str _) $...))
EOS
def_node_matcher :destructure_hash, <<~EOS
(hash (pair $(str _) $(sym _)))
EOS
def_node_search :formula_path_strings, <<~EOS
{(dstr (begin (send nil? %1)) $(str _ ))
(dstr _ (begin (send nil? %1)) $(str _ ))}
EOS
# Node Pattern search for Language::Node
def_node_search :languageNodeModule?, <<~EOS
(const (const nil? :Language) :Node)
EOS
end
end
end
end
| 40.101215 | 115 | 0.576779 |
3877897a1c9ef0578d806ff3562f243c5b22722f | 221 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'gastly'
require 'coveralls'
Coveralls.wear!
RSpec.configure do |config|
config.before(:suite) do
Phantomjs.implode!
Phantomjs.path
end
end
| 17 | 58 | 0.723982 |
284c43113db5f5e2aeda39fff6e6e1dab078ca55 | 977 | require 'spec_helper'
describe EventInvitationMailer do
let(:email) { ActionMailer::Base.deliveries.last }
let(:event) { Fabricate(:event, date_and_time: Time.zone.local(2017, 11, 12, 10, 0), name: 'Test event') }
let(:member) { Fabricate(:member) }
let(:invitation) { Fabricate(:invitation, event: event, member: member) }
it '#invite_student' do
email_subject = "Invitation: #{event.name}"
EventInvitationMailer.invite_student(event, member, invitation).deliver_now
expect(email.subject).to eq(email_subject)
end
it '#invite_coach' do
email_subject = "Coach Invitation: #{event.name}"
EventInvitationMailer.invite_coach(event, member, invitation).deliver_now
expect(email.subject).to eq(email_subject)
end
it '#attending' do
email_subject = "Your spot to #{event.name} has been confirmed."
EventInvitationMailer.attending(event, member, invitation).deliver_now
expect(email.subject).to eq(email_subject)
end
end
| 32.566667 | 108 | 0.728762 |
87392e454ddb75dae07e1598f77da103c4426d2b | 1,180 | module ThemeInterface
def self.getlist
raise NotImplementedError.new("#{self.class}##{__method__} are not exist")
end
def self.get
raise NotImplementedError.new("#{self.class}##{__method__} are not exist")
end
end
class Theme
include ThemeInterface
attr_reader
public
def self.getlist
themedir = "root/rbcms/themes/"
themeDatas = Array.new
filedatas = Array.new
begin
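      # Each theme directory is assumed to hold a one-line "themeInfo" file of
      # ";"-separated "key:value" pairs, e.g. (hypothetical):
      #   name:Sample Theme;author:Someone;version:1.0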
Dir.glob(themedir+"*"){|dir| File.open(dir+"/themeInfo",mode="r",:encoding=>'UTF-8'){|filedata| data = filedata.gets.chomp("\n");filedatas.push(data)}}
filedatas.each do |filedata|
fileInfo = Hash.new
filecontents = filedata.split(";")
filecontents.each do |filecontent|
content = filecontent.split(":",2)
fileInfo.store(content[0],content[1])
end
themeDatas.push(fileInfo)
end
rescue => exception
SysLogger.error exception.message
ensure
return themeDatas
end
end
def self.get
GetTheme.run
end
end
# 文民
| 27.44186 | 163 | 0.566949 |
288da3c8cd90227821f094e0c71cb3dcc5910ad6 | 1,884 | module Selenium
module WebDriver
module Chrome
#
# @api private
#
class Service
START_TIMEOUT = 20
STOP_TIMEOUT = 5
MISSING_TEXT = "Unable to find the chromedriver executable. Please download the server from http://code.google.com/p/selenium/downloads/list and place it somewhere on your PATH. More info at http://code.google.com/p/selenium/wiki/ChromeDriver."
attr_reader :uri
def self.executable_path
@executable_path ||= (
Platform.find_binary "chromedriver" or raise Error::WebDriverError, MISSING_TEXT
)
end
def self.executable_path=(path)
Platform.assert_executable path
@executable_path = path
end
def self.default_service
new executable_path, PortProber.random
end
def initialize(executable_path, port)
@uri = URI.parse "http://#{Platform.localhost}:#{port}"
server_command = [executable_path, "--port=#{port}"]
@process = ChildProcess.build(*server_command)
@socket_poller = SocketPoller.new Platform.localhost, port, START_TIMEOUT
@process.io.inherit! if $DEBUG == true
end
def start
@process.start
unless @socket_poller.connected?
raise Error::WebDriverError, "unable to connect to chromedriver #{@uri}"
end
at_exit { stop } # make sure we don't leave the server running
end
def stop
return if @process.nil? || @process.exited?
Net::HTTP.get uri.host, '/shutdown', uri.port
@process.poll_for_exit STOP_TIMEOUT
rescue ChildProcess::TimeoutError
# ok, force quit
@process.stop STOP_TIMEOUT
end
end # Service
end # Chrome
end # WebDriver
end # Selenium | 29.4375 | 253 | 0.608811 |
08246d3ec2b1b20cca596a006dc8d400c960c0dd | 1,922 | require 'spec_helper'
describe 'keepalived::instance', :type => :define do
let :title do
'something'
end
let :default_params do
{
:interface => 'eth0',
:virtual_ips => [ '10.0.0.1 dev bond0.X' ],
      :state => 'BACKUP',
:priority => 1,
:name => 'something',
}
end
let :fragment_file do
'/var/lib/puppet/concat/_etc_keepalived_keepalived.conf/fragments/50_keepalived_something'
end
let :facts do
{
:concat_basedir => '/var/lib/puppet/concat'
}
end
let :pre_condition do
'class { "concat::setup": }
concat { "/etc/keepalived/keepalived.conf": }'
end
describe 'when passing default parameters' do
let :params do
default_params
end
it 'should build the fragment with correct content' do
verify_contents(subject, fragment_file,
[
"vrrp_instance something {", " virtual_router_id something", "", " # Advert interval", " advert_int 1", " # for electing MASTER, highest priority wins.", " priority 1", " state BACKUP", " interface eth0", " virtual_ipaddress {", " ", " 10.0.0.1 dev bond0.X", " }", "}"
]
)
end
end
describe 'when passing duplicated IP addresses' do
let :params do
default_params.merge(:virtual_ips => [ '10.0.0.1 dev bond0.X', '10.0.0.1 dev bond0.X', '10.0.0.2 dev bond1.X' ])
end
    it 'should only keep the first one' do
should contain_file(fragment_file)\
.with_content(
"vrrp_instance something {\n virtual_router_id something\n\n # Advert interval\n advert_int 1\n\n # for electing MASTER, highest priority wins.\n priority 1\n state BACKUP\n\n interface eth0\n\n virtual_ipaddress {\n \n 10.0.0.1 dev bond0.X\n 10.0.0.2 dev bond1.X\n \n \n }\n \n \n \n \n\n \n \n}\n"
)
end
end
end
| 29.569231 | 343 | 0.596774 |
28039ff19427506bf9e6685f185c9e4e76d57561 | 11,499 | require 'socket'
# == Synopsis
# A Piper is used to fork a child process and then establish a communication
# pipe between the parent and child. This communication pipe is used to pass
# Ruby objects between the two.
#
# == Details
# When a new piper instance is created, the Ruby process is forked into two
# processes - the parent and the child. Each continues execution from the
# point of the fork. The piper establishes a pipe for communication between
# the parent and the child. This communication pipe can be opened as read /
# write / read-write (from the perspective of the parent).
#
# Communication over the pipe is handled by marshalling Ruby objects through
# the pipe. This means that nearly any Ruby object can be passed between the
# two processes. For example, exceptions from the child process can be
# marshalled back to the parent and raised there.
#
# Object passing is handled by use of the +puts+ and +gets+ methods defined
# on the Piper. These methods use a +timeout+ and the Kernel#select method
# to ensure a timely return.
#
# == Examples
#
# piper = Servolux::Piper.new('r', :timeout => 5)
#
# piper.parent {
# $stdout.puts "parent pid #{Process.pid}"
# $stdout.puts "child pid #{piper.pid} [from fork]"
#
# child_pid = piper.gets
# $stdout.puts "child pid #{child_pid} [from child]"
#
# msg = piper.gets
# $stdout.puts "message from child #{msg.inspect}"
# }
#
# piper.child {
# sleep 2
# piper.puts Process.pid
# sleep 3
# piper.puts "The time is #{Time.now}"
# }
#
# piper.close
#
class Servolux::Piper
# :stopdoc:
SIZEOF_INT = [42].pack('I').size # @private
# :startdoc:
# Creates a new Piper with the child process configured as a daemon. The
# +pid+ method of the piper returns the PID of the daemon process.
#
# By default a daemon process will release its current working directory
# and the stdout/stderr/stdin file descriptors. This allows the parent
# process to exit cleanly. This behavior can be overridden by setting the
# _nochdir_ and _noclose_ flags to true. The first will keep the current
# working directory; the second will keep stdout/stderr/stdin open.
#
# @param [Boolean] nochdir Do not change working directories
# @param [Boolean] noclose Do not close stdin, stdout, and stderr
# @return [Piper]
#
def self.daemon( nochdir = false, noclose = false )
piper = self.new(:timeout => 1)
piper.parent {
pid = piper.gets
raise ::Servolux::Error, 'Could not get the child PID.' if pid.nil?
piper.instance_variable_set(:@child_pid, pid)
}
piper.child {
Process.setsid # Become session leader.
exit!(0) if fork # Zap session leader.
Dir.chdir '/' unless nochdir # Release old working directory.
File.umask 0000 # Ensure sensible umask.
unless noclose
STDIN.reopen '/dev/null' # Free file descriptors and
STDOUT.reopen '/dev/null', 'a' # point them somewhere sensible.
STDERR.reopen '/dev/null', 'a'
end
piper.puts Process.pid
}
return piper
end
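  # A minimal daemon sketch (assumed usage, not from the original docs): the
  # parent learns the detached child's PID via the pipe and can record it,
  # while the child carries on as the daemon process.
  #
  #   piper = Servolux::Piper.daemon
  #   piper.parent { puts "daemon running as #{piper.pid}" }
  #   piper.child  { loop { sleep 60 } }
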
# The timeout in seconds to wait for puts / gets commands.
attr_accessor :timeout
# The underlying socket the piper is using for communication.
attr_reader :socket
# @overload Piper.new( mode = 'r', opts = {} )
# Creates a new Piper instance with the communication pipe configured
# using the provided _mode_. The default mode is read-only (from the
# parent, and write-only from the child). The supported modes are as
# follows:
#
# Mode | Parent View | Child View
# -------------------------------
# r read-only write-only
# w write-only read-only
# rw read-write read-write
#
# @param [String] mode The communication mode of the pipe.
# @option opts [Numeric] :timeout (nil)
# The number of seconds to wait for a +puts+ or +gets+ to succeed. If not
# specified, calls through the pipe will block forever until data is
# available. You can configure the +puts+ and +gets+ to be non-blocking
# by setting the timeout to +0+.
# @return [Piper]
#
def initialize( *args )
opts = args.last.is_a?(Hash) ? args.pop : {}
mode = args.first || 'r'
unless %w[r w rw].include? mode
raise ArgumentError, "Unsupported mode #{mode.inspect}"
end
@status = nil
@timeout = opts.fetch(:timeout, nil)
socket_pair = Socket.pair(Socket::AF_UNIX, Socket::SOCK_STREAM, 0)
@child_pid = Kernel.fork
if child?
@socket = socket_pair[1]
socket_pair[0].close
case mode
when 'r'; @socket.close_read
when 'w'; @socket.close_write
end
else
@socket = socket_pair[0]
socket_pair[1].close
case mode
when 'r'; @socket.close_write
when 'w'; @socket.close_read
end
end
end
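  # For example (a hedged sketch of the mode table above): with mode 'w' the
  # parent writes and the child reads.
  #
  #   piper = Servolux::Piper.new('w', :timeout => 5)
  #   piper.parent { piper.puts :work_item }
  #   piper.child  { handle(piper.gets) }   # `handle` is a hypothetical worker method
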
  # Close the communications socket. This only affects the process from
# which it was called -- the parent or the child.
#
# @return [Piper] self
#
def close
@socket.close unless @socket.closed?
self
end
# Returns +true+ if the piper has been closed. Returns +false+ otherwise.
#
# @return [Boolean]
#
def closed?
@socket.closed?
end
# Returns +true+ if the communications pipe is readable from the process
# and there is data waiting to be read.
#
# @return [Boolean]
#
def readable?
return false if @socket.closed?
r,_,_ = Kernel.select([@socket], nil, nil, @timeout) rescue nil
return !(r.nil? or r.empty?)
end
# Returns +true+ if the communications pipe is writeable from the process
# and the write buffer can accept more data.
#
# @return [Boolean]
#
def writeable?
return false if @socket.closed?
_,w,_ = Kernel.select(nil, [@socket], nil, @timeout) rescue nil
return !(w.nil? or w.empty?)
end
# Execute the _block_ only in the child process. This method returns
# immediately when called from the parent process. The piper instance is
# passed to the block if the arity is non-zero.
#
# @yield [self] Execute the block in the child process
# @yieldparam [Piper] self The piper instance (optional)
# @return The return value from the block or +nil+ when called from the
# parent.
#
def child( &block )
return unless child?
raise ArgumentError, "A block must be supplied" if block.nil?
if block.arity > 0
block.call(self)
else
block.call
end
end
# Returns +true+ if this is the child process and +false+ otherwise.
#
# @return [Boolean]
#
def child?
@child_pid.nil?
end
# Execute the _block_ only in the parent process. This method returns
# immediately when called from the child process. The piper instance is
# passed to the block if the arity is non-zero.
#
# @yield [self] Execute the block in the parent process
# @yieldparam [Piper] self The piper instance (optional)
# @return The return value from the block or +nil+ when called from the
# child.
#
def parent( &block )
return unless parent?
raise ArgumentError, "A block must be supplied" if block.nil?
if block.arity > 0
block.call(self)
else
block.call
end
end
# Returns +true+ if this is the parent process and +false+ otherwise.
#
# @return [Boolean]
#
def parent?
!@child_pid.nil?
end
# Returns the PID of the child process when called from the parent.
# Returns +nil+ when called from the child.
#
# @return [Integer, nil] The PID of the child process or +nil+
#
def pid
@child_pid
end
# Read an object from the communication pipe. If data is available then it
# is un-marshalled and returned as a Ruby object. If the pipe is closed for
# reading or if no data is available then the _default_ value is returned.
# You can pass in the _default_ value; otherwise it will be +nil+.
#
# This method will block until the +timeout+ is reached or data can be
# read from the pipe.
#
def gets( default = nil )
return default unless readable?
data = @socket.read SIZEOF_INT
return default if data.nil?
size = data.unpack('I').first
data = @socket.read size
return default if data.nil?
Marshal.load(data) rescue data
rescue SystemCallError
return default
end
# Write an object to the communication pipe. Returns +nil+ if the pipe is
# closed for writing or if the write buffer is full. The _obj_ is
# marshalled and written to the pipe (therefore, procs and other
# un-marshallable Ruby objects cannot be passed through the pipe).
#
# If the write is successful, then the number of bytes written to the pipe
# is returned. If this number is zero it means that the _obj_ was
# unsuccessfully communicated (sorry).
#
# @param [Object] obj The data to send to the "other" process. The object
# must be marshallable by Ruby (no Proc objects or lambdas).
# @return [Integer, nil] The number of bytes written to the pipe or +nil+ if
# there was an error or the pipe is not writeable.
#
def puts( obj )
return unless writeable?
data = Marshal.dump(obj)
@socket.write([data.size].pack('I')) + @socket.write(data)
rescue SystemCallError
return nil
end
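  # Wire format sketch (derived from #puts and #gets above): every message is a
  # native unsigned int length prefix followed by the Marshal payload.
  #
  #   payload = Marshal.dump(obj)
  #   frame   = [payload.size].pack('I') + payload
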
# Send the given signal to the child process. The signal may be an integer
# signal number or a POSIX signal name (either with or without a +SIG+
# prefix).
#
# This method does nothing when called from the child process.
#
# @param [String, Integer] sig The signal to send to the child process.
# @return [Integer, nil] The result of Process#kill or +nil+ if called from
# the child process.
#
def signal( sig )
return if child?
return unless alive?
Process.kill(sig, @child_pid)
end
# Waits for the child process to exit and returns its exit status. The
# global variable $? is set to a Process::Status object containing
# information on the child process.
#
# Always returns +nil+ when called from the child process.
#
# You can get more information about how the child status exited by calling
# the following methods on the piper instance:
#
# * coredump?
# * exited?
# * signaled?
# * stopped?
# * success?
# * exitstatus
# * stopsig
# * termsig
#
# @param [Integer] flags Bit flags that will be passed to the system level
# wait call. See the Ruby core documentation for Process#wait for more
# information on these flags.
# @return [Integer, nil] The exit status of the child process or +nil+ if
# the child process is not running.
#
def wait( flags = 0 )
return if child?
_, @status = Process.wait2(@child_pid, flags) unless @status
exitstatus
rescue Errno::ECHILD
nil
end
# Returns +true+ if the child process is alive. Returns +nil+ if the child
# process has not been started.
#
# Always returns +nil+ when called from the child process.
#
# @return [Boolean, nil]
#
def alive?
return if child?
wait(Process::WNOHANG|Process::WUNTRACED)
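    # Signal 0 checks that the process exists (and that we may signal it)
    # without actually delivering a signal.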
Process.kill(0, @child_pid)
true
rescue Errno::ESRCH, Errno::ENOENT
false
end
%w[coredump? exited? signaled? stopped? success? exitstatus stopsig termsig].
each { |method|
self.class_eval <<-CODE
def #{method}
return if @status.nil?
@status.#{method}
end
CODE
}
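  # For example (a hedged sketch): once the parent has called #wait, these
  # delegators expose the child's Process::Status, e.g. `piper.success?` or
  # `piper.exitstatus`.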
end
| 30.582447 | 79 | 0.662058 |